Example No. 1
def main():
    # Read whitespace-separated strings from a file and analyze each one.
    # (`NLU` is a configured client; see the setup sketch below.)
    with open("compost.txt", "r") as f:
        strings = f.read().split()
    while strings:
        response = NLU.analyze(text=strings.pop(0),
                               features=[
                                   features.Keywords(),
                                   features.Categories(),
                                   features.Concepts()
                               ])
        print(response.keywords[0].text)
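
These snippets assume the 2017-era watson_developer_cloud Python SDK (v0.x), in which the feature classes live in a dedicated features module and the client authenticates with a username and password. A minimal setup sketch for the client and imports the examples rely on (the credentials are placeholders, and the module path should be checked against your installed SDK version):

# Setup sketch for the v0.x-era SDK; placeholder credentials are assumptions.
from watson_developer_cloud import NaturalLanguageUnderstandingV1
import watson_developer_cloud.natural_language_understanding.features.v1 as features

NLU = NaturalLanguageUnderstandingV1(
    username="YOUR_USERNAME",  # placeholder
    password="YOUR_PASSWORD",  # placeholder
    version="2017-02-27")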
Example No. 2
def analyze_url(url):
    try:
        response = natural_language_understanding.analyze(
            url=url, features=[features.Keywords(),
                               features.Categories()])
    except WatsonException as e:
        print("analyze_url: WatsonException")
        # Re-raise quota errors; swallow everything else and return None.
        if 'Error: limit exceeded for free plan, Code: 403' in e.args:
            raise e
        return None

    return response
Example No. 3

def nlu(text):
    # `n` is a configured NaturalLanguageUnderstandingV1 client (see the setup
    # sketch under Example No. 1); json.dumps requires `import json`.
    response = n.analyze(text=text,
                         features=[
                             features.Emotion(),
                             features.Concepts(),
                             features.Categories(),
                             features.Entities(),
                             features.Keywords(),
                             features.SemanticRoles(),
                             features.Relations(),
                             features.Sentiment()
                         ],
                         language='en')
    return json.dumps(response, indent=2)
Example No. 4
def map_feature(name):
    feature_name_mappings = {
        'keywords': features.Keywords(),
        'entities': features.Entities(),
        'concepts': features.Concepts(),
        'categories': features.Categories(),
        'sentiment': features.Sentiment(),
        'emotion': features.Emotion()
    }
    if name in feature_name_mappings:
        return feature_name_mappings[name]
    else:
        print("Invalid feature name")
        return None
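
A caller can use map_feature to translate user-supplied feature names into SDK objects, dropping unknown names. A small usage sketch (the requested names are arbitrary):

# Usage sketch: 'bogus' prints "Invalid feature name" and is skipped.
requested = ['keywords', 'sentiment', 'bogus']
selected = [f for f in (map_feature(name) for name in requested) if f is not None]
# selected now holds features.Keywords() and features.Sentiment() instances.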
Example No. 5
def featureList(self, tags):
    f_list = []
    for tag in tags:
        if tag == "sentiment":
            f_list.append(features.Sentiment())
        elif tag == "categories":
            f_list.append(features.Categories())
        elif tag == "concepts":
            f_list.append(features.Concepts())
        elif tag == "emotion":
            f_list.append(features.Emotion())
        elif tag == "entities":
            f_list.append(features.Entities())
    return f_list
Example No. 6
def main(params):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username=params["username"],
        password=params["password"],
        version=params["version"])
    response = natural_language_understanding.analyze(
        url=params["url"],
        features=[
            Features.Concepts(limit=1),
            Features.Entities(limit=1),
            Features.Keywords(limit=1),
            Features.Categories(),
            Features.Emotion(),
            Features.Sentiment(),
            Features.MetaData(),
            Features.Relations(),
            Features.SemanticRoles(limit=1)
        ])
    return response
Example No. 7

def execute_watson_request(text):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username=constants.WATSON_USER,
        password=constants.WATSON_PASS,
        version="2017-02-27")

    try:
        response = natural_language_understanding.analyze(
            text=text,
            features=[
                features.Concepts(),
                features.Categories(),
                features.Emotion(),
                features.Entities(emotion=True, sentiment=True),
                features.Keywords(emotion=True, sentiment=True),
                features.Sentiment()
            ])
        return response
    except WatsonException as error:
        return str(error)
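
Both error-handling examples above catch WatsonException, which the v0.x-era SDK exported at the package top level; a brief usage sketch (the import location is an assumption to verify against your installed version, and the sample sentence is arbitrary):

from watson_developer_cloud import WatsonException

result = execute_watson_request("IBM Watson analyzes natural language.")
# On failure, execute_watson_request returns the error string instead of a response.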
Example No. 8
def nlp(input_stuff):
    # Calls NaturalLanguageUnderstandingV1 API
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version='2017-02-27',
        username="******",  # API Key
        password="******")  # Replace with personal API

    response = natural_language_understanding.analyze(
        text=input_stuff,
        features=[
            features.Concepts(),
            features.Entities(),
            features.Keywords(),
            features.Categories(),
            features.Emotion(),
            features.Sentiment(),
            features.Relations(),
            features.SemanticRoles()
        ])
    nlu_data = {
        'sentiment': response["sentiment"],
        'semanticRoles': response["semantic_roles"],
        'concepts': response["concepts"],  # duplicate 'concepts' key removed
        'entities': response["entities"],
        'relations': response["relations"],
        'categories': response["categories"]  # fixed 'categoreis' typo
    }
    nlu_data = [nlu_data]
    # print(nlu_data)
    return nlu_data
Example No. 9
    def understand_text(self):
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username=nlu_settings.get("username"),
            password=nlu_settings.get("password"),
            version="2017-02-27")

        self.nl_understanding = natural_language_understanding.analyze(
            text=self.converted_text,
            features=[
                Features.Entities(emotion=True, sentiment=True, limit=100),
                Features.Keywords(emotion=True, sentiment=True, limit=100),
                Features.Categories(),
                Features.Concepts(),
                Features.Sentiment(),
                Features.Emotion(),
                #     Features.Feature(),
                #     Features.MetaData(),
                Features.Relations(),
                Features.SemanticRoles(),
            ])

        return self.nl_understanding
Example No. 10
def callNLU(text):
    '''
    Checks which features are enabled, then makes a call to NLU and returns JSON.
    :param text: The string containing the information you want to analyse.
    '''
    if text is None or text.strip() == '':
        return {}

    f = []
    if c.getboolean('nlu_feature', 'concepts'):
        f.append(features.Concepts())
    if c.getboolean('nlu_feature', 'entities'):
        f.append(features.Entities())
    if c.getboolean('nlu_feature', 'keywords'):
        f.append(features.Keywords())
    if c.getboolean('nlu_feature', 'categories'):
        f.append(features.Categories())
    if c.getboolean('nlu_feature', 'emotion'):
        f.append(features.Emotion())
    if c.getboolean('nlu_feature', 'semanticroles'):
        f.append(features.SemanticRoles())
    if c.getboolean('nlu_feature', 'relations'):
        f.append(features.Relations())
    if c.getboolean('nlu_feature', 'sentiment'):
        f.append(features.Sentiment())

    r = nlu.analyze(text=text, features=f)

    return r
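
This snippet reads its feature toggles from a ConfigParser instance c with an [nlu_feature] section, neither of which is shown. A minimal sketch of that setup, with the section and option names inferred from the getboolean calls above and illustrative flag values:

# Sketch of the assumed configuration object; the flag values are illustrative.
import configparser

c = configparser.ConfigParser()
c.read_string("""
[nlu_feature]
concepts = true
entities = true
keywords = true
categories = true
emotion = false
semanticroles = false
relations = false
sentiment = true
""")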
Example No. 11
def get_data_from_bluemix(target_url):
    nl_understanding = cache_get(target_url)
    if not nl_understanding:
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username=nlu_settings.get("username"),
            password=nlu_settings.get("password"),
            version="2017-02-27")
        features = [
            Features.Entities(limit=100, emotion=True, sentiment=True),
            Features.Keywords(limit=100, emotion=True, sentiment=True),
            Features.Categories(),
            Features.Concepts(),
            Features.Sentiment(),
            Features.Emotion(),
            #     Features.Feature(),
            #     Features.MetaData(),
            Features.Relations(),
            Features.SemanticRoles(),
        ]
        nl_understanding = None

        for i in range(NUMBEROFTRIES):
            try:
                nl_understanding = natural_language_understanding.analyze(
                    url=target_url,
                    features=features
                )
            except Exception:
                # Swallow the failure and retry (up to NUMBEROFTRIES attempts).
                pass

            if nl_understanding:
                break
        cache_put(target_url, nl_understanding)

    return nl_understanding
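
cache_get and cache_put are external helpers that this example does not define; a minimal in-memory stand-in (purely hypothetical, with no eviction or persistence):

# Hypothetical in-memory versions of the cache helpers used above.
_cache = {}

def cache_get(key):
    return _cache.get(key)

def cache_put(key, value):
    _cache[key] = value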
Example No. 12
def main(args):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version='2017-02-27',
        username='******',
        password='******')

    features_list = [
        features.Categories(),
        features.Concepts(),
        features.Emotion(),
        features.Entities(),
        features.Keywords(),
        features.MetaData(),
        features.Relations(),
        features.Sentiment()
    ]
    input_param_list = [
        'Categories', 'Concepts', 'Emotion', 'Entities', 'Keywords',
        'MetaData', 'Relations', 'Sentiment'
    ]
    input_param = args.get("type", "Emotion")

    response = natural_language_understanding.analyze(
        text=args.get("text", None),
        url=args.get("url", None),
        html=args.get("html", None),
        features=[features_list[input_param_list.index(input_param)]])

    if input_param == "Emotion":
        return emotion2result(response)
    if input_param == "Categories":
        return categories2result(response)
    if input_param == "Concepts":
        return concepts2result(response)
    if input_param != "Entities":
        return response

    itemlist = dic2item(response)

    wiki_query = "http://en.wikipedia.org/w/api.php?action=query&" \
                 "prop=extracts&format=json&exintro=&titles="

    count = 0
    index = 0
    extractlist = {}
    while count < 3 and index < len(itemlist):
        # Build a "+"-separated title string for the Wikipedia query.
        string = "+".join(itemlist[index][0].split(" "))
        res = try_url(wiki_query + string)
        # print(res)
        res_json = json.loads(res)
        extract = res_json["query"]["pages"]
        pagenum = list(extract.keys())[0]
        if pagenum != "-1":
            count += 1
            extract = extract[pagenum]["extract"]
            # Keep only the first sentence of the intro extract.
            slist = extract.split(". ")
            if slist:
                extract = slist[0] + "."
            extract = clean(extract)
            extractlist[itemlist[index][0]] = extract
        index += 1
    if not extractlist:
        return {"NULL": "NULL"}
    return extractlist
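
dic2item, try_url and clean are helpers defined elsewhere in the original project; a minimal try_url stand-in using only the standard library might look like this (hypothetical, with no retries or error handling):

# Hypothetical stand-in for try_url(): fetch a URL and return the body as text.
from urllib.request import urlopen

def try_url(url):
    with urlopen(url) as resp:
        return resp.read().decode("utf-8")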
Example No. 13
def handle_message(conversation_client, slack_client, workspace_id, context,
                   message, channel, user):
    """Handler for messages coming from Watson Conversation using context.

        Fields in context will trigger various actions in this application.

        :param str message: text from UI
        :param SlackSender sender: used for send_message, hard-coded as Slack

        :returns: True if UI input is required, False if we want app
         processing and no input
        :rtype: Bool
    """
    global gv_nlu, gv_cortical_client, gv_bot_deafault_channel_name, gv_bot_deafault_channel_id, gv_ai
    url_list = []
    response = ""
    cortical_response_text = ""
    nlu_analyzed_text = ""
    nlu_responce_text = ""
    nlu_keyword = None
    nlu_entities = None
    context = None

    # extract URLs from the message of the post
    url_list = get_urls(slack_client, message)

    if url_list is not None:
        # Send a message to the user indicating that analysis has started.
        slack_client.api_call("chat.postMessage",
                              channel=channel,
                              text="analyzing . . . ",
                              as_user=True)
        for i in range(len(url_list)):
            try:
                # Analyze the URL article using WATSON Natural Language Understanding
                nlu_response = gv_nlu.analyze(url=url_list[i],
                                              return_analyzed_text=True,
                                              features=[
                                                  features.Categories(),
                                                  features.Concepts(),
                                                  features.Emotion(),
                                                  features.Entities(),
                                                  features.Keywords(),
                                                  features.MetaData(),
                                                  features.Relations(),
                                                  features.Sentiment()
                                              ])
                # extract the relevant fields from the JSON returned by NLU
                nlu_responce_text, nlu_sentiment, nlu_categoties, nlu_entities, nlu_keyword, nlu_concepts, nlu_analyzed_text = convert_nlujson(
                    url_list[i], nlu_response)

            except WatsonException:
                # print(json.dumps(nlu_response, indent=2))
                nlu_responce_text = "Sentiments cannot be retrieved from the URL"

            # perform Cortical semantic analysis and return the results as response text
            cortical_response_text = cortical_analyze(nlu_analyzed_text,
                                                      nlu_keyword,
                                                      nlu_entities)

            # build response text
            title = "\n\n\n ===== Watson Sentiment Analysis =====\n"
            response = response + title + nlu_responce_text + cortical_response_text  ## Uncomment to add URL to the response text <+ url_list[i]>
    else:
        response = "No valid URL found !!!"

    return response
Example No. 14
def eval_categories():
    response = nlu.analyze(url='www.cnn.com', features=[features.Categories()])
    return jsonify(response)
Example No. 15
def handle_message(conversation_client, slack_client, workspace_id, context,
                   message, channel, user):
    """Handler for messages coming from Watson Conversation using context.

        Fields in context will trigger various actions in this application.

        :param str message: text from UI
        :param SlackSender sender: used for send_message, hard-coded as Slack

        :returns: True if UI input is required, False if we want app
         processing and no input
        :rtype: Bool
    """
    global gv_nlu, gv_cortical_client, gv_bot_deafault_channel_name, gv_bot_deafault_channel_id
    url_list = []
    response = ""
    cortical_response_text = ""
    nlu_analyzed_text = ""
    nlu_responce_text = ""
    nlu_keyword = None
    nlu_entities = None
    context = None

    # extract URLs from the message of the post
    url_list = get_urls(slack_client, message)

    if url_list is not None:
        # Send a message to the user indicating that analysis has started.
        slack_client.api_call("chat.postMessage",
                              channel=channel,
                              text="analyzing . . . ",
                              as_user=True)
        for i in range(len(url_list)):
            try:
                # Analyze the URL article using WATSON Natural Language Understanding
                nlu_response = gv_nlu.analyze(url=url_list[i],
                                              return_analyzed_text=True,
                                              features=[
                                                  features.Categories(),
                                                  features.Concepts(),
                                                  features.Emotion(),
                                                  features.Entities(),
                                                  features.Keywords(),
                                                  features.MetaData(),
                                                  features.Relations(),
                                                  features.Sentiment()
                                              ])
                # extract the relevant fields from the JSON returned by NLU
                nlu_responce_text, nlu_sentiment, nlu_categoties, nlu_entities, nlu_keyword, nlu_concepts, nlu_analyzed_text = convert_nlujson(
                    url_list[i], nlu_response)

            except WatsonException:
                # print(json.dumps(nlu_response, indent=2))
                nlu_responce_text = "Sentiments cannot be retrieved from the URL"

            # perform Cortical semantic analysis and return the results as response text
            cortical_response_text = cortical_analyze(nlu_analyzed_text,
                                                      nlu_keyword,
                                                      nlu_entities)

            # build response text
            title = "\n\n\n ===== Watson Sentiment Analysis =====\n"
            response = url_list[i] + title + nlu_responce_text + cortical_response_text

            #             slack_client.api_call("chat.postMessage", channel=gv_bot_deafault_channel_id, text=response, as_user=True)  ## uncomment to post responses at the default channel
            slack_client.api_call(
                "chat.postMessage",
                channel=channel,
                text=response,
                as_user=True)  # post the response to the sender's channel

    # post receipt of the message on the channel if it is not received from the default channel
#     if channel != gv_bot_deafault_channel_id :                                                                    ## uncomment to send receipt of the url to the sender
#         slack_client.api_call("chat.postMessage", channel=channel, text="Thanks, new post has been received !!!", as_user=True)

    else:
        slack_client.api_call(
            "chat.postMessage",
            channel=channel,
            text="No URL found!!!\nI am trained to read text from a URL, conduct "
                 "sentiment analysis and classify it using semantic comparison with "
                 "points of interest: Technologies, Companies and Interests",
            as_user=True)

    return True
Example No. 16
            questions[questionToInsert] = counter
            break

    # When the end of the grouping has been reached, run the APIs
    if counter % lineCount == 0:
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username="******",
            password="******",
            version="2017-02-27")

        response = natural_language_understanding.analyze(
            text=dialogue,
            features=[
                Features.Concepts(),
                #Features.Keywords(),
                Features.Categories()
            ])

        currConcepts = []
        currKeywords = []
        currCategories = []
        for concept in response['concepts']:
            if concept['text'] not in concepts:
                currConcepts.append(concept['text'])
                concepts[concept['text']] = [counter]
            else:
                concepts[concept['text']].append(counter)
        '''for keyword in response['keywords']:
            if(keyword['text'] not in keywords):
                currKeywords.append(keyword['text'])
                keywords[keyword['text']] = [counter]'''
Example No. 17

def test_categories(self):
    c = features.Categories()
    assert(c.name() == 'categories')
    assert(c.toDict() == {})