Code Example #1
# Imports assumed by this snippet (pre-1.0 watson_developer_cloud SDK, where
# each NLU feature is a class on the features.v1 module).
from watson_developer_cloud import NaturalLanguageUnderstandingV1
import watson_developer_cloud.natural_language_understanding.features.v1 as Features


def main(params):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username=params["username"],
        password=params["password"],
        version=params["version"])
    response = natural_language_understanding.analyze(
        url=params["url"],
        features=[
            Features.Concepts(limit=1),
            Features.Entities(limit=1),
            Features.Keywords(limit=1),
            Features.Categories(),
            Features.Emotion(),
            Features.Sentiment(),
            Features.MetaData(),
            Features.Relations(),
            Features.SemanticRoles(limit=1)
        ])
    return response
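A minimal invocation sketch for this handler; the credentials and URL below are placeholders, not values from the original:

# Hypothetical call; all values are placeholders.
params = {
    "username": "******",
    "password": "******",
    "version": "2017-02-27",
    "url": "https://example.com/article",
}
result = main(params)
print(result)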
Code Example #2
def handle_message(conversation_client, slack_client, workspace_id, context,
                   message, channel, user):
    """Handler for messages coming from Watson Conversation using context.

        Fields in context will trigger various actions in this application.

        :param str message: text from UI
        :param SlackSender sender: used for send_message, hard-coded as Slack

        :returns: response text built from the NLU and Cortical analyses,
         or an error message if no valid URL was found
        :rtype: str
    """
    global gv_nlu, gv_cortical_client, gv_bot_deafault_channel_name, gv_bot_deafault_channel_id, gv_ai
    url_list = []
    response = ""
    cortical_response_text = ""
    nlu_analyzed_text = ""
    nlu_responce_text = ""
    nlu_keyword = None
    nlu_entities = None
    context = None  # the incoming context is not used by this handler

    # extract URLs from the message of the post
    url_list = get_urls(slack_client, message)

    if url_list is not None:
        # send a message to the user indicating that the analysis process has started
        slack_client.api_call("chat.postMessage",
                              channel=channel,
                              text="analyzing . . . ",
                              as_user=True)
        for i in range(len(url_list)):
            try:
                # Analyze the URL article using WATSON Natural Language Understanding
                nlu_response = gv_nlu.analyze(url=url_list[i],
                                              return_analyzed_text=True,
                                              features=[
                                                  features.Categories(),
                                                  features.Concepts(),
                                                  features.Emotion(),
                                                  features.Entities(),
                                                  features.Keywords(),
                                                  features.MetaData(),
                                                  features.Relations(),
                                                  features.Sentiment()
                                              ])
                # extract fields from the JSON response returned by NLU
                nlu_responce_text, nlu_sentiment, nlu_categoties, nlu_entities, nlu_keyword, nlu_concepts, nlu_analyzed_text = convert_nlujson(
                    url_list[i], nlu_response)

            except WatsonException:
                # print(json.dumps(nlu_response, indent=2))
                nlu_responce_text = "Sentiments can not be retrieved from the URL"

            # perform CORTICAL semantic analysis and return the result as response text
            cortical_response_text = cortical_analyze(nlu_analyzed_text,
                                                      nlu_keyword,
                                                      nlu_entities)

            # build response text
            title = "\n\n\n ===== Watson Sentiment Analysis =====\n"
            response = response + title + nlu_responce_text + cortical_response_text  ## add "+ url_list[i]" to include the URL in the response text
    else:
        response = "No valid URL found !!!"

    return response
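This handler leans on module-level state that the snippet does not show. A minimal sketch of that setup, assuming the pre-1.0 watson_developer_cloud SDK implied by the features.* calls (credentials are placeholders; get_urls, convert_nlujson and cortical_analyze remain external helpers):

from watson_developer_cloud import NaturalLanguageUnderstandingV1, WatsonException
import watson_developer_cloud.natural_language_understanding.features.v1 as features

# global NLU client referenced as gv_nlu inside handle_message
gv_nlu = NaturalLanguageUnderstandingV1(
    username="******",
    password="******",
    version="2017-02-27")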
Code Example #3
# Imports assumed by this snippet (pre-1.0 watson_developer_cloud SDK);
# qqll is defined earlier in the original script and is assumed to hold the
# URL to analyze.
import json

from watson_developer_cloud import NaturalLanguageUnderstandingV1, WatsonException
import watson_developer_cloud.natural_language_understanding.features.v1 as Features

natural_language_understanding = NaturalLanguageUnderstandingV1(
  username="******",
  password="******",
  version="2017-02-27")
with open('IBMjson', 'w') as ibmoutfile:
    try:
        response = natural_language_understanding.analyze(
            url=qqll,
            features=[
                Features.Emotion(),
                Features.Sentiment(),
                Features.Concepts(limit=1),
                Features.Keywords(limit=1, sentiment=False, emotion=False),
                Features.Categories(),
                Features.Entities(limit=1, sentiment=False, emotion=False),
                Features.MetaData()
            ]
        )
        # print(json.dumps(response, indent=2))
        json.dump(response, ibmoutfile)
    except WatsonException:
        # on failure, dump the bare URL instead of an analysis result
        json.dump(qqll, ibmoutfile)

#==============================================================================
# URL        
#==============================================================================
#==============================================================================
# Readability API
#==============================================================================
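Because the script writes either the full NLU response or the bare URL to the same file, a reader of IBMjson has to distinguish the two cases. A minimal sketch of reading the dump back (the isinstance check is an assumption about the two possible payloads):

import json

with open('IBMjson') as ibminfile:
    payload = json.load(ibminfile)
if isinstance(payload, dict):
    print(payload.get("metadata"))  # full NLU response
else:
    print("analysis failed for:", payload)  # only the URL was dumped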
Code Example #4
def test_metadata(self):
    m = features.MetaData()
    assert m.name() == 'metadata'
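The self parameter suggests this was lifted from a test class. A minimal sketch of a harness around it, assuming the old SDK's features module (the wrapper class name is hypothetical):

import unittest
import watson_developer_cloud.natural_language_understanding.features.v1 as features

class TestFeatures(unittest.TestCase):  # hypothetical wrapper class
    def test_metadata(self):
        m = features.MetaData()
        assert m.name() == 'metadata'

if __name__ == '__main__':
    unittest.main()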
Code Example #5
def handle_message(conversation_client, slack_client, workspace_id, context,
                   message, channel, user):
    """Handler for messages coming from Watson Conversation using context.

        Fields in context will trigger various actions in this application.

        :param str message: text from UI
        :param SlackSender sender: used for send_message, hard-coded as Slack

        :returns: True always; responses are posted directly to the Slack
         channel rather than returned
        :rtype: Bool
    """
    global gv_nlu, gv_cortical_client, gv_bot_deafault_channel_name, gv_bot_deafault_channel_id
    url_list = []
    response = ""
    cortical_response_text = ""
    nlu_analyzed_text = ""
    nlu_responce_text = ""
    nlu_keyword = None
    nlu_entities = None
    context = None  # the incoming context is not used by this handler

    # extract URLs from the message of the post
    url_list = get_urls(slack_client, message)

    if url_list is not None:
        # send a message to the user indicating that the analysis process has started
        slack_client.api_call("chat.postMessage",
                              channel=channel,
                              text="analyzing . . . ",
                              as_user=True)
        for i in range(len(url_list)):
            try:
                # Analyze the URL article using WATSON Natural Language Understanding
                nlu_response = gv_nlu.analyze(url=url_list[i],
                                              return_analyzed_text=True,
                                              features=[
                                                  features.Categories(),
                                                  features.Concepts(),
                                                  features.Emotion(),
                                                  features.Entities(),
                                                  features.Keywords(),
                                                  features.MetaData(),
                                                  features.Relations(),
                                                  features.Sentiment()
                                              ])
                # extract fields from the JSON response returned by NLU
                nlu_responce_text, nlu_sentiment, nlu_categoties, nlu_entities, nlu_keyword, nlu_concepts, nlu_analyzed_text = convert_nlujson(
                    url_list[i], nlu_response)

            except WatsonException:
                # print(json.dumps(nlu_response, indent=2))
                nlu_responce_text = "Sentiments can not be retrieved from the URL"

            # perform CORTICAL semantic analysis and return the result as response text
            cortical_response_text = cortical_analyze(nlu_analyzed_text,
                                                      nlu_keyword,
                                                      nlu_entities)

            # build response text
            title = "\n\n\n ===== Watson Sentiment Analysis =====\n"
            response = url_list[
                i] + title + nlu_responce_text + cortical_response_text

            #             slack_client.api_call("chat.postMessage", channel=gv_bot_deafault_channel_id, text=response, as_user=True)  ## uncomment to post responses to the default channel
            slack_client.api_call(
                "chat.postMessage",
                channel=channel,
                text=response,
                as_user=True
            )  # post the response to the sender's channel

    # post a receipt of the message on the channel if it was not received from the default channel
#     if channel != gv_bot_deafault_channel_id :                                                                    ## uncomment to send a receipt of the url to the sender
#         slack_client.api_call("chat.postMessage", channel=channel, text="Thanks, new post has been received !!!", as_user=True)

    else:
        slack_client.api_call(
            "chat.postMessage",
            channel=channel,
            text="No URL found! \nI am trained to read text from a URL, conduct sentiment analysis, and classify it using semantic comparison with points of interest: Technologies, Companies and Interests",
            as_user=True)

    return True
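A sketch of the event loop that might drive this handler, assuming the legacy slackclient library implied by the api_call style (the token, polling interval and event filtering are assumptions):

import time
from slackclient import SlackClient  # legacy 1.x slackclient

slack_client = SlackClient("xoxb-***")  # placeholder bot token
if slack_client.rtm_connect():
    while True:
        for event in slack_client.rtm_read():
            if event.get("type") == "message" and "text" in event:
                handle_message(None, slack_client, None, None,
                               event["text"], event["channel"],
                               event.get("user"))
        time.sleep(1)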
Code Example #6
def eval_metadata():
    response = nlu.analyze(
        url='https://www.ibm.com/blogs/think/2017/01/cognitive-grid/',
        features=[features.MetaData()])
    return jsonify(response)
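jsonify implies a Flask app, and nlu is a module-level client. A minimal sketch of the surrounding setup (route name and credentials are assumptions):

from flask import Flask, jsonify
from watson_developer_cloud import NaturalLanguageUnderstandingV1
import watson_developer_cloud.natural_language_understanding.features.v1 as features

app = Flask(__name__)
nlu = NaturalLanguageUnderstandingV1(
    username="******", password="******", version="2017-02-27")

app.add_url_rule('/metadata', 'eval_metadata', eval_metadata)  # hypothetical route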
Code Example #7
# Imports assumed by this snippet (pre-1.0 watson_developer_cloud SDK). Helper
# functions referenced below (emotion2result, categories2result,
# concepts2result, dic2item, try_url, clean) are defined elsewhere in the
# original module.
import json

from watson_developer_cloud import NaturalLanguageUnderstandingV1
import watson_developer_cloud.natural_language_understanding.features.v1 as features


def main(args):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version='2017-02-27',
        username='******',
        password='******')

    features_list = [
        features.Categories(),
        features.Concepts(),
        features.Emotion(),
        features.Entities(),
        features.Keywords(),
        features.MetaData(),
        features.Relations(),
        features.Sentiment()
    ]
    input_param_list = [
        'Categories', 'Concepts', 'Emotion', 'Entities', 'Keywords',
        'MetaData', 'Relations', 'Sentiment'
    ]
    input_param = args.get("type", "Emotion")

    response = natural_language_understanding.analyze(
        text=args.get("text", None),
        url=args.get("url", None),
        html=args.get("html", None),
        features=[features_list[input_param_list.index(input_param)]])

    if (args.get("type", "Emotion") == "Emotion"):
        result = emotion2result(response)
        return result
    if (args.get("type", "Emotion") == "Categories"):
        result = categories2result(response)
        return result
    if (args.get("type", "Emotion") == "Concepts"):
        result = concepts2result(response)
        return result
    if (args.get("type", "Emotion") != "Entities"):
        return response

    itemlist = dic2item(response)

    wiki_query = "http://en.wikipedia.org/w/api.php?action=query&" \
                 "prop=extracts&format=json&exintro=&titles="

    count = 0
    index = 0
    extractlist = {}
    while count < 3 and index < len(itemlist):
        # build the "+"-separated title query from the entity name
        string = "+".join(itemlist[index][0].split(" "))
        res = try_url(wiki_query + string)
        res_json = json.loads(res)
        extract = res_json["query"]["pages"]
        pagenum = list(extract.keys())[0]
        if pagenum != "-1":
            count += 1
            extract = extract[pagenum]["extract"]
            # keep only the first sentence of the Wikipedia intro
            extract = extract.split(". ")[0] + "."
            extract = clean(extract)
            extractlist[itemlist[index][0]] = extract
        index += 1
    if not extractlist:
        return {"NULL": "NULL"}
    return extractlist
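A minimal invocation sketch; the args keys follow the snippet, and the text and URL values are illustrative placeholders:

# Hypothetical calls; "type" selects which NLU feature to run.
print(main({"type": "Emotion", "text": "I am thrilled with these results."}))
print(main({"type": "Entities", "url": "https://example.com/article"}))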