Example #1
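Scans a directory of JSON files (apparently court opinions, given the html_lawbox field), keeps those mentioning conviction-related keywords, and runs Entities and Relations analysis on each match.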
import json
from os import listdir
from os.path import isfile, join

# Import path for the early-2017 watson_developer_cloud SDK releases
# that these examples appear to target
from watson_developer_cloud import NaturalLanguageUnderstandingV1
import watson_developer_cloud.natural_language_understanding.features.v1 as features


def main():
    # CREDENTIALS (defined elsewhere) points at a file containing a dict literal
    credentials = eval(open(CREDENTIALS, "r").read())

    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version="2017-02-27",
        username=credentials["username"],
        password=credentials["password"]
    )

    listOfFiles = [f for f in listdir(PATH) if isfile(join(PATH, f))]  # or e.g. ["testData/354962.json"]
    for fileName in listOfFiles:
        fileIn = open(join(PATH, fileName), "r")
        data = json.load(fileIn)
        val = data["html_lawbox"]
        keywords = ["convict", "acquit", "guilty", "innocen", "sentenc"]
        if any(word in val for word in keywords):
            print("Good file: ", fileName)
            for word in keywords:
                if word in val:
                    print("it has ", word)
            response = natural_language_understanding.analyze(
                text=val,
                features=[features.Entities(), features.Relations()]
            )
            print(json.dumps(response, indent=2))
        else:
            print("Bad file: ", fileName)
def nlu(text):
    response = n.analyze(text=text,
                         features=[
                             features.Emotion(),
                             features.Concepts(),
                             features.Categories(),
                             features.Entities(),
                             features.Keywords(),
                             features.SemanticRoles(),
                             features.Relations(),
                             features.Sentiment()
                         ],
                         language='en')
    return json.dumps(response, indent=2)
Example #3
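Analyzes a URL with every feature, capped at one result each where a limit is supported; credentials and the target URL arrive in a params dict, in the style of an IBM Cloud Functions action.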
def main(params):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username=params["username"],
        password=params["password"],
        version=params["version"])
    response = natural_language_understanding.analyze(
        url=params["url"],
        features=[
            Features.Concepts(limit=1),
            Features.Entities(limit=1),
            Features.Keywords(limit=1),
            Features.Categories(),
            Features.Emotion(),
            Features.Sentiment(),
            Features.MetaData(),
            Features.Relations(),
            Features.SemanticRoles(limit=1)
        ])
    return response
Example #4
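An instance method that analyzes previously converted text, enabling per-entity and per-keyword emotion and sentiment, and caches the result on the object.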
    def understand_text(self):
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username=nlu_settings.get("username"),
            password=nlu_settings.get("password"),
            version="2017-02-27")

        self.nl_understanding = natural_language_understanding.analyze(
            text=self.converted_text,
            features=[
                Features.Entities(emotion=True, sentiment=True, limit=100),
                Features.Keywords(emotion=True, sentiment=True, limit=100),
                Features.Categories(),
                Features.Concepts(),
                Features.Sentiment(),
                Features.Emotion(),
                #     Features.Feature(),
                #     Features.MetaData(),
                Features.Relations(),
                Features.SemanticRoles(),
            ])

        return self.nl_understanding
Example #5
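Builds the feature list dynamically from a configuration file (one boolean flag per feature) before making the call.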
def callNLU(text):
    '''
    Checks which features are enabled, then makes a call to NLU and returns the JSON response.
    :param text: The string containing the information you want to analyse.
    '''
    if text is None or not text.strip():
        return {}

    f = []
    if c.getboolean('nlu_feature', 'concepts'): f.append(features.Concepts())
    if c.getboolean('nlu_feature', 'entities'): f.append(features.Entities())
    if c.getboolean('nlu_feature', 'keywords'): f.append(features.Keywords())
    if c.getboolean('nlu_feature', 'categories'):
        f.append(features.Categories())
    if c.getboolean('nlu_feature', 'emotion'): f.append(features.Emotion())
    if c.getboolean('nlu_feature', 'semanticroles'):
        f.append(features.SemanticRoles())
    if c.getboolean('nlu_feature', 'relations'): f.append(features.Relations())
    if c.getboolean('nlu_feature', 'sentiment'): f.append(features.Sentiment())

    r = nlu.analyze(text=text, features=f)

    return r
Example #6
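Caches the NLU result per URL and retries the analyze call up to NUMBEROFTRIES times before giving up.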
def get_data_from_bluemix(target_url):
    nl_understanding = cache_get(target_url)
    if not nl_understanding:
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username=nlu_settings.get("username"),
            password=nlu_settings.get("password"),
            version="2017-02-27")
        features = [
                Features.Entities(limit=100,emotion=True,sentiment=True),
                Features.Keywords(limit=100,emotion=True,sentiment=True),
                Features.Categories(),
                Features.Concepts(),
                Features.Sentiment(),
                Features.Emotion(),
                #     Features.Feature(),
                #     Features.MetaData(),
                Features.Relations(),
                Features.SemanticRoles(),
            ]
        nl_understanding = None

        for i in range(NUMBEROFTRIES):
            try:
                nl_understanding = natural_language_understanding.analyze(
                    url=target_url,
                    features=features
                )
            except Exception:
                # ignore the failure and retry; gives up after NUMBEROFTRIES attempts
                pass

            if nl_understanding:
                break
        cache_put(target_url, nl_understanding)

    return nl_understanding
Example #7
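A Slack bot handler: extracts URLs from a message, analyzes each one with NLU, combines the result with a separate cortical semantic analysis, and returns the accumulated response text.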
def handle_message(conversation_client, slack_client, workspace_id, context,
                   message, channel, user):
    """Handler for messages coming from Watson Conversation using context.

        Fields in context will trigger various actions in this application.

        :param str message: text from UI
        :param SlackSender sender: used for send_message, hard-coded as Slack

        :returns: True if UI input is required, False if we want app
         processing and no input
        :rtype: Bool
    """
    global gv_nlu, gv_cortical_client, gv_bot_deafault_channel_name, gv_bot_deafault_channel_id, gv_ai
    url_list = []
    response = ""
    cortical_response_text = ""
    nlu_analyzed_text = ""
    nlu_responce_text = ""
    nlu_keyword = None
    nlu_entities = None
    context = None

    # extract URLs from the message of the post
    url_list = get_urls(slack_client, message)

    if url_list is not None:
        # send a message to the user indicating that the analysis process has started
        slack_client.api_call("chat.postMessage",
                              channel=channel,
                              text="analyzing . . . ",
                              as_user=True)
        for i in range(len(url_list)):
            try:
                # Analyze the URL article using WATSON Natural Language Understanding
                nlu_response = gv_nlu.analyze(url=url_list[i],
                                              return_analyzed_text=True,
                                              features=[
                                                  features.Categories(),
                                                  features.Concepts(),
                                                  features.Emotion(),
                                                  features.Entities(),
                                                  features.Keywords(),
                                                  features.MetaData(),
                                                  features.Relations(),
                                                  features.Sentiment()
                                              ])
                # get information from JSON format resulted by NLU
                nlu_responce_text, nlu_sentiment, nlu_categoties, nlu_entities, nlu_keyword, nlu_concepts, nlu_analyzed_text = convert_nlujson(
                    url_list[i], nlu_response)

            except WatsonException:
                # print(json.dumps(nlu_response, indent=2))
                nlu_responce_text = "Sentiments can not be retrieved from the URL"

            # performs CORTICAL SEMANTIC analysis and returns results as a response text
            cortical_response_text = cortical_analyze(nlu_analyzed_text,
                                                      nlu_keyword,
                                                      nlu_entities)

            # build response text
            title = "\n\n\n ===== Watson Sentiment Analysis =====\n"
            response = response + title + nlu_responce_text + cortical_response_text  ## Uncomment to add URL to the response text <+ url_list[i]>
    else:
        response = "No valid URL found !!!"

    return response
Example #8
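Loops over local text files, requests Relations and SemanticRoles, and copies the subject of each semantic-role triple into a dataframe.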
ptr = 0
ctr = 0

#%%

for i in range(0, 5):
    doc_file = open(
        "C:/Users/Documents/Project/copy txt files/copy txt files/" +
        str(df_text.Filename[i])).read()
    demo_text = doc_file

    print(i)
    response = natural_language_understanding.analyze(
        text=demo_text,
        features=[Features.Relations(),
                  Features.SemanticRoles()])

    # time.sleep(2)
    ctr = ctr + 1
    #if j==2:
    #   time.sleep(5)

    for relation in response['semantic_roles']:
        df_ents.Filename[ptr] = df_text.Filename[i]

        # semantic_roles entries do not always include a subject
        if "subject" in relation:
            df_ents.Subject[ptr] = relation['subject']['text']
        else:
            df_ents.Subject[ptr] = ""
Example #9
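Compares the default NLU output with the output of a custom model for Entities and Relations; the model IDs presumably refer to a model trained in Watson Knowledge Studio (WKS).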

natural_language_understanding = NaturalLanguageUnderstandingV1(
    version='2017-02-27',
    username='******',
    password='******')


file = open('IPIC-31087.txt', 'r', encoding='utf8')
txt_input = file.read()
file.close()


response_default = natural_language_understanding.analyze(
    text=txt_input,
    features=[features.Entities(), features.Relations()])

response = natural_language_understanding.analyze(
    text=txt_input,
    features=[features.Entities(model='10:33c308c3-14fb-4612-a6bf-ea4da686896a'),
              features.Relations(model='10:33c308c3-14fb-4612-a6bf-ea4da686896a')])

print('Default NLU output')
print(json.dumps(response_default, indent=2))
print('--------------------------------------------------------------')
print('NLU output when invoked with a custom WKS model')
print(json.dumps(response, indent=2))


# collective_annotations = {}

# collective_annotations["relations"] = response_default["relations"] + response["relations"]
    def test_relations(self):
        r = features.Relations()
        assert r.name() == 'relations'
Example #11
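A variant of Example #7 that posts each analysis result back to the originating Slack channel instead of returning the combined text.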
def handle_message(conversation_client, slack_client, workspace_id, context,
                   message, channel, user):
    """Handler for messages coming from Watson Conversation using context.

        Fields in context will trigger various actions in this application.

        :param str message: text from UI
        :param SlackSender sender: used for send_message, hard-coded as Slack

        :returns: True if UI input is required, False if we want app
         processing and no input
        :rtype: Bool
    """
    global gv_nlu, gv_cortical_client, gv_bot_deafault_channel_name, gv_bot_deafault_channel_id
    url_list = []
    response = ""
    cortical_response_text = ""
    nlu_analyzed_text = ""
    nlu_responce_text = ""
    nlu_keyword = None
    nlu_entities = None
    context = None

    # extract URLs from the message of the post
    url_list = get_urls(slack_client, message)

    if url_list is not None:
        # send a message to the user indicating that the analysis process has started
        slack_client.api_call("chat.postMessage",
                              channel=channel,
                              text="analyzing . . . ",
                              as_user=True)
        for i in range(len(url_list)):
            try:
                # Analyze the URL article using WATSON Natural Language Understanding
                nlu_response = gv_nlu.analyze(url=url_list[i],
                                              return_analyzed_text=True,
                                              features=[
                                                  features.Categories(),
                                                  features.Concepts(),
                                                  features.Emotion(),
                                                  features.Entities(),
                                                  features.Keywords(),
                                                  features.MetaData(),
                                                  features.Relations(),
                                                  features.Sentiment()
                                              ])
                # get information from JSON format resulted by NLU
                nlu_responce_text, nlu_sentiment, nlu_categoties, nlu_entities, nlu_keyword, nlu_concepts, nlu_analyzed_text = convert_nlujson(
                    url_list[i], nlu_response)

            except WatsonException:
                # print(json.dumps(nlu_response, indent=2))
                nlu_responce_text = "Sentiments can not be retrieved from the URL"

            # performs CORTICAL SEMANTIC analysis and returns results as a response text
            cortical_response_text = cortical_analyze(nlu_analyzed_text,
                                                      nlu_keyword,
                                                      nlu_entities)

            # build response text
            title = "\n\n\n ===== Watson Sentiment Analysis =====\n"
            response = url_list[
                i] + title + nlu_responce_text + cortical_response_text

            #             slack_client.api_call("chat.postMessage", channel=gv_bot_deafault_channel_id, text=response, as_user=True)  ## uncomment to post responses at Default channel
            slack_client.api_call(
                "chat.postMessage",
                channel=channel,
                text=response,
                as_user=True
            )  # post the response to the sender's channel

    # post a receipt of the message on the channel if it was not received from the default channel
#     if channel != gv_bot_deafault_channel_id :                                                                    ## uncomment to send receipt of the url to the sender
#         slack_client.api_call("chat.postMessage", channel=channel, text="Thanks, new post has been received !!!", as_user=True)

    else:
        slack_client.api_call(
            "chat.postMessage",
            channel=channel,
            text="No URL found!\nI am trained to read text from a URL, conduct sentiment "
                 "analysis and classify it using semantic comparison with points of "
                 "interest: Technologies, Companies and Interests",
            as_user=True)

    return True
Example #12
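The minimal Relations call: analyze a single sentence and return the response as JSON (the jsonify call suggests this runs inside a Flask route).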
def eval_relations():
    response = nlu.analyze(
        text='The Nobel Prize in Physics 1921 was awarded to Albert Einstein.',
        features=[features.Relations()])
    return jsonify(response)
Example #13
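Selects a single feature from a type argument and analyzes text, URL, or HTML input; for Entities, it additionally fetches a one-sentence extract for each entity from the Wikipedia API.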
def main(args):
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version='2017-02-27',
        username='******',
        password='******')

    features_list = [
        features.Categories(),
        features.Concepts(),
        features.Emotion(),
        features.Entities(),
        features.Keywords(),
        features.MetaData(),
        features.Relations(),
        features.Sentiment()
    ]
    input_param_list = [
        'Categories', 'Concepts', 'Emotion', 'Entities', 'Keywords',
        'MetaData', 'Relations', 'Sentiment'
    ]
    input_param = args.get("type", "Emotion")

    response = natural_language_understanding.analyze(
        text=args.get("text", None),
        url=args.get("url", None),
        html=args.get("html", None),
        features=[features_list[input_param_list.index(input_param)]])

    if (args.get("type", "Emotion") == "Emotion"):
        result = emotion2result(response)
        return result
    if (args.get("type", "Emotion") == "Categories"):
        result = categories2result(response)
        return result
    if (args.get("type", "Emotion") == "Concepts"):
        result = concepts2result(response)
        return result
    if (args.get("type", "Emotion") != "Entities"):
        return response

    itemlist = dic2item(response)

    wiki_query = "http://en.wikipedia.org/w/api.php?action=query&" \
                 "prop=extracts&format=json&exintro=&titles="

    count = 0
    index = 0
    extractlist = {}
    while count < 3 and index < len(itemlist):
        # build a "word1+word2+..." title query from the entity name
        string = "+".join(itemlist[index][0].split(" "))
        res = try_url(wiki_query + string)
        res_json = json.loads(res)
        extract = res_json["query"]["pages"]
        pagenum = list(extract.keys())[0]  # dict keys are not indexable in Python 3
        if pagenum != "-1":
            count += 1
            extract = extract[pagenum]["extract"]
            # keep only the first sentence of the Wikipedia extract
            slist = extract.split(". ")
            if slist:
                extract = slist[0] + "."
            extract = clean(extract)
            extractlist[itemlist[index][0]] = extract
        index += 1
    if not extractlist:
        return {"NULL": "NULL"}
    return extractlist
Example #14
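Runs the full feature set on a text string and repackages selected fields of the response into a small dict.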
def nlp(input_stuff):
    # Calls NaturalLanguageUnderstandingV1 API
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version='2017-02-27',
        username="******",  # API Key
        password="******")  # Replace with personal API

    response = natural_language_understanding.analyze(
        text=input_stuff,
        features=[features.Concepts(), features.Entities(), features.Keywords(),
                  features.Categories(), features.Emotion(), features.Sentiment(),
                  features.Relations(), features.SemanticRoles()])
    nlu_data = {
        'sentiment': response["sentiment"],
        'semanticRoles': response["semantic_roles"],
        'concepts': response["concepts"],
        'entities': response["entities"],
        'relations': response["relations"],
        'categories': response["categories"]
    }
    nlu_data = [nlu_data]
    # print(nlu_data)
    return(nlu_data)