def nlu(text):
    # Analyze `text` with the full NLU feature set; `n` is a module-level
    # NaturalLanguageUnderstandingV1 client and `features` is the SDK's v1
    # features module (see the setup sketch below this example).
    response = n.analyze(text=text,
                         features=[
                             features.Emotion(),
                             features.Concepts(),
                             features.Categories(),
                             features.Entities(),
                             features.Keywords(),
                             features.SemanticRoles(),
                             features.Relations(),
                             features.Sentiment()
                         ],
                         language='en')
    return json.dumps(response, indent=2)
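
Example #1 assumes a module-level client `n`, the pre-1.0 watson_developer_cloud SDK's v1 `features` module, and `json`; a minimal setup sketch with placeholder credentials (the import path changed in the 1.x SDK):

import json
from watson_developer_cloud import NaturalLanguageUnderstandingV1
import watson_developer_cloud.natural_language_understanding.features.v1 as features

# Placeholder credentials; real values come from the IBM Cloud service instance.
n = NaturalLanguageUnderstandingV1(
    username='YOUR_USERNAME',
    password='YOUR_PASSWORD',
    version='2017-02-27')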
Example #2
def main(params):
    # Build the NLU client from the incoming params dict and analyze the
    # page at params["url"]; see the invocation sketch after this example.
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        username=params["username"],
        password=params["password"],
        version=params["version"])
    response = natural_language_understanding.analyze(
        url=params["url"],
        features=[
            Features.Concepts(limit=1),
            Features.Entities(limit=1),
            Features.Keywords(limit=1),
            Features.Categories(),
            Features.Emotion(),
            Features.Sentiment(),
            Features.MetaData(),
            Features.Relations(),
            Features.SemanticRoles(limit=1)
        ])
    return response
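
Example #2 takes everything from a single params dict, the shape used by IBM Cloud Functions actions; a hypothetical local invocation (all values are placeholders):

params = {
    "username": "YOUR_USERNAME",
    "password": "YOUR_PASSWORD",
    "version": "2017-02-27",
    "url": "https://www.ibm.com"
}
print(main(params))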
Example #3
    def understand_text(self):
        # Run the full NLU feature set over the instance's converted text and
        # cache the raw response on self.nl_understanding.
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username=nlu_settings.get("username"),
            password=nlu_settings.get("password"),
            version="2017-02-27")

        self.nl_understanding = natural_language_understanding.analyze(
            text=self.converted_text,
            features=[
                Features.Entities(emotion=True, sentiment=True, limit=100),
                Features.Keywords(emotion=True, sentiment=True, limit=100),
                Features.Categories(),
                Features.Concepts(),
                Features.Sentiment(),
                Features.Emotion(),
                #     Features.Feature(),
                #     Features.MetaData(),
                Features.Relations(),
                Features.SemanticRoles(),
            ])

        return self.nl_understanding
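
In this SDK version analyze() returns a plain dict, so the cached result can be read directly; a hedged sketch of pulling keyword sentiment out of it (`doc` is a hypothetical instance of the surrounding class):

understanding = doc.understand_text()
for kw in understanding.get('keywords', []):
    print(kw['text'], kw['sentiment']['score'])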
Example #4
def callNLU(text):
    '''
    Checks which features are enabled, then makes a call to NLU and returns JSON.
    :param text: The string containing the information you want to analyse.
    '''
    if text is None or text.strip() == '':
        return {}

    f = []
    if c.getboolean('nlu_feature', 'concepts'): f.append(features.Concepts())
    if c.getboolean('nlu_feature', 'entities'): f.append(features.Entities())
    if c.getboolean('nlu_feature', 'keywords'): f.append(features.Keywords())
    if c.getboolean('nlu_feature', 'categories'):
        f.append(features.Categories())
    if c.getboolean('nlu_feature', 'emotion'): f.append(features.Emotion())
    if c.getboolean('nlu_feature', 'semanticroles'):
        f.append(features.SemanticRoles())
    if c.getboolean('nlu_feature', 'relations'): f.append(features.Relations())
    if c.getboolean('nlu_feature', 'sentiment'): f.append(features.Sentiment())

    r = nlu.analyze(text=text, features=f)

    return r
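
Example #4 reads feature toggles from a ConfigParser object `c`; a hypothetical config section it could parse (assuming Python 3's configparser):

import configparser

c = configparser.ConfigParser()
c.read_string('''
[nlu_feature]
concepts = true
entities = true
keywords = true
categories = false
emotion = true
semanticroles = true
relations = false
sentiment = true
''')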
Example #5
def get_data_from_bluemix(target_url):
    # Serve a cached analysis when available; otherwise call NLU with
    # retries and cache whatever comes back.
    nl_understanding = cache_get(target_url)
    if not nl_understanding:
        natural_language_understanding = NaturalLanguageUnderstandingV1(
            username=nlu_settings.get("username"),
            password=nlu_settings.get("password"),
            version="2017-02-27")
        features = [
            Features.Entities(limit=100, emotion=True, sentiment=True),
            Features.Keywords(limit=100, emotion=True, sentiment=True),
            Features.Categories(),
            Features.Concepts(),
            Features.Sentiment(),
            Features.Emotion(),
            #     Features.Feature(),
            #     Features.MetaData(),
            Features.Relations(),
            Features.SemanticRoles(),
        ]
        nl_understanding = None

        # Retry up to NUMBEROFTRIES times; transient service errors are
        # swallowed and the call is simply attempted again.
        for i in range(NUMBEROFTRIES):
            try:
                nl_understanding = natural_language_understanding.analyze(
                    url=target_url,
                    features=features
                )
            except Exception:
                pass

            if nl_understanding:
                break
        cache_put(target_url, nl_understanding)

    return nl_understanding
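
cache_get and cache_put are assumed helpers in Example #5; a minimal in-memory sketch of what they might look like:

_cache = {}

def cache_get(key):
    # Return a previously stored analysis, or None on a miss.
    return _cache.get(key)

def cache_put(key, value):
    _cache[key] = value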
Example #6
ptr = 0
ctr = 0

#%%

for i in range(0, 5):
    with open("C:/Users/Documents/Project/copy txt files/copy txt files/" +
              str(df_text.Filename[i])) as doc_file:
        demo_text = doc_file.read()

    print(i)
    response = natural_language_understanding.analyze(
        text=demo_text,
        features=[Features.Relations(),
                  Features.SemanticRoles()])

    # time.sleep(2)
    ctr = ctr + 1
    #if j==2:
    #   time.sleep(5)

    for relation in response['semantic_roles']:
        df_ents.Filename[ptr] = df_text.Filename[i]

        relation_list = relation.keys()
        if any("subject" in s for s in relation_list):
            df_ents.Subject[ptr] = relation['subject']['text'].encode('utf-8')
        else:
            df_ents.Subject[ptr] = ""
        if any("action" in s for s in relation_list):
            # Assumed continuation: the source snippet is truncated here and
            # presumably mirrors the subject branch above, then advances ptr.
            df_ents.Action[ptr] = relation['action']['text'].encode('utf-8')
        else:
            df_ents.Action[ptr] = ""
        ptr = ptr + 1
Example #7
def get_semantic_roles(text, limit=500, entities=True, keywords=None):
    # Return only the semantic_roles portion of the NLU response.
    r = nlu.analyze(text=text,
                    features=[features.SemanticRoles(limit=limit,
                                                     entities=entities,
                                                     keywords=keywords)])
    return r['semantic_roles']
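
A hypothetical call to the helper above; subject, action, and object are the fields of each semantic_roles element:

roles = get_semantic_roles("IBM acquired Red Hat.", limit=10)
for role in roles:
    print(role.get('subject', {}).get('text'),
          role.get('action', {}).get('text'),
          role.get('object', {}).get('text'))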
Example #8

    def test_semantic_roles(self):
        s = features.SemanticRoles()
        assert s.name() == 'semantic_roles'
Example #9
def eval_semantic_roles():
    response = nlu.analyze(text='In 2011, Watson competed on Jeopardy!',
                           features=[features.SemanticRoles()])
    return jsonify(response)
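
The jsonify call implies Example #9 runs inside Flask; a hedged sketch of wiring it up as a route (app name and path are assumptions):

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/semantic-roles')
def semantic_roles_route():
    return eval_semantic_roles()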
Example #10

    def __init__(self, inputText, googleLanguageClient=None, watsonClient=None,
                 googleLanguageModel=None, watsonLanguageModel=None,
                 semanticRoleList=None, entitySizeLimit=5, entities=None,
                 keywords=None):
        # Defaults are None rather than shared mutable lists.
        self.googleLanguageClient = language.Client()
        self.watsonClient = self.initialize_watson_client()
        self.inputText = inputText
        self.googleLanguageModel = self.googleLanguageClient.document_from_text(
            self.inputText, language='es', encoding=language.Encoding.UTF8)
        self.watsonLanguageModel = self.watsonClient.analyze(
            text=self.inputText,
            features=[features.Entities(), features.Keywords(),
                      features.SemanticRoles()])
        self.entitySizeLimit = entitySizeLimit
        self.entities = self.extract_entities()
        self.keywords = self.extract_keywords()
        self.semanticRoleList = semanticRoleList if semanticRoleList is not None else []
Example #11
def nlp(input_stuff):
    # Calls the NaturalLanguageUnderstandingV1 API
    natural_language_understanding = NaturalLanguageUnderstandingV1(
        version='2017-02-27',
        username="******",  # API key
        password="******")  # Replace with personal credentials

    response = natural_language_understanding.analyze(
        text=input_stuff,
        features=[features.Concepts(), features.Entities(),
                  features.Keywords(), features.Categories(),
                  features.Emotion(), features.Sentiment(),
                  features.Relations(), features.SemanticRoles()])
    nlu_data = {
        'sentiment': response["sentiment"],
        'semanticRoles': response["semantic_roles"],
        'concepts': response["concepts"],
        'entities': response["entities"],
        'relations': response["relations"],
        'categories': response["categories"]
    }
    nlu_data = [nlu_data]
    # print(nlu_data)
    return nlu_data
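
A hedged usage sketch for Example #11; the document-level sentiment path follows the standard NLU response shape:

result = nlp("In 2011, Watson competed on Jeopardy!")
print(result[0]['sentiment']['document']['score'])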