def set_rules():
    """Replace the module-level knowledge base with rules supplied via POST.

    Expects a JSON body of the form ``{"text": "<rule definitions>"}``.
    Rebinds the module-level ``knowledge`` to a fresh ``Knowledge`` instance
    so rules from earlier calls are discarded rather than accumulated.
    """
    # Guard clause: only a POST request carries a rule payload.
    if request.method != 'POST':
        return []
    payload = json.loads(request.data)
    global knowledge
    # Start from a clean slate before loading the new rules.
    knowledge = Knowledge()
    knowledge.add_rules(payload['text'])
    return jsonify({'success': True})
def test_possesive_pronouns(self):
    """A possessive pronoun ("My") should produce an OWN relation triplet."""
    sentence = 'My dog is red'
    drs = Drs.create_from_natural_language(sentence)
    knowledge = Knowledge()
    # Use a context manager so the rules file handle is closed promptly
    # instead of leaking until garbage collection.
    with open(os.path.join(_path, '../rules/test.rules')) as rules_file:
        knowledge.add_rules(rules_file.read())
    fi = ForwardInference(drs, knowledge)
    drs_and_weight = fi.compute()
    writer = RelationTripletsWriter()
    # Apply the triplet writer to the highest-ranked inferred DRS.
    lst = drs_and_weight[0][0].apply(writer)
    expected_list = [('me', 'OWN', 'dog')]
    self.assertEqual(lst, expected_list)
def test_relation_rules(self):
    """The generic relation rules should infer a WORKS_AT relation."""
    data_drs = Drs.create_from_natural_language('Jim works at Microsoft')
    knowledge = Knowledge(metric)
    # Use a context manager so the rules file handle is closed promptly
    # instead of leaking until garbage collection.
    with open(os.path.join(_path, '../rules/generic_relations.rules')) as rules_file:
        knowledge.add_rules(rules_file.read())
    inference = ForwardInference(data_drs, knowledge)
    end_drs = inference.compute()
    expected_drs = Drs.create_from_predicates_string('{}(1), {"text": "WORKS_AT"}(1,2), {}(2)')
    # The test passes if any inferred DRS matches the expected one
    # under the similarity metric.
    is_match = False
    for drs in end_drs:
        lst = drs[0].apply(DrsMatcher(expected_drs, metric))
        if len(lst) > 0:
            is_match = True
            break
    self.assertTrue(is_match)
def test_single_clause(self):
    """An inline MATCH/CREATE rule should produce a WORKS_AT relation."""
    data_drs = Drs.create_from_natural_language('Jim works at Microsoft')
    rule = """
MATCH "{PERSON}#1 works at {ORG}#2"
CREATE {}(1), {"type": "WORKS_AT"}(1,2), {}(2)
"""
    knowledge = Knowledge(metric)
    knowledge.add_rules(rule)
    inference = ForwardInference(data_drs, knowledge)
    end_drs = inference.compute()
    expected_drs = Drs.create_from_predicates_string('{}(1), {"type": "WORKS_AT"}(1,2), {}(2)')
    # Succeed as soon as any candidate DRS aligns with the expected one
    # under the similarity metric (short-circuits like the break-loop form).
    is_match = any(
        len(candidate[0].apply(DrsMatcher(expected_drs, metric))) > 0
        for candidate in end_drs
    )
    self.assertTrue(is_match)
def get_generic_knowledge():
    """Return a Knowledge base loaded with the generic relation rules.

    Reads ``../rules/generic_relations.rules`` relative to this module.
    """
    knowledge = Knowledge()
    # Context manager ensures the rules file is closed after reading
    # rather than leaking the handle.
    with open(os.path.join(_path, '../rules/generic_relations.rules')) as rules_file:
        knowledge.add_rules(rules_file.read())
    return knowledge
def get_wikidata_knowledge():
    """Return a Knowledge base loaded with the wikidata rules.

    Reads ``../rules/wikidata.rules`` relative to this module.
    """
    knowledge = Knowledge()
    # Context manager ensures the rules file is closed after reading
    # rather than leaking the handle.
    with open(os.path.join(_path, '../rules/wikidata.rules')) as rules_file:
        knowledge.add_rules(rules_file.read())
    return knowledge
import os import unittest from pynsett.drt import Drs from pynsett.knowledge import Knowledge from pynsett.metric import MetricFactory from pynsett.drt.drs_matcher import DrsMatcher _path = os.path.dirname(__file__) metric = MetricFactory.get_best_available_metric() _knowledge = Knowledge() _knowledge.add_rules(open(os.path.join(_path, '../rules/test.rules')).read()) class PynsettUnitTests(unittest.TestCase): def test_snippet_1(self): """ Mostly to check conjunction rules. """ text = "Asimov also wrote mysteries and fantasy, as well as much nonfiction. Most of his popular science books explain concepts in a historical way, going as far back as possible to a time when the science in question was at its simplest stage. Examples include Guide to Science, the three-volume set Understanding Physics, and Asimov's Chronology of Science and Discovery. He wrote on numerous other scientific and non-scientific topics, such as chemistry, astronomy, mathematics, history, biblical exegesis, and literary criticism." drs = Drs.create_from_natural_language(text) expected_drs = Drs.create_from_predicates_string(""" {'word': 'wrote', 'tag': 'v', 'compound': 'wrote', 'entity': '', 'lemma': 'write', 'gender_guess': None, 'is_head_token': True, 'refers_to': None, 'negated': 'false', 'type': None}(v2), {'word': 'Asimov', 'tag': 'n', 'compound': 'Asimov', 'entity': 'PERSON', 'lemma': 'Asimov', 'gender_guess': None, 'is_head_token': False, 'refers_to': None, 'negated': 'false', 'type': None}(v0), {'word': 'also', 'tag': 'RB', 'compound': 'also', 'entity': '', 'lemma': 'also', 'gender_guess': None, 'is_head_token': False, 'refers_to': None, 'negated': 'false', 'type': None}(v1),
from flask import Flask, Response from flask import request from flask import jsonify from flask_cors import CORS from pynsett.auxiliary.prior_knowedge import get_wikidata_knowledge from pynsett.auxiliary.transform import transform_triplets_into_api_edges_and_nodes from pynsett.discourse import Discourse from pynsett.extractor import Extractor from pynsett.knowledge import Knowledge from pynsett.writer.drt_triplets_writer import DRTTripletsWriter pynsett_app = Flask('Pynsett') CORS(pynsett_app) wiki_knowledge = get_wikidata_knowledge() knowledge = Knowledge() @pynsett_app.route('/api/wikidata', methods=['POST']) def get_wikidata_triplets(): if request.method != 'POST': return [] data = json.loads(request.data) text = data['text'] discourse = Discourse(text) extractor = Extractor(discourse, wiki_knowledge) triplets = extractor.extract() return jsonify(transform_triplets_into_api_edges_and_nodes(triplets)) @pynsett_app.route('/api/relations', methods=['POST'])