def test_context_composition(self):
    G = InferenceGraph()
    a = Alist(**{tt.ID: '1', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                 tt.TIME: '2023', tt.OBJECT: '', tt.OPVAR: '?x', tt.COST: 1})
    ctx1 = [
        {ctx.nationality: 'United Kingdom'},
        {ctx.place: 'United Kingdom', ctx.device: 'computer',
         ctx.datetime: '2010-07-27 11:00:00'},
        {}
    ]
    a.set(tt.CONTEXT, ctx1)
    G.add_alist(a)
    query_ctx = frank.context.inject_query_context
    # query context should infer the ctx.accuracy from ctx.device
    op_alist = Temporal().decompose(query_ctx(a), G)
    self.assertEqual(
        (op_alist.get(tt.OP), len(G.child_alists(op_alist.id))),
        ('gpregress', 19))
def test_normalize_filter_with_location(self):
    alist = Alist(**{
        tt.ID: '1', tt.SUBJECT: '$y', tt.PROPERTY: 'P1082', tt.OBJECT: '?x',
        tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1,
        '$y': {"$filter": [{"p": "type", "o": "country"},
                           {"p": "location", "o": "Africa"}]}
    })
    G = InferenceGraph()
    G.add_alist(alist)
    normalize = Normalize()
    results = normalize.decompose(alist, G)
    self.assertTrue(len(G.child_alists(alist.id)) > 0)
def descendant_explanation(self, G: InferenceGraph, alist: Alist, summary, max_length, length):
    '''Recursively append the 'how'/'what' explanations of descendant alists to the summary.'''
    if length <= max_length:
        # for child in alist.children:
        for child in G.child_alists(alist.id):
            summary = f"{summary}{' ' + child.get('how') if 'how' in child.attributes else ''}" + \
                      f"{' ' + child.get('what') if 'what' in child.attributes else ''}".strip()
            summary = self.descendant_explanation(G, child, summary, max_length, length + 1)
    return summary
def why(self, G: InferenceGraph, alist: Alist, decomp_op, in_place=True):
    '''Explain a decomposition of this alist.
    Assumes a failed instantiation of this alist following KB searches.'''
    expl = ""
    time = ""
    children = G.child_alists(alist.id)
    if alist.get(tt.TIME):
        time = f" in {alist.get(tt.TIME)}"

    if decomp_op == 'temporal':
        expl = f"Could not find the {alist.get(tt.PROPERTY)} of {alist.instantiation_value(tt.SUBJECT)}{time}. "
        decomp_items = []
        # for c in alist.children[0].children:
        for c in children:
            decomp_items.append(c.get(tt.TIME))
        if len(decomp_items) >= 2:
            expl += f"Attempted to infer the required value{time} by finding the {alist.get(tt.PROPERTY)} of {alist.instantiation_value(tt.SUBJECT)} " + \
                    f"at other times between {min(decomp_items)} and {max(decomp_items)}."
    elif decomp_op == 'geospatial':
        expl = f"Could not find the {alist.get(tt.PROPERTY)} of {alist.instantiation_value(tt.SUBJECT)}{time}. "
        decomp_items = []
        # for c in alist.children[0].children:
        for c in G.child_alists(children[0].id):
            decomp_items.append(c.instantiation_value(tt.SUBJECT))
        entities = ''
        if decomp_items:
            if len(decomp_items) > 8:
                entities = f"{', '.join(decomp_items[0:8])} etc"
            else:
                entities = f"{', '.join(decomp_items[0:len(decomp_items) - 1])} and {decomp_items[-1]}"
            expl += f"Finding the {alist.get(tt.PROPERTY)}{time} for the constituent parts of " + \
                    f"{alist.instantiation_value(tt.SUBJECT)}: {entities}."
    elif decomp_op == 'normalize':
        expl = f"Need to solve the sub-queries before determining the {alist.get(tt.PROPERTY)}{time}."
    elif decomp_op == 'comparison':
        expl = "Need to solve the sub-queries to determine the items to compare."

    if in_place:
        alist.set("why", expl)
        G.add_alist(alist)
def test_geospatial(self):
    alist = Alist(**{
        tt.ID: '1', tt.SUBJECT: 'Africa', tt.PROPERTY: 'P1082', tt.OBJECT: '',
        tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1
    })
    G = InferenceGraph()
    G.add_alist(alist)
    geospatial = Geospatial()
    results = geospatial.decompose(alist, G)
    self.assertTrue(
        len(G.child_alists(alist.id)) > 0,
        "geospatial decomposition should produce at least one child")
def test_temporal(self):
    alist = Alist(**{
        tt.ID: '0', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082', tt.OBJECT: '',
        tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1
    })
    G = InferenceGraph()
    G.add_alist(alist)
    temporal = Temporal()
    results = temporal.decompose(alist, G)
    self.assertTrue(
        len(G.child_alists(alist.id)) > 0,
        "temporal decomposition should produce at least one child")
def test_normalize_is(self):
    alist = Alist(**{
        tt.ID: '1', tt.SUBJECT: '$y', tt.PROPERTY: 'P1082', tt.OBJECT: '?x',
        tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1,
        '$y': {"$is": "Ghana"}
    })
    G = InferenceGraph()
    G.add_alist(alist)
    normalize = Normalize()
    results = normalize.decompose(alist, G)
    self.assertTrue(len(G.child_alists(alist.id)) > 0)
class TestReduce(unittest.TestCase):
    def setUp(self):
        self.G = InferenceGraph()
        self.alist = Alist(**{tt.ID: '1', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                              tt.OBJECT: '?x', tt.TIME: '2020', tt.OPVAR: '?x', tt.COST: 1})
        self.c1 = Alist(**{tt.ID: '2', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c1.instantiate_variable('?x', '120')
        self.c2 = Alist(**{tt.ID: '3', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2011', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c2.instantiate_variable('?x', '122')
        self.c3 = Alist(**{tt.ID: '4', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2012', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c3.instantiate_variable('?x', '126')
        self.c4 = Alist(**{tt.ID: '5', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2013', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c4.instantiate_variable('?x', '125')
        self.c5 = Alist(**{tt.ID: '5', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2014', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c5.instantiate_variable('?x', '126')
        self.c6 = Alist(**{tt.ID: '6', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2015', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c6.instantiate_variable('?x', '128')
        self.c7 = Alist(**{tt.ID: '7', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                           tt.OBJECT: '?x', tt.TIME: '2016', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c7.instantiate_variable('?x', '129')
        self.G.add_alist(self.alist)
        self.G.link(self.alist, self.c1)
        self.G.link(self.alist, self.c2)
        self.G.link(self.alist, self.c3)
        self.G.link(self.alist, self.c4)
        self.G.link(self.alist, self.c5)
        self.G.link(self.alist, self.c6)
        self.G.link(self.alist, self.c7)

        self.G2 = InferenceGraph()
        self.alist2 = Alist(**{tt.ID: '1', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                               tt.OBJECT: '?x', tt.TIME: '2020', tt.OPVAR: '?x', tt.COST: 1})
        self.c21 = Alist(**{tt.ID: '2', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                            tt.OBJECT: '?x', tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c21.instantiate_variable('?x', 'a')
        self.c22 = Alist(**{tt.ID: '3', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                            tt.OBJECT: '?x', tt.TIME: '2011', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c22.instantiate_variable('?x', 'b')
        self.c23 = Alist(**{tt.ID: '4', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                            tt.OBJECT: '?x', tt.TIME: '2012', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c23.instantiate_variable('?x', 'c')
        self.c24 = Alist(**{tt.ID: '5', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                            tt.OBJECT: '?x', tt.TIME: '2013', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c24.instantiate_variable('?x', 'd')
        self.c25 = Alist(**{tt.ID: '5', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                            tt.OBJECT: '?x', tt.TIME: '2014', tt.OPVAR: '?x', tt.COST: 1, '?x': ''})
        self.c25.instantiate_variable('?x', 'a')
        self.G2.add_alist(self.alist2)
        self.G2.link(self.alist2, self.c21)
        self.G2.link(self.alist2, self.c22)
        self.G2.link(self.alist2, self.c23)
        self.G2.link(self.alist2, self.c24)
        self.G2.link(self.alist2, self.c25)

    def test_value(self):
        a = frank.reduce.value.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertTrue(a.instantiation_value(tt.OBJECT), '124')

    def test_value2(self):
        a = frank.reduce.value.reduce(self.alist2, self.G2.child_alists(self.alist2.id), self.G2)
        self.assertTrue(a.instantiation_value(tt.OBJECT), 'a')

    def test_values(self):
        a = frank.reduce.values.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertEqual(a.instantiation_value(tt.OBJECT), '120,122,126,125,126,128,129')

    def test_sum(self):
        a = frank.reduce.sum.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertEqual(float(a.instantiation_value(tt.OPVAR)), 876.0)

    def test_max(self):
        a = frank.reduce.max.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertEqual(int(a.instantiation_value(tt.OPVAR)), 129)

    def test_min(self):
        a = frank.reduce.min.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertEqual(a.instantiation_value(tt.OPVAR), '120')

    def test_count(self):
        a = frank.reduce.count.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertEqual(a.instantiation_value(tt.OPVAR), 7)

    def test_product(self):
        a = frank.reduce.product.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertEqual(a.instantiation_value(tt.OPVAR), 479724456960000.0)

    def test_regress(self):
        a = frank.reduce.regress.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        print(a)
        self.assertAlmostEqual(a.instantiation_value(tt.OPVAR), 134.89, places=2)

    def test_gpregress(self):
        a = frank.reduce.gpregress.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        print(a)
        self.assertAlmostEqual(a.instantiation_value(tt.OPVAR), 134, places=0)

    def test_gpregress_2(self):
        alist = Alist(**{tt.ID: '101', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                         tt.OBJECT: '?x', tt.TIME: '2020', tt.OPVAR: '?x', tt.COST: 1})
        c1 = Alist(**{tt.ID: '21011', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                      tt.OBJECT: '?x', tt.TIME: '2019.0', tt.OPVAR: '?x', tt.COST: 1,
                      '?x': 1839758040765.62})
        c2 = Alist(**{tt.ID: '21012', tt.SUBJECT: 'Ghana', tt.PROPERTY: 'P1082',
                      tt.OBJECT: '?x', tt.TIME: '2018.0', tt.OPVAR: '?x', tt.COST: 1,
                      '?x': 1885482534238.33})
        G = InferenceGraph()
        G.add_alist(alist)
        G.link(alist, c1)
        G.link(alist, c2)
        a = frank.reduce.gpregress.reduce(alist, G.child_alists(alist.id), G)
        print(a)
        self.assertAlmostEqual(a.instantiation_value(tt.OPVAR), 1792866444829.7, places=1)

    def test_do_gpregress(self):
        data = [[2019.0, 1839758040765.62], [2018.0, 1885482534238.33],
                [2017.0, 2055505502224.73], [2016.0, 1793989048409.29],
                [2015.0, 1802214373741.32], [2014.0, 2455993625159.37],
                [2013.0, 2472806919901.67], [2012.0, 2465188674415.03],
                [2011.0, 2616201578192.25], [2010.0, 2208871646202.82],
                [2009.0, 1667019780934.28], [2008.0, 1695824571927.15],
                [2007.0, 1397084345950.39], [2006.0, 1107640297889.95]]
        X, y = [], []
        for d in data:
            X.append([d[0]])
            y.append(d[1])
        X = np.array(X)
        y = np.array(y)
        predict = frank.reduce.gpregress.do_gpregress(
            X, y, np.array([2022.]), (np.max(y) - np.min(y))**2, 1)
        y_predict = predict[0]['y']
        self.assertAlmostEqual(y_predict, 1324535292167, places=0)

    @unittest.skip
    def test_nnpredict(self):
        a = frank.reduce.nnpredict.reduce(self.alist, self.G.child_alists(self.alist.id), self.G)
        self.assertAlmostEqual(a.instantiation_value(tt.OPVAR), 158.97, places=2)

    def test_comp(self):
        # root = Alist(**{tt.ID: '1', tt.SUBJECT: '$y', tt.PROPERTY: 'P1082',
        #                 tt.OBJECT: '?x', tt.TIME: '2016', tt.OPVAR: '?x', tt.COST: 1})
        # node101 = Alist(**{tt.OP: 'comp', tt.ID: '1', tt.SUBJECT: '$y', tt.PROPERTY: 'P1082',
        #                    tt.OBJECT: '?x', tt.TIME: '2016', tt.OPVAR: '?x', tt.COST: 1})
        a = Alist(**{
            tt.ID: '1', tt.SUBJECT: '$y', tt.PROPERTY: 'P1082', tt.OBJECT: '?x',
            tt.TIME: '2010', tt.OPVAR: '?x', tt.COST: 1,
            '$y': {"$is": "Ghana"}
        })
        G = InferenceGraph()
        G.add_alist(a)
        normalize.Normalize().decompose(a, G)
        child1 = G.child_alists(a.id)[0]
        result = frank.reduce.comp.reduce(child1, G.child_alists(child1.id), G)
        self.assertIsNotNone(result)

    def test_eq(self):
        a = Alist(**{tt.ID: '1', tt.OPVAR: '$x $y', '$x': '?x1', '$y': '?y1', '?_eq_': ''})
        b = Alist(**{tt.ID: '2', tt.OPVAR: '?x1', '?x1': 20})
        c = Alist(**{tt.ID: '3', tt.OPVAR: '?y1', '?y1': 20})
        G = InferenceGraph()
        G.add_alist(a)
        G.link(a, b)
        G.link(a, c)
        result = frank.reduce.eq.reduce(a, [b, c], G)
        self.assertEqual(result.instantiation_value('?_eq_'), 'true')

    def test_gt(self):
        a = Alist(**{tt.ID: '1', tt.OPVAR: '$x $y', '$x': '?x1', '$y': '?y1', '?_gt_': ''})
        b = Alist(**{tt.ID: '2', tt.OPVAR: '?x1', '?x1': 36})
        c = Alist(**{tt.ID: '3', tt.OPVAR: '?y1', '?y1': 33})
        G = InferenceGraph()
        G.add_alist(a)
        G.link(a, b)
        G.link(a, c)
        result = frank.reduce.gt.reduce(a, [b, c], G)
        self.assertEqual(result.instantiation_value('?_gt_'), 'true')

    def test_gte(self):
        a = Alist(**{tt.ID: '1', tt.OPVAR: '$x $y', '$x': '?x1', '$y': '?y1', '?_gte_': ''})
        b = Alist(**{tt.ID: '2', tt.OPVAR: '?x1', '?x1': 33})
        c = Alist(**{tt.ID: '3', tt.OPVAR: '?y1', '?y1': 33})
        G = InferenceGraph()
        G.add_alist(a)
        G.link(a, b)
        G.link(a, c)
        result = frank.reduce.gte.reduce(a, [b, c], G)
        self.assertEqual(result.instantiation_value('?_gte_'), 'true')

    def test_lt(self):
        a = Alist(**{tt.ID: '1', tt.OPVAR: '$x $y', '$x': '?x1', '$y': '?y1', '?_lt_': ''})
        b = Alist(**{tt.ID: '2', tt.OPVAR: '?x1', '?x1': 20})
        c = Alist(**{tt.ID: '3', tt.OPVAR: '?y1', '?y1': 30})
        G = InferenceGraph()
        G.add_alist(a)
        G.link(a, b)
        G.link(a, c)
        result = frank.reduce.lt.reduce(a, [b, c], G)
        self.assertEqual(result.instantiation_value('?_lt_'), 'true')

    def test_lte(self):
        a = Alist(**{tt.ID: '1', tt.OPVAR: '$x $y', '$x': '?x1', '$y': '?y1', '?_lte_': ''})
        b = Alist(**{tt.ID: '2', tt.OPVAR: '?x1', '?x1': 30})
        c = Alist(**{tt.ID: '3', tt.OPVAR: '?y1', '?y1': 30})
        G = InferenceGraph()
        G.add_alist(a)
        G.link(a, b)
        G.link(a, c)
        result = frank.reduce.lte.reduce(a, [b, c], G)
        self.assertEqual(result.instantiation_value('?_lte_'), 'true')
def reduce(alist: Alist, children: List[Alist], G: InferenceGraph):
    if not children:
        return None
    nodes_enqueue = []
    nodes_enqueue_process = []

    # get intersection of child values
    common_items = set()
    head, *tail = children
    has_head_children = False
    has_tail_children = False
    for c in G.child_alists(head.id):
        has_head_children = True
        if c.get(tt.OP) != 'comp':
            if c.get(tt.OPVAR).startswith(vx.NESTING):
                common_items.add(str(c.instantiation_value(c.get(tt.OPVAR))))
            else:
                projVars = c.projection_variables()
                if projVars != None:
                    for pvkey, pvval in projVars.items():
                        common_items.add(c.instantiation_value(pvkey))

    for t in tail:
        c_items = set()
        for tc in G.child_alists(t.id):
            has_tail_children = True
            if tc.get(tt.OPVAR).startswith(vx.NESTING):
                c_items.add(str(tc.instantiation_value(tc.get(tt.OPVAR))))
            projVars = tc.projection_variables()
            if projVars != None:
                for pvkey, pvval in projVars.items():
                    c_items.add(tc.instantiation_value(pvkey))
        common_items = common_items.intersection(c_items)

    if not common_items and not has_head_children and not has_tail_children:
        for c in children:
            if c.get(tt.OP) != 'comp':
                if c.get(tt.OPVAR).startswith(vx.NESTING):
                    common_items.add(str(c.instantiation_value(c.get(tt.OPVAR))))
                else:
                    projVars = c.projection_variables()
                    if projVars != None:
                        for pvkey, pvval in projVars.items():
                            common_items.add(c.instantiation_value(pvkey))

    if not common_items:
        return None
    else:
        # if common items are not empty, ignore existing siblings before creating new siblings
        sibs = G.child_alists(G.parent_alists(alist.id)[0].id)
        for x in sibs:
            if x.id != alist.id:
                # x.prune()
                G.prune(x.id)
                print(f'{pcol.RED}sibling pruned {x.id}{pcol.RESET} {x}{pcol.RESETALL}')

    # set up new sibling branch(es)
    parent = G.parent_alists(alist.id)[0]
    op_alist = parent.copy()
    op_alist.set(alist.get(tt.OPVAR), '')
    op_alist.set(tt.OP, parent.get(tt.OP))
    op_alist.set(tt.OPVAR, parent.get(tt.OPVAR))
    op_alist.set(op_alist.get(tt.OPVAR), '')
    op_alist.state = states.EXPLORED
    # set as an aggregation node to help with display rendering
    op_alist.node_type = nt.HNODE

    G.link(parent, op_alist, 'comp')
    G.link(alist, op_alist, 'set-comp', create_new_id=False)
    nodes_enqueue.append((op_alist, parent, False, 'comp'))
    print(f'{pcol.BLUE}set-comp >> {op_alist.id}{pcol.RESET} {op_alist}{pcol.RESETALL}')
    if alist.children:
        nodes_enqueue.append((op_alist, alist, False, 'setcomp'))

    # create children of the new branch;
    # copy to avoid using a different version from another thread in the loop
    op_alist_copy = op_alist.copy()
    for ff in common_items:
        new_sibling: Alist = op_alist_copy.copy()
        new_sibling.set(tt.OP, 'value')
        new_sibling.set(tt.OPVAR, op_alist_copy.get(tt.OPVAR))
        new_sibling.set(alist.get(tt.OPVAR), ff)
        new_sibling.instantiate_variable(alist.get(tt.OPVAR), ff)
        for ref in new_sibling.variable_references(alist.get(tt.OPVAR)):
            if ref not in [tt.OPVAR]:
                new_sibling.set(ref, ff)
        new_sibling.node_type = nt.ZNODE
        G.link(op_alist, new_sibling, 'comp_lookup')
        nodes_enqueue_process.append((new_sibling, op_alist, True, 'comp_lookup'))
        print(f'{pcol.BLUE} set-comp-child >>> {new_sibling.id}{pcol.RESET} {new_sibling}{pcol.RESETALL}')

    alist.state = states.IGNORE
    alist.nodes_to_enqueue_only = nodes_enqueue
    alist.nodes_to_enqueue_and_process = nodes_enqueue_process
    return alist
def what(self, G: InferenceGraph, alist: Alist, is_reduced: bool, in_place=True):
    '''Explain a reduction of this alist.'''
    what = ''
    how = ''
    time = ""
    if alist.get(tt.TIME):
        time = f" in {alist.get(tt.TIME)}"

    if not is_reduced:
        if alist.get(tt.OP) in ['eq', 'gt', 'gte', 'lt', 'lte']:
            what = "Failed to compare the values since the values of all items being compared are not known. "
        elif alist.get(tt.OP) in ['comp']:
            what = "Failed to solve the sub-problem. "
        elif alist.get(tt.OP) in ['value', 'values']:
            what = f"Failed to determine the {self.ops_text[alist.get(tt.OP)]} of {alist.get(tt.PROPERTY)}{time}."
        else:
            what = f"Failed to calculate the {self.ops_text[alist.get(tt.OP)]} of {alist.get(tt.PROPERTY)}{time}."
    else:
        if alist.get(tt.OP) in ['eq', 'gt', 'gte', 'lt', 'lte']:
            vars_compared = alist.get(tt.OPVAR).split(' ')
            if len(vars_compared) > 1:
                what = f"Inferred value is '{alist.instantiation_value('?' + alist.get(tt.OP))}'."
                how = f"Did a comparison to determine if {alist.instantiation_value(vars_compared[0])} is " + \
                      f"{self.ops_text[alist.get(tt.OP)]} {alist.instantiation_value(vars_compared[1])}."
        elif alist.get(tt.OP) in ['comp']:
            listed_str = ''
            listed = alist.instantiation_value(alist.get(tt.OPVAR))
            if listed:
                listed = listed.split(',')
                if len(listed) > 8:
                    listed_str += f"{', '.join(listed[0:8])}, etc"
                else:
                    listed_str += ', '.join(listed)
            if listed_str:
                what = f"Solved the sub-query and found the following values: {listed_str}."
        else:
            inferred_value = ''
            projected = alist.projection_variables()
            if projected:
                inferred_value = list(projected.values())[0]
            if not inferred_value:
                inferred_value = alist.instantiation_value(alist.get(tt.OPVAR))
            if inferred_value:
                if ':' in alist.get(tt.PROPERTY):
                    listed_str = ''
                    listed = alist.instantiation_value(alist.get(tt.OPVAR)).split(',')
                    if len(listed) > 8:
                        listed_str += f"{', '.join(listed[0:8])}, etc"
                    else:
                        listed_str += ', '.join(listed)
                    what = f"The {alist.get(tt.PROPERTY).split(':')[1]} values found for the sub-query include: {listed_str}."
                elif (projected or inferred_value) and not alist.get(tt.PROPERTY):
                    # for alists with just a projected value but no property
                    what = f"An input value for the operation is {inferred_value}."
                elif projected and alist.get(tt.OPVAR) not in projected and alist.get(tt.OP) in ['max', 'min']:
                    what = f"The entity whose {alist.get(tt.PROPERTY)}{time} has the {self.ops_text[alist.get(tt.OP)]} of {alist.instantiation_value(alist.get(tt.OPVAR))} is {inferred_value}."
                elif projected and alist.get(tt.OPVAR) not in projected and alist.get(tt.OP) not in ['max', 'min']:
                    what = f"The {self.ops_text[alist.get(tt.OP)]} of the {alist.get(tt.PROPERTY)}{time} of {inferred_value} is {alist.instantiation_value(alist.get(tt.OPVAR))}."
                else:
                    what = f"The {self.ops_text[alist.get(tt.OP)]} of the {alist.get(tt.PROPERTY)} of {alist.instantiation_value(tt.SUBJECT)}{time} is {inferred_value}."

            if alist.get(tt.OP) in ['regress', 'nnpredict', 'linregress', 'gpregress', 'nnregress']:
                decomp_items = []
                children = G.child_alists(alist.id)
                # for c in alist.children[0].children:
                for c in G.child_alists(children[0].id):
                    decomp_items.append(c.get(tt.TIME))
                if len(decomp_items) > 0:
                    how = f"Generated a regression function from times between {min(decomp_items)} and {max(decomp_items)}."

    if in_place:
        alist.set("what", what)
        alist.set("how", how)
        G.add_alist(alist)