Example #1
def test_model_induced_functions():
  """
  Test evaluating a model with an ontology which has induced functions.
  """

  fake_scene = {
    "objects": ["foo", "bar"],
  }

  types = TypeSystem(["a"])
  functions = [
      types.new_function("test", ("a", "a"), lambda x: True),
      types.new_function("test2", ("a", "a"), Expression.fromstring(r"\x.test(test(x))")),
  ]
  ontology = Ontology(types, functions, [])

  model = Model(scene=fake_scene, ontology=ontology)

  cases = [
    ("Test basic call of an abstract function", r"\a.test2(a)", {"foo": True, "bar": True}),
    ("Test embedded call of an abstract function", r"\a.test(test2(a))", {"foo": True, "bar": True}),
  ]

  def test(msg, expr, expected):
    eq_(model.evaluate(Expression.fromstring(expr)), expected)

  for msg, expr, expected in cases:
    yield test, msg, expr, expected
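A note on Example #1: `test2` is an induced function, defined by the ontology-level expression \x.test(test(x)) rather than by a Python lambda, so the model expands it in terms of `test` at evaluation time. Evaluating a lambda expression such as \a.test2(a) yields a mapping from every object in the scene to the value of the body, which is why the expected results above are dicts over "foo" and "bar".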
Example #2
def test_nested_lambda():
  """
  Test evaluation of nested lambda expressions.
  """
  ontology = _make_mock_ontology()

  scene = {"objects": [
    frozendict(x=3, shape="sphere"),
    frozendict(x=4, shape="cube"),
  ]}
  model = Model(scene, ontology)

  eq_(model.evaluate(Expression.fromstring(r"unique(\x.left_of(x,unique(\y.cube(y))))")),
      scene["objects"][0])
  eq_(model.evaluate(Expression.fromstring(r"sphere(unique(\x.left_of(x,unique(\y.cube(y)))))")),
      True)
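Examples #2, #4, #5 and #6 use a helper `_make_mock_ontology()` that is not shown on this page. Below is a minimal sketch consistent with how those examples use it; the import paths, the "boolean" type name, and the way `unique` receives its property argument are assumptions, not the repository's actual helper.

from frozendict import frozendict           # scene objects above are frozendicts
from nose.tools import eq_, ok_             # assertion helpers used throughout these tests
from pyccg.logic import TypeSystem, Ontology, Expression  # module paths are an assumption
from pyccg.model import Model                              # module path is an assumption


def _make_mock_ontology():
  """Small ontology with `unique`, `cube`, `sphere` and `left_of`."""
  def fn_unique(xs):
    # Assumption: the model passes the higher-order argument as a mapping from
    # each scene object to the truth value of the property on that object.
    true_xs = [x for x, matches in xs.items() if matches]
    assert len(true_xs) == 1
    return true_xs[0]

  types = TypeSystem(["obj", "boolean"])
  functions = [
      types.new_function("unique", (("obj", "boolean"), "obj"), fn_unique),
      types.new_function("cube", ("obj", "boolean"), lambda x: x["shape"] == "cube"),
      types.new_function("sphere", ("obj", "boolean"), lambda x: x["shape"] == "sphere"),
      types.new_function("left_of", ("obj", "obj", "boolean"),
                         lambda a, b: a["x"] < b["x"]),
  ]
  return Ontology(types, functions, [])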
Example #3
def test_model_stored_partial_application():
  types = TypeSystem(["obj"])
  functions = [
    types.new_function("lotsofargs", ("obj", "obj", "obj"), lambda a, b: b),
  ]
  constants = [
      types.new_constant("obj1", "obj"),
      types.new_constant("obj2", "obj"),
  ]
  ontology = Ontology(types, functions, constants)
  ontology.add_functions([types.new_function("partial", ("obj", "obj"), Expression.fromstring(r"lotsofargs(obj2)"))])

  scene = {"objects": []}
  model = Model(scene, ontology)

  eq_(model.evaluate(Expression.fromstring(r"partial(obj1)")), "obj1")
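Why `obj1` is expected: `partial` is stored as the partially applied expression lotsofargs(obj2), and `lotsofargs(a, b)` simply returns its second argument, so partial(obj1) reduces to lotsofargs(obj2, obj1), which is obj1.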
Example #4
def test_base_function():
  """
  Support domain enumeration when a function appears as a constant in "base"
  form.
  """
  ontology = _make_mock_ontology()
  scene = {"objects": [
    frozendict(x=3, shape="sphere"),
    frozendict(x=4, shape="cube"),
  ]}
  model = Model(scene, ontology)

  eq_(model.evaluate(Expression.fromstring(r"unique(cube)")), scene["objects"][1])
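In this example the property `cube` is handed to `unique` directly in its base form instead of being wrapped as \x.cube(x); the model enumerates the scene objects for it exactly as in Example #2, so both spellings denote the same cube object.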
Example #5
def _test_case(scene, expression, expected, msg=None):
  from pprint import pprint
  print("Objects:")
  pprint(scene['objects'])

  # NB: relies on a module-level `ontology` defined elsewhere in the test module.
  model = Model(scene, ontology)
  expr = Expression.fromstring(expression)
  value = model.evaluate(expr)
  print(expr)
  print("Expected:", expected)
  print("Observed:", value)

  eq_(value, expected, msg)
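`_test_case` is a plain helper rather than a test, so it has to be called explicitly. A hypothetical invocation, assuming a module-level `ontology` shaped like the sketch after Example #2:

scene = {"objects": [
    frozendict(x=3, shape="sphere"),
    frozendict(x=4, shape="cube"),
]}
_test_case(scene, r"cube(unique(\x.sphere(x)))", False,
           msg="the unique sphere is not a cube")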
Example #6
def test_property_function_cache():
  ontology = _make_mock_ontology()
  scene = {"objects": [
    frozendict(x=3, shape="sphere"),
    frozendict(x=4, shape="cube"),
  ]}
  model = Model(scene, ontology)

  ok_("unique" in model._property_function_cache,
      "Should prepare to cache `unique` function")
  eq_(len(model._property_function_cache["unique"]), 0)

  expr = Expression.fromstring(r"unique(\x.sphere(x))")
  expected = scene["objects"][0]

  eq_(model.evaluate(expr), expected)
  ok_(len(model._property_function_cache["unique"]) > 0,
      "Cache should be populated after call")

  eq_(model.evaluate(expr), expected, "Cached evaluation returns the same value")
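The assertions above spell out the cache contract: the model registers an empty cache slot for the higher-order `unique` function as soon as it is constructed, populates it on the first evaluation of unique(\x.sphere(x)), and answers the repeated evaluation of the same expression with the identical result.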
Example #7
def test(msg, expr, expected):
  # Relies on a `model` defined in the enclosing scope.
  eq_(model.evaluate(Expression.fromstring(expr)), expected)
Example #8
def test(msg, expr, expected):
  # Variant of the helper in Example #7 that also prints the evaluated value.
  print("ret", model.evaluate(Expression.fromstring(expr)))
  eq_(model.evaluate(Expression.fromstring(expr)), expected)
Example #9
                         include_semantics=True)

#######
# Execute on a scene.

scene = {
    "objects": [
        Object("sphere", "big", "rubber"),
        Object("cube", "small", "metal"),
        Object("cylinder", "small", "rubber"),
    ]
}

model = Model(scene, ontology)
print("the ball")
print(model.evaluate(Expression.fromstring(r"unique(\x.has_shape(x,sphere))")))

######
# Parse an utterance and execute.

learner = WordLearner(lex)

# Update with distant supervision.
learner.update_with_distant("the cube".split(), model, scene['objects'][1])

parser = learner.make_parser()
results = parser.parse("the cube".split())
printCCGDerivation(results[0])

root_token, _ = results[0].label()
print(root_token.semantics())
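Example #9 is excerpted from a larger demo script, so the lexicon construction that ends with include_semantics=True) and the `Object` constructor are not shown. A plausible stand-in for `Object`, inferred from the attributes used in the scene and the has_shape(x, sphere) query (field names and order are assumptions):

from collections import namedtuple

# Assumed CLEVR-style attribute order; the real definition may differ.
Object = namedtuple("Object", ["shape", "size", "material"])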