def main(args=None):
    """Entry point: configure the GeoQuery logic language and load the seed lexicon.

    :param args: optional one-element list whose first entry is the GeoQuery
        workspace root directory; defaults to the historical hard-coded path.
    """
    # fix: avoid a mutable default argument; the original default path is kept
    # for backward compatibility.
    if args is None:
        args = ["E:\\workspace\\spf\\geoquery"]
    resource_dir = os.path.join(args[0], "resources/")
    experiments_dir = os.path.join(args[0], "experiments/")

    types_files = os.path.join(resource_dir, "geo.types")
    predicate_ontology = os.path.join(resource_dir, "geo.preds.ont")
    simple_ontology = os.path.join(resource_dir, "geo.consts.ont")

    # Install the global logic-language singleton: GeoQuery types, both
    # ontology files, numerals typed "i", and a closed ontology.
    LogicLanguageServices.set_instance(
        LogicLanguageServices.Builder(TypeRepository(types_files), FlexibleTypeComparator())
        .add_constants_to_ontology(simple_ontology)
        .add_constants_to_ontology(predicate_ontology)
        .set_numeral_type_name("i")
        .close_ontology(True)
        .build()
    )

    # Smoke-test parse of a representative GeoQuery logical form.
    expr = LogicalExpressionBuilder.read("(size:<lo,i> (argmax:<<e,t>,<<e,i>,e>> (lambda $0:e (and:<t*,t> (city:<c,t> $0) (loc:<lo,<lo,t>> $0 alaska:s))) (lambda $1:e (size:<lo,i> $1))))")

    # fix: removed leftover debug code (`import sys; sys.exit(0)`) that made
    # everything below this point unreachable.

    category_services = LogicalExpressionCategoryServices(True, True)

    # "the" and "exists" are kept out of lexical factoring.
    unfactored_constants = {LogicalConstant.read("the:<<e,t>,e>"),
                            LogicalConstant.read("exists:<<e,t>,t>")}
    FactoredLexiconServices.set(unfactored_constants)

    read_lexicon = Lexicon()

    def text_filter(x):
        # Identity filter: seed entries are loaded unmodified.
        return x

    read_lexicon.add_entries_from_file(os.path.join(resource_dir, "seed.lex"),
                                       text_filter, category_services,
                                       LexicalEntry.Origin.FIXED_DOMAIN)
def read_constants_from_file(cls, filename, type_repository):
    """Read logical constants from a Lisp-formatted file.

    Lines may carry trailing ``//`` comments, which are stripped before
    parsing.

    :param filename: path to the constants file.
    :param type_repository: repository used to resolve constant types.
    :return: set of LogicalConstant objects parsed from the file.
    """
    # fix: use a context manager (the original leaked the file handle) and
    # build the stripped text with join instead of quadratic string +=.
    parts = []
    with open(filename, 'r') as constants_file:
        for line in constants_file:
            # Drop everything from "//" onward, then skip blank lines.
            line = re.split(r'\s*//', line.strip())[0]
            if line:
                parts.append(line)
    # Trailing space per part matches the original concatenation exactly.
    stripped_file = ''.join(part + ' ' for part in parts)

    ret = set()
    lisp_reader = LispReader(StringIO(stripped_file))
    while lisp_reader.has_next():
        ret.add(LogicalConstant.read(lisp_reader.next(), type_repository))
    return ret
def build(self):
    """
    Method for building a LogicLanguageServices
    :return: spf.mr.lambda_.logic_language_services.LogicLanguageServices
    """
    types = self.type_repository
    index_name = types.get_index_type().get_name()

    # Core predicates and truth constants every language instance needs.
    and_pred = LogicalConstant.read('and:<t*,t>', types)
    or_pred = LogicalConstant.read('or:<t*,t>', types)
    not_pred = LogicalConstant.read('not:<t,t>', types)
    inc_pred = LogicalConstant.read('inc:<%s,%s>' % (index_name, index_name), types)
    truth_type = types.get_truth_value_type()
    true_const = LogicalConstant.create('true:t', truth_type)
    false_const = LogicalConstant.create('false:t', truth_type)

    if len(self.constants_files) == 0:
        # No constants provided: a closed ontology would be empty, which is
        # an unusable configuration.
        ontology = None
        if self.ontology_closed:
            raise RuntimeError('Closed ontology requested, but no logical constants were provided.')
    else:
        all_constants = self.read_constants_from_files(self.constants_files, types)
        # The built-in predicates/constants are always part of the ontology.
        for builtin in (and_pred, or_pred, not_pred,
                        inc_pred, true_const, false_const):
            all_constants.add(builtin)
        ontology = Ontology(all_constants, self.ontology_closed)

    return LogicLanguageServices(types, self.numeral_type_name,
                                 self.type_comparator, ontology,
                                 and_pred, or_pred, not_pred, inc_pred,
                                 true_const, false_const, self.printer)
def visit_logical_constant(self, logical_constant):
    """Store an anonymized copy of the visited constant in ``temp_return``.

    The copy keeps the constant's type but replaces its name with the
    visitor's ``anonymous_name``.
    """
    const_type = logical_constant.get_type()
    anonymized_name = LogicalConstant.make_name(self.anonymous_name, const_type)
    self.temp_return = LogicalConstant.create(anonymized_name, const_type)
def int_to_index_constant(cls, i):
    """Return the index-typed logical constant for integer *i*.

    Reuses the constant registered in the ontology under the same name when
    present; otherwise creates a dynamic constant.

    :param i: integer index value.
    :return: a LogicalConstant of the repository's index type.
    """
    index_type = cls.INSTANCE.type_repository.get_index_type()
    # fix: *i* is an int, so it must be converted to str before
    # concatenation — the original `i + Term.TYPE_SEPARATOR` raised TypeError.
    name = str(i) + Term.TYPE_SEPARATOR + index_type.get_name()
    if cls.INSTANCE.ontology is not None and cls.INSTANCE.ontology.contains(name):
        return cls.INSTANCE.ontology.get(name)
    return LogicalConstant.create_dynamic(name, index_type)
def read(line, origin):
    """Parse a serialized lexeme string into a Lexeme.

    The expected shape (inferred from the slicing — confirm against the
    writer) is a bracketed token list and a bracketed constant list joined
    by ``=``, e.g. ``[tok1, tok2]=[c1:t c2:t]``.

    :param line: serialized lexeme string.
    :param origin: origin tag attached to the resulting Lexeme.
    :return: the parsed Lexeme.
    :raises ValueError: if *line* has no ``=`` separator.
    """
    equals_index = line.find("=")
    if equals_index == -1:
        # fix: find() returns -1 on a missing separator, which previously
        # produced silently garbled slices instead of a clear error.
        raise ValueError("Invalid lexeme string (missing '='): %s" % line)
    tokens_string = line[1:equals_index - 1]
    constants_string = line[equals_index + 2:-1]
    tokens = tokens_string.split(", ")
    constants = [LogicalConstant.read(constant)
                 for constant in constants_string.split()]
    return Lexeme(tokens, constants, origin)