def __entity_centric_mapper(self, entity_id):
    """Gets the list of DBpedia types for a given entity ID."""
    en = Entity()
    all_types = en.lookup_en(entity_id).get(RDF_TYPE_PROP, [])
    # Filters out types that are not valid final entity-centric types
    final_types = [t for t in all_types if self.__valid_final_ec_type(t)]
    return final_types
def main(args):
    # config = {Entity.COLLECTION: args.collection}
    # if args.lookup_entity:
    #     config[Entity.OPERATION] = Entity.LOOKUP_ENTITY
    #     config[Entity.PARAMETERS] = {Entity.ID: args.id}
    en = Entity()
    if args.operation == "lookup_entity":
        res = en.lookup_en(args.input)
    elif args.operation == "lookup_name":
        res = en.lookup_name_dbpedia(args.input)
        # res = en.lookup_name_facc(args.input)
    pprint(res)
def gen_train_set(gt, query_file, train_set):
    """Generates the training set for the LTR entity linking model."""
    entity, elastic, fcache = Entity(), ElasticCache(ELASTIC_INDICES[0]), FeatureCache()
    inss = Instances()
    positive_annots = set()

    # Adds ground truth instances (positive instances)
    PLOGGER.info("Adding ground truth instances (positive instances) ...")
    for item in sorted(gt):  # item is a (qid, query, en_id, mention) tuple
        ltr = LTR(Query(item[1], item[0]), entity, elastic, fcache)
        # __gen_raw_ins is a private method of LTR; outside the class it is
        # only reachable through its name-mangled form
        ins = ltr._LTR__gen_raw_ins(item[2], item[3])
        ins.features = ltr.get_features(ins)
        ins.target = 1
        inss.add_instance(ins)
        positive_annots.add((item[0], item[2]))

    # Adds all other candidate instances (negative instances)
    PLOGGER.info("Adding all other instances (negative instances) ...")
    for qid, q in sorted(json.load(open(query_file, "r")).items()):
        PLOGGER.info("Query [" + qid + "]")
        ltr = LTR(Query(q, qid), entity, elastic, fcache)
        q_inss = ltr.get_candidate_inss()
        for ins in q_inss.get_all():
            if (qid, ins.get_property("en_id")) in positive_annots:
                continue
            ins.target = 0
            inss.add_instance(ins)

    inss.to_json(train_set)
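# A minimal usage sketch for gen_train_set. The ground truth tuples follow
# the (qid, query, en_id, mention) order iterated above; the entity ID
# format and both file paths are illustrative assumptions, not fixed by the
# code.
gt = [
    ("q1", "total recall", "<dbpedia:Total_Recall_(1990_film)>", "total recall"),
]
gen_train_set(gt, "data/queries.json", "data/train_set.json")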
def main(args):
    config = FileUtils.load_config(args.config)
    el = EL(config, Entity())
    if args.query:
        res = el.link(args.query)
        pprint(res)
    else:
        el.batch_linking()
def main(args):
    conf = FileUtils.load_config(args.config)
    el = EL(conf, Entity(), ElasticCache(DBPEDIA_INDEX), FeatureCache())
    if conf.get("gen_model", False):
        LTR.train(conf)
    elif args.query:
        res = el.link(args.query)
        pprint(res)
    else:
        el.batch_linking()
def main(args):
    en = Entity()
    if args.operation == "lookup_id":
        res = en.lookup_en(args.input)
    elif args.operation == "lookup_sf_dbpedia":
        res = en.lookup_name_dbpedia(args.input)
    elif args.operation == "lookup_sf_facc":
        res = en.lookup_name_facc(args.input)
    elif args.operation == "freebase2dbpedia":
        res = en.fb_to_dbp(args.input)
    elif args.operation == "dbpedia2freebase":
        res = en.dbp_to_fb(args.input)
    else:  # guards against pprint-ing an undefined res
        raise ValueError("Unknown operation: " + args.operation)
    pprint(res)
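# A sketch of the CLI wiring this main() expects; the positional argument
# names and help strings are assumptions, not necessarily the flags the
# actual script defines.
import argparse

def arg_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument("operation",
                        choices=["lookup_id", "lookup_sf_dbpedia", "lookup_sf_facc",
                                 "freebase2dbpedia", "dbpedia2freebase"],
                        help="operation to perform")
    parser.add_argument("input", help="entity ID, surface form, or URI")
    return parser.parse_args()

if __name__ == "__main__":
    main(arg_parser())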
from flask import Flask, jsonify

from nordlys.core.retrieval.elastic_cache import ElasticCache
from nordlys.logic.entity.entity import Entity
from nordlys.logic.features.feature_cache import FeatureCache
from nordlys.services.el import EL
from nordlys.services.er import ER
from nordlys.services.tti import TTI
from nordlys.core.utils.logging_utils import RequestHandler
from nordlys.core.utils.Api_handler import API_Handler
import logging
import traceback
from time import strftime
from nordlys.config import LOGGING_PATH, PLOGGER, ELASTIC_INDICES, Api_Log_Path

# Variables
DBPEDIA_INDEX = ELASTIC_INDICES[0]

__entity = Entity()
__elastic = ElasticCache(DBPEDIA_INDEX)
__fcache = FeatureCache()

app = Flask(__name__)


def error(msg):
    """Wraps an error message in a JSON response.

    @todo complete error handling

    :param msg: error message
    :return: JSON response
    """
    res = {"ERROR": msg}
    return jsonify(**res)
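# A hypothetical endpoint sketch showing how the module-level singletons and
# error() could be tied together; the route, query parameter name, and the
# empty config dict are assumptions, not the service's documented API.
from flask import request

@app.route("/el")
def el_service():
    query = request.args.get("q")
    if query is None:
        return error("missing query parameter 'q'")
    try:
        el = EL({}, __entity, __elastic, __fcache)
        return jsonify(el.link(query))
    except Exception:
        PLOGGER.error(traceback.format_exc())
        return error("internal server error")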
def main(args):
    entity = Entity()
    query = Query(args[0])
    cmns = Cmns(query, entity, cmns_th=0.1)
    print(cmns.link())
def main(args):
    entity = Entity()
    mention = Mention(args[0], entity, cmns_th=0.1)
    ens = mention.get_cand_ens()
    print(ens)
def main():
    sf = Entity(MONGO_COLLECTION_SF_FACC)
    men_en = FtrMentionEntity(sf, sources=["facc12"])
    print(men_en.commonness("/m/0ngyw16", "009 re:cyborg"))
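# For context: commonness is the fraction of times a surface form is linked
# to a given entity in the annotation source, i.e. P(e|m) = n(m, e) / n(m).
# A minimal sketch over a hypothetical per-entity count dict; the actual
# MongoDB record layout behind FtrMentionEntity may differ.
def commonness(link_counts, en_id):
    """link_counts maps entity IDs to how often the mention links to them."""
    total = sum(link_counts.values())
    return link_counts.get(en_id, 0) / total if total > 0 else 0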