def get_corenlp_instance(serviceid, engine):
    """Return a CoreNLP recognizer instance for *serviceid*.

    The "DEFAULT" service uses the shared default model; every other
    service gets its custom model.

    :param serviceid: service identifier; "DEFAULT" selects the default model
    :param engine: engine identifier forwarded to the model lookup
    :return: object built by create_instance() for the resolved model
    """
    # BUG FIX: the original compared with `serviceid is "DEFAULT"`, which
    # tests object identity and only matched via CPython string interning.
    if serviceid == "DEFAULT":
        model = get_corenlp_default_model(engine)
    else:
        model = get_corenlp_custom_model(engine)
    instance = create_instance(model, serviceid=serviceid)
    return instance
def load(self, serviceid, model_type, engine=None, class_name=None):
    """Load a persisted model for *serviceid* from the local file system.

    :param serviceid: service identifier used to build the model file name
    :param model_type: MODEL_TYPE_NER or MODEL_TYPE_IR
    :param engine: engine identifier used to build the model file name
    :param class_name: recognizer class instantiated for NER models
    :return: the loaded model object, or None when the file is missing,
        the type is unknown, or loading fails
    """
    if model_type == MODEL_TYPE_NER:
        # The recognizer instance supplies both the file extension and
        # the deserialization logic for NER models.
        er = create_instance(class_name, serviceid=serviceid)
        extension = er.get_extension()
    else:
        extension = "dat"
    model_name = get_file_name(serviceid, model_type, engine, extension)
    self.logger.info(
        "Params %s,%s,%s,%s - %s" %
        (serviceid, model_type, engine, extension, model_name))
    # NOTE(review): existence is checked on file_paths[0] but loading uses
    # file_paths[1] -- presumably a (marker, payload) pair; confirm against
    # __get__file_paths before changing.
    file_paths = self.__get__file_paths(serviceid, model_name)
    logging.getLogger(__name__).info(file_paths)
    try:
        if os.path.exists(file_paths[0]):
            if model_type == MODEL_TYPE_NER:
                er.load(file_paths[1])
                return er
            elif model_type == MODEL_TYPE_IR:
                # Context manager guarantees the handle is closed even when
                # unpickling raises (the original leaked it on error).
                with open(file_paths[1], 'rb') as f:
                    return pickle.load(f)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; loading stays best-effort.
        self.logger.warn("Unable to load %s model for service %s" %
                         (model_type, serviceid))
    return None
def _instantiate(configuration):
    """Instantiate the class named in *configuration*.

    :param configuration: dict with a mandatory 'class' entry and an
        optional 'params' dict; a 'score_func' param is resolved from its
        name to the actual callable before construction.
    :return: a new instance of configuration['class']
    :raises AssertionError: when the 'class' key is missing
    """
    # Explicit raise instead of `assert` so the validation survives
    # `python -O` (asserts are stripped); exception type and message are
    # unchanged for callers that catch AssertionError.
    if not dict_contains(configuration, ['class']):
        raise AssertionError(
            "build_feature_selector: missing mandatory class")
    # Shallow-copy so resolving 'score_func' below never mutates the
    # caller's configuration dict.
    params = copy.copy(
        configuration["params"]) if 'params' in configuration else {}
    if 'score_func' in params:
        params['score_func'] = get_function(params['score_func'])
    return create_instance(configuration["class"], **params)
def load_default_models_celery(self):
    """Instantiate every default celery model and cache the successfully
    loaded recognizers in self.store under the "DEFAULT" service id."""
    for model_each in get_default_models_celery():
        default_er = create_instance(model_each, serviceid="DEFAULT")
        if default_er.model is None:
            # Logger.warn is a deprecated alias of Logger.warning.
            logger.warning("Could not load model %s" % model_each)
        else:
            self.store.put(default_er,
                           serviceid="DEFAULT",
                           model_type=MODEL_TYPE_NER,
                           engine=default_er.get_engine())
def _build_cross_validation(self, y=None):
    """Build the cross-validation splitter described in self.config.

    :param y: optional target array forwarded to the splitter constructor
    :return: the configured splitter, or a deterministic default
        StratifiedShuffleSplit when no "cross_validation" section exists
    """
    if dict_contains(self.config, "cross_validation"):
        cv_config = self.config["cross_validation"]
        # BUG FIX: copy the params dict -- the original wrote `y` straight
        # into self.config["cross_validation"]["params"], mutating shared
        # configuration on every call.
        params = dict(cv_config["params"]) if 'params' in cv_config else {}
        if y is not None:
            params["y"] = y
        return create_instance(cv_config['class'], **params)
    # Fixed random_state keeps the default split reproducible.
    return StratifiedShuffleSplit(n_splits=5, test_size=0.33,
                                  random_state=42)
def get_resolved_mappings(self, ice_entities, text):
    """Resolve each ICE entity against *text* and collect the mappings
    whose tag matches the entity name (case-insensitive).

    :param ice_entities: iterable of dicts with 'name' and
        'resolutionClass' keys
    :param text: raw text handed to each resolver's resolve()
    :return: list of matching mapping dicts, in input order
    """
    resolved_mappings = []
    for entity in ice_entities:
        resolver = create_instance(entity['resolutionClass'])
        entity_name = entity['name'].upper()
        # FIX: the original named the loop variable `map`, shadowing the
        # builtin; also dropped a redundant emptiness check (iterating an
        # empty list is a no-op) and a pointless `del obj` (the name is
        # rebound on every iteration anyway).
        for mapping in resolver.resolve(text):
            if entity_name == mapping['tag'].upper():
                resolved_mappings.append(mapping)
    return resolved_mappings
def instance(cfg, impl_attr="class", logger=None):
    """Build one transformation object from a configuration dict.

    :param cfg: configuration; cfg[impl_attr] names the implementation
        class and an optional 'params' dict supplies constructor kwargs
    :param impl_attr: key under which the implementation class is stored
    :param logger: optional logger; defaults to this module's logger
    :return: the instantiated transformation object
    """
    if not logger:
        logger = logging.getLogger(__name__)
    assert impl_attr in cfg, 'Mandatory field for identifying implementation missing from config'
    logger.info("building transformations from configs: %s" % cfg)
    kwargs = cfg.get('params', {})
    return create_instance(cfg[impl_attr], **kwargs)
def reload_cache(self, serviceid, model_type, engine, model_class, model_name):
    """Refresh the local cache for one model: pull the model file from
    remote storage, publish the reload, then stamp the cache timestamp.

    :param serviceid: service identifier
    :param model_type: model type string; "ner" selects recognizer handling
    :param engine: engine identifier (CoreNLP engines use their own naming)
    :param model_class: recognizer class used to derive the file extension
    :param model_name: cache key updated with the reload timestamp
    """
    if model_type == "ner":
        recognizer = create_instance(model_class, serviceid=serviceid)
        extension = recognizer.get_extension()
    else:
        extension = "dat"
    # CoreNLP engines have a dedicated model-file naming scheme.
    if engine in get_all_corenlp_engines():
        model_file_name = get_corenlp_modelname(serviceid)
    else:
        model_file_name = get_file_name(serviceid, model_type, engine,
                                        extension)
    self.get_model_from_remote(model_file_name, serviceid)
    self.publish(serviceid, model_type, engine, model_class)
    # Record when this cache entry was last refreshed.
    app_cache.cached_ts_map[model_name] = datetime.datetime.utcnow()
from ice_commons.core.class_utils import create_instance

# Smoke test: build the MITIE NER engine from its dotted class path and
# show the resulting object.
ner_engine = create_instance("ice_commons.er.engines.mitie_ner.MitieNER")
print(ner_engine)
def instantiate_trainer(self, custom_entity_model):
    """Create an entity-recognizer trainer for this service.

    :param custom_entity_model: class path of the recognizer to build
    :return: the instantiated entity recognizer
    """
    # Fall back to a freshly generated token when no service id is set.
    sid = self.serviceid or token()
    return create_instance(custom_entity_model, serviceid=sid)