def __run():

    # Setup default evaluator.
    evaluator = ThreeClassEvaluator(DataType.Test)

    experiment_data = RuSentRelTrainingData(
        labels_scaler=labels_scaler,
        stemmer=stemmer,
        evaluator=evaluator,
        opinion_formatter=RuSentRelOpinionCollectionFormatter(),
        callback=CallbackEvalF1NPU(DataType.Test))

    extra_name_suffix = Common.create_exp_name_suffix(
        use_balancing=balanced_input,
        terms_per_context=terms_per_context,
        dist_in_terms_between_att_ends=dist_in_terms_between_attitude_ends)

    # Composing experiment.
    experiment = create_experiment(
        exp_type=exp_type,
        experiment_data=experiment_data,
        folding_type=folding_type,
        rusentrel_version=rusentrel_version,
        experiment_io_type=CustomNetworkExperimentIO,
        ruattitudes_version=ra_version,
        load_ruattitude_docs=False,
        extra_name_suffix=extra_name_suffix)

    full_model_name = Common.create_full_model_name(
        folding_type=folding_type,
        model_name=model_name,
        input_type=model_input_type)

    model_io = NeuralNetworkModelIO(
        full_model_name=full_model_name,
        target_dir=experiment.ExperimentIO.get_target_dir(),
        # Whether a source dir is set depends on the presence of a model name tag.
        source_dir=None if model_name_tag is None else u"",
        model_name_tag=ModelNameTagArg.NO_TAG if model_name_tag is None else model_name_tag)

    # Setup model io.
    experiment_data.set_model_io(model_io)

    # Check dir existence in advance.
    model_dir = model_io.get_model_dir()
    if not exists(model_dir):
        print(u"Skipping [path does not exist]: {}".format(model_dir))
        return

    engine = ExperimentF1pnuEvaluator(
        experiment=experiment,
        data_type=DataType.Test,
        max_epochs_count=max_epochs_count,
        forced=force_eval)

    # Starting evaluation process.
    engine.run()
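# Minimal entry-point sketch (an assumption, not taken from the source script):
# it supposes __run() is invoked directly when the module is executed, whereas
# the full script may parse CLI arguments and prepare the referenced globals
# (stemmer, labels_scaler, model_name, etc.) beforehand.
if __name__ == "__main__":
    __run()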
extra_name_suffix = Common.create_exp_name_suffix(
    use_balancing=balanced_input,
    terms_per_context=terms_per_context,
    dist_in_terms_between_att_ends=dist_in_terms_between_attitude_ends)

experiment = create_experiment(
    exp_type=exp_type,
    experiment_data=experiment_data,
    folding_type=folding_type,
    rusentrel_version=rusentrel_version,
    ruattitudes_version=ra_version,
    experiment_io_type=CustomNetworkExperimentIO,
    extra_name_suffix=extra_name_suffix,
    load_ruattitude_docs=False)

full_model_name = Common.create_full_model_name(
    folding_type=folding_type,
    model_name=model_name,
    input_type=model_input_type)

model_io = NeuralNetworkModelIO(
    full_model_name=full_model_name,
    target_dir=experiment.ExperimentIO.get_target_dir(),
    source_dir=model_load_dir,
    embedding_filepath=embedding_filepath,
    vocab_filepath=vocab_filepath,
    model_name_tag=model_name_tag)

# Setup logging dir.
callback.set_log_dir(join(model_io.get_model_dir(), Common.log_dir))

# Setup model io.
experiment_data.set_model_io(model_io)
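# The next snippet configures console logging through a `stream_handler` that
# is not created within the excerpt; a minimal sketch of the assumed setup
# (a plain handler writing to stderr):
import logging

stream_handler = logging.StreamHandler()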
log_formatter = logging.Formatter('%(asctime)s %(levelname)8s %(name)s | %(message)s')
stream_handler.setFormatter(log_formatter)

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(stream_handler)

# Initialize entity formatter.
entity_formatter = create_entity_formatter(
    fmt_type=entity_formatter_type,
    create_russian_pos_tagger_func=lambda: POSMystemWrapper(mystem=stemmer.MystemInstance))

# Setup model name.
full_model_name = Common.create_full_model_name(
    sample_fmt_type=sample_formatter_type,
    entities_fmt_type=entity_formatter_type,
    labels_count=labels_count)

model_io = BertModelIO(full_model_name=full_model_name)

# Create experiment data and all the related information.
experiment_data = CustomSerializationData(
    labels_scaler=create_labels_scaler(labels_count),
    stemmer=stemmer,
    frames_version=frames_version if parse_frames else None,
    model_io=model_io,
    terms_per_context=terms_per_context,
    dist_in_terms_between_attitude_ends=dist_in_terms_between_attitude_ends)

extra_name_suffix = create_exp_name_suffix(
def _create_model_dir(self, folding_type, model_name, exp_type):
    # Compose the model directory name from the folding type, this
    # experiment's input type, and the model name; exp_type is received
    # but does not participate in the directory name.
    return Common.create_full_model_name(folding_type=folding_type,
                                         input_type=self.__input_type,
                                         model_name=model_name)
def _create_model_name(labels_count, sample_fmt_type):
    return Common.create_full_model_name(
        sample_fmt_type=sample_fmt_type,
        labels_count=labels_count,
        entities_fmt_type=EntityFormatterTypes.SimpleSharpPrefixed)
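# Illustrative call for the helper above (the argument values are assumptions,
# not taken from the source): callers choose only the label count and the
# sample formatter, while the entity formatter is fixed to
# EntityFormatterTypes.SimpleSharpPrefixed.
full_model_name = _create_model_name(labels_count=3,
                                     sample_fmt_type=sample_formatter_type)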