def __init__(self, config):
    """Initialize the solver: record saver paths and build the TF session.

    Args:
        config: nested dict config; reads ``config['solver']['saver']``
            for the model path, plus session options via helpers.
    """
    super().__init__(config)
    self.model_compiled = False
    # Saver section of the config holds the serialized-model location.
    saver_conf = config['solver']['saver']
    self.model_path = saver_conf['model_path']
    self.checkpoint_dir = get_checkpoint_dir(self.config)
    self.session_conf = get_session_conf(self.config)
    # Create one session and register it as the Keras backend session
    # so Keras layers share it.
    self.session = tf.Session(config=self.session_conf)
    tf.keras.backend.set_session(self.session)
    self.metrics = self.get_metrics()
def set_experimental_environment(self):
    """Set the experimental environment.

    Gathers session, task, and saver settings from ``self.config`` and
    returns them as a flat tuple:
    (session_conf, max_to_keep, batch_size, num_epochs,
     save_checkpoint_steps, resume_model_path, print_every).
    """
    # Session options derived from the full config.
    sess_cfg = get_session_conf(self.config)

    # Per-task training settings.
    task_cfg = self.config["data"]["task"]
    batch = task_cfg['batch_size']
    epochs = task_cfg['epochs']

    # Checkpoint/saver settings; resume path is optional.
    saver_cfg = self.config['solver']['saver']
    keep_max = saver_cfg['max_to_keep']
    ckpt_steps = saver_cfg['save_checkpoint_steps']
    resume_path = saver_cfg.get('resume_model_path', None)
    log_interval = saver_cfg['print_every']

    return (sess_cfg, keep_max, batch, epochs, ckpt_steps,
            resume_path, log_interval)
def __init__(self, config):
    """Initialize the text task: modes, batching, and vocabulary paths.

    Args:
        config: nested dict config; reads ``config['data']['task']`` for
            batching/vocab settings and ``config['data'][INFER]`` for the
            optional ``infer_no_label`` flag.
    """
    super().__init__(config)
    self.all_modes = (utils.INFER, utils.EVAL, utils.TRAIN)
    infer_conf = self.config["data"][utils.INFER]
    self.infer_no_label = infer_conf.get('infer_no_label', False)

    self.model_config = self.config["model"]
    self.task_config = self.config["data"]["task"]

    task_cfg = self.task_config
    self.batch_size = task_cfg['batch_size']
    self.num_parallel_calls = task_cfg['num_parallel_calls']
    self.vocab_min_frequency = task_cfg['vocab_min_frequency']
    self.use_custom_vocab = task_cfg.get('use_custom_vocab', False)
    self.text_vocab_file_path = task_cfg['text_vocab']

    # Normalize the label vocab entry to a list so single- and
    # multi-output tasks are handled uniformly downstream.
    label_paths = task_cfg['label_vocab']
    if not isinstance(label_paths, list):
        label_paths = [label_paths]
    self.label_vocab_file_paths = label_paths
    self.output_num = len(label_paths)
    self.multi_output = self.output_num > 1
    self.multi_text = False

    self.session_conf = get_session_conf(self.config)
    self.init_feed_dict = {}