Example #1
def run(self):
    """
    Start evaluating.
    :return: evaluation result from model.evaluate
    """
    # prepare data
    self.eval_data = self.prepare_data()
    # split data into features and labels
    x_eval, y_eval = self.eval_data
    # init config from the checkpoint's JSON file and flags
    config = load_config(self.flags)
    # look up the model class by name and module file
    model_class_name, model_file = config['model_class'], config['model_file']
    self.model_class = find_model(model_class_name, model_file)
    # init model
    model = self.model_class(config)
    model.init()
    # build variables by binding the evaluation inputs
    model.set_inputs(x_eval)
    # restore model weights from the checkpoint
    load_model(model, self.flags.checkpoint_dir, self.flags.checkpoint_name)
    # evaluate
    return model.evaluate(x_eval, y_eval)
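
The find_model helper used above is not shown on this page. A minimal sketch of what such a lookup could do, assuming model_file is a path to a Python module and model_class_name is the name of a class defined in it; the function body and the module-loading approach are assumptions, not the library's actual code:

import importlib.util


def find_model(model_class_name, model_file):
    # Assumed behaviour: load the module file and return the named class.
    spec = importlib.util.spec_from_file_location('model_module', model_file)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return getattr(module, model_class_name)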
Example #2
def run(self, **kwargs):
    """
    Start evaluating.
    :return: evaluation result from model.evaluate
    """
    # prepare data
    self.eval_data = self.data()
    # split data into features and labels
    x_eval, y_eval = self.eval_data
    # init config from the checkpoint's JSON file and runtime config
    config = load_config(self.config)
    # look up the model class by name and module file
    model_class_name = config.get('model_class_name')
    model_file_name = config.get('model_file_name')
    self.model_class = find_model_class(model_class_name, model_file_name)

    # init model
    model = self.model_class(config=config)
    model.logger = self.logger
    self.logger.info(f'initialized logger {model.logger} for model {model}')

    # restore model weights from the checkpoint
    load_model(model, self.config.get('checkpoint_dir'),
               self.config.get('checkpoint_name'))
    # evaluate
    return model.evaluate(x_eval, y_eval, **kwargs)
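
load_model restores the weights saved during training. A minimal sketch assuming TensorFlow object-based checkpoints; wrapping the model in tf.train.Checkpoint and falling back to the latest checkpoint are assumptions, not the library's documented behaviour:

import os

import tensorflow as tf


def load_model(model, checkpoint_dir, checkpoint_name=None):
    # Assumed behaviour: restore model weights from a checkpoint if one exists.
    checkpoint = tf.train.Checkpoint(model=model)
    if checkpoint_name:
        path = os.path.join(checkpoint_dir, checkpoint_name)
    else:
        path = tf.train.latest_checkpoint(checkpoint_dir)
    if path:
        checkpoint.restore(path).expect_partial()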
Example #3
def run(self):
    """
    Start inferring.
    :return: inference result from model.infer
    """
    # prepare test data
    self.test_data = self.prepare_data()
    # init model
    model = self.model_class(load_config(self.flags))
    model.init()
    # build variables by tracing a forward pass on a symbolic input
    model.call(tf.keras.Input(shape=get_shape(self.test_data)), training=False)
    # restore model weights from the checkpoint if one exists
    load_model(model, self.flags.checkpoint_dir, self.flags.checkpoint_name)
    # infer
    return model.infer(self.test_data)
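
get_shape only needs to yield the per-sample input shape that tf.keras.Input expects. A minimal sketch under that assumption, for array-like test data; the helper body is an assumption:

import numpy as np


def get_shape(data):
    # Assumed behaviour: shape of one sample, i.e. drop the batch dimension.
    return np.asarray(data).shape[1:]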
Example #4
def run(self):
    """
    Start evaluating.
    :return: evaluation result from model.evaluate
    """
    # prepare data
    self.eval_data = self.prepare_data()
    # split data into features and labels
    x_eval, y_eval = self.eval_data
    # init model
    model = self.model_class(load_config(self.flags))
    model.init()
    # build variables by tracing a forward pass on a symbolic input
    model.call(tf.keras.Input(shape=get_shape(x_eval)), training=False)
    # restore model weights from the checkpoint
    load_model(model, self.flags.checkpoint_dir, self.flags.checkpoint_name)
    # evaluate
    return model.evaluate(x_eval, y_eval)
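
For this workflow to run, self.model_class must produce a Keras model whose init() compiles it, so that evaluate() has a loss and metrics to report. A minimal sketch of such a class; the class name, its single Dense layer, and the num_classes config key are hypothetical:

import tensorflow as tf


class SketchModel(tf.keras.Model):
    # Hypothetical model class compatible with the run() method above.

    def __init__(self, config):
        super().__init__()
        self.dense = tf.keras.layers.Dense(config.get('num_classes', 2))

    def init(self):
        # compile so that evaluate() has a loss and metrics
        self.compile(optimizer='adam',
                     loss=tf.keras.losses.SparseCategoricalCrossentropy(
                         from_logits=True),
                     metrics=['accuracy'])

    def call(self, inputs, training=False):
        return self.dense(inputs)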
Example #5
def run(self, **kwargs):
    """
    Start inferring.
    :return: inference result from model.infer
    """
    # prepare test data
    self.test_data = self.data()
    # init config from the checkpoint's JSON file and runtime config
    config = load_config(self.config)
    # look up the model class by name and module file
    model_class_name = config.get('model_class_name')
    model_file_name = config.get('model_file_name')
    self.model_class = find_model_class(model_class_name, model_file_name)
    # init model
    model = self.model_class(config=config)
    # restore model weights from the checkpoint if one exists
    load_model(model, self.config.get('checkpoint_dir'),
               self.config.get('checkpoint_name'))
    # infer
    return model.infer(self.test_data, **kwargs)
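
load_config merges the config JSON saved alongside the checkpoints with the runtime settings. A minimal sketch assuming a dict-like runtime config and a file named config.json inside the checkpoint directory; the file name, the merge order, and the function body are assumptions:

import json
import os


def load_config(runtime_config):
    # Assumed behaviour: read the saved config and overlay runtime settings.
    config = {}
    path = os.path.join(runtime_config.get('checkpoint_dir', '.'), 'config.json')
    if os.path.exists(path):
        with open(path) as f:
            config = json.load(f)
    config.update(runtime_config)
    return config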
Example #6
def run(self):
    """
    Start inferring.
    :return: inference result from model.infer
    """
    # prepare test data
    self.test_data = self.prepare_data()
    # init config from the checkpoint's JSON file and flags
    config = load_config(self.flags)
    # look up the model class by name and module file
    model_class_name, model_file = config['model_class'], config['model_file']
    self.model_class = find_model(model_class_name, model_file)
    # init model
    model = self.model_class(config)
    model.init()
    # build variables by binding the test inputs
    model.set_inputs(self.test_data)
    # restore model weights from the checkpoint if one exists
    load_model(model, self.flags.checkpoint_dir, self.flags.checkpoint_name)
    # infer
    return model.infer(self.test_data)
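
set_inputs plays the same role as the explicit model.call(tf.keras.Input(...)) in Examples #3 and #4: it creates the model's variables before the checkpoint is restored. A minimal sketch, written as a free function for brevity although the snippet above calls it as a method, assuming a Keras model and array-like batched data; the body is an assumption:

import numpy as np
import tensorflow as tf


def set_inputs(model, data):
    # Assumed behaviour: trace a forward pass over a symbolic input shaped
    # like one sample of data, so that all variables get created before
    # the checkpoint is restored.
    inputs = tf.keras.Input(shape=np.asarray(data).shape[1:])
    model.call(inputs, training=False)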