def load_model(self,
               save_model_dir,
               ckpt_file,
               sess,
               saver,
               is_finetuning=False):
        """Restore model weights from a checkpoint, falling back to ImageNet.

        If ``save_model_dir`` exists and contains a checkpoint, the whole
        graph is restored from ``ckpt_file``. Otherwise, every slim model
        variable except those under the ``resnet_v2_50/logits`` scope is
        initialized from the ImageNet weights file in ``self.cfg``.

        Args:
            save_model_dir: Directory expected to contain a trained checkpoint.
            ckpt_file: Checkpoint path handed to ``saver.restore``.
            sess: Active TensorFlow session.
            saver: ``tf.train.Saver`` covering the variables to restore.
            is_finetuning: Currently unused.
                # NOTE(review): has no effect in this body — confirm callers
                # before removing.

        Returns:
            A status string describing which weights were loaded.
        """
        if (os.path.exists(save_model_dir)
                and os_utils.chkpt_exists(save_model_dir)):
            # Try to restore everything if possible
            saver.restore(sess, ckpt_file)
            print('Model Loaded Normally')
            return 'Model Loaded Normally'

        print('Failed to Model Loaded Normally from ', ckpt_file)

        # Exclude the classification head: its shape depends on the number of
        # target classes, so it cannot be filled from the ImageNet checkpoint.
        exclusions = [
            scope.strip() for scope in 'resnet_v2_50/logits'.split(',')
        ]
        variables_to_restore = [
            var for var in tf.contrib.slim.get_model_variables()
            if not any(var.op.name.startswith(exclusion)
                       for exclusion in exclusions)
        ]
        init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
            self.cfg.imagenet__weights_filepath,
            variables_to_restore,
            ignore_missing_vars=False)
        init_fn(sess)
        print('Some variables loaded from imagenet')
        # str() matches the sibling implementation and avoids a TypeError
        # when ckpt_file is a Path-like object rather than a str.
        return 'Failed to Model Loaded Normally from ' + str(ckpt_file)
# Example no. 2 (header and stray vote count from the page this file was
# scraped from; kept as a comment so the file stays valid Python)
    def load_model(self,
                   save_model_dir,
                   ckpt_file,
                   sess,
                   saver,
                   load_logits=False):
        """Load trained weights when a checkpoint exists, else use ImageNet.

        Restores the full graph from ``ckpt_file`` when ``save_model_dir``
        holds a checkpoint. Otherwise initializes the slim model variables
        from the ImageNet weights file in ``self.cfg``; unless
        ``load_logits`` is set, the ``global_step`` and
        ``MobilenetV1/Logits`` scopes are left out of that restore.

        Returns:
            A status string describing which weights were loaded.
        """
        checkpoint_available = (os.path.exists(save_model_dir)
                                and os_utils.chkpt_exists(save_model_dir))
        if checkpoint_available:
            # A trained checkpoint is present -- restore everything from it.
            saver.restore(sess, ckpt_file)
            return 'Model Loaded Normally'

        if load_logits:
            # No variable scope is expected to start with '**', so this
            # effectively excludes nothing and restores every variable.
            excluded_scopes = [scope.strip() for scope in '**'.split(',')]
        else:
            excluded_scopes = [
                scope.strip()
                for scope in 'global_step,MobilenetV1/Logits'.split(',')
            ]

        def _is_restorable(variable):
            # Keep a variable only if it sits outside every excluded scope.
            return not any(variable.op.name.startswith(prefix)
                           for prefix in excluded_scopes)

        variables_to_restore = [
            variable for variable in tf.contrib.slim.get_model_variables()
            if _is_restorable(variable)
        ]

        init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
            self.cfg.imagenet__weights_filepath,
            variables_to_restore,
            ignore_missing_vars=False)
        init_fn(sess)

        return ('Failed to Model Loaded Normally from ' + str(ckpt_file)
                + '. Thus, Loaded Some variables loaded from imagenet')
 def load_model(self,
                save_model_dir,
                ckpt_file,
                sess,
                saver,
                load_logits=False):
     """Restore model weights from a checkpoint, falling back to ImageNet.

     If ``save_model_dir`` exists and contains a checkpoint, the whole
     graph is restored from ``ckpt_file``. Otherwise, the slim model
     variables are initialized from the ImageNet weights file in
     ``self.cfg``; unless ``load_logits`` is set, the ``Logits``,
     ``InceptionV1/Logits`` and ``InceptionV1/AuxLogits`` scopes are
     excluded from that restore.

     Returns:
         A status string describing which weights were loaded.
     """
     if (os.path.exists(save_model_dir)
             and os_utils.chkpt_exists(save_model_dir)):
         # Try to restore everything if possible
         saver.restore(sess, ckpt_file)
         return 'Model weights initialized from {}'.format(ckpt_file)
     else:
         if (load_logits):
             # No scope is expected to start with '**': excludes nothing.
             exclusions = [scope.strip() for scope in '**'.split(',')]
         else:
             # NOTE(review): the bare 'Logits' prefix likely matches no
             # variable (slim names start with 'InceptionV1/') — confirm.
             exclusions = [
                 scope.strip() for scope in
                 'Logits,InceptionV1/Logits,InceptionV1/AuxLogits'.split(
                     ',')
             ]
         variables_to_restore = []
         for var in tf.contrib.slim.get_model_variables():
             for exclusion in exclusions:
                 if var.op.name.startswith(exclusion):
                     break
             else:
                 variables_to_restore.append(var)
         init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
             self.cfg.imagenet_weights_filepath,
             variables_to_restore,
             ignore_missing_vars=False)
         init_fn(sess)
         return 'Model weights initialized from imageNet'
# Example no. 4 (header and stray vote count from the page this file was
# scraped from; kept as a comment so the file stays valid Python)
    def load_model(self,
                   save_model_dir,
                   ckpt_file,
                   sess,
                   saver,
                   load_logits=False):
        """Initialize the network from a custom checkpoint or ImageNet.

        When ``save_model_dir`` contains a checkpoint, the graph is
        restored from ``ckpt_file``. Otherwise the slim model variables
        come from the ImageNet weights file in ``self.cfg``; unless
        ``load_logits`` is set, ``global_step`` and ``densenet161/logits``
        are skipped so the task-specific head keeps its own initialization.

        Returns:
            A status string describing which weights were loaded.
        """
        # Prefer a custom trained model when one is available on disk.
        if (os.path.exists(save_model_dir)
                and os_utils.chkpt_exists(save_model_dir)):
            saver.restore(sess, ckpt_file)
            return 'Model weights initialized from {}'.format(ckpt_file)

        # No custom model: fall back to the ImageNet weights.
        if load_logits:
            # '**' should match no scope prefix, so nothing is excluded.
            skipped_prefixes = [scope.strip() for scope in '**'.split(',')]
        else:
            skipped_prefixes = [
                scope.strip()
                for scope in 'global_step,densenet161/logits'.split(',')
            ]

        restorable = [
            model_var
            for model_var in tf.contrib.slim.get_model_variables()
            if not any(model_var.op.name.startswith(prefix)
                       for prefix in skipped_prefixes)
        ]

        init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
            self.cfg.imagenet_weights_filepath,
            restorable,
            ignore_missing_vars=False)
        init_fn(sess)

        return 'Model weights initialized from imageNet'