def load_net(runner, net, model_path=None):
    """Load checkpoint weights (and optionally the runner state) into ``net``.

    Args:
        runner: Runner object exposing ``configer`` and ``runner_state``.
        net: Network module; may be wrapped (has ``.module``) by
            DataParallel/DistributedDataParallel.
        model_path: Optional explicit checkpoint path. When given it takes
            precedence over the ``network.resume`` config entry.

    Returns:
        ``net`` — with weights loaded when a resume path was found,
        unchanged otherwise.

    Raises:
        RuntimeError: if the checkpoint file contains no recognizable
            state dict.
    """
    if model_path is None and runner.configer.get('network', 'resume') is None:
        return net

    # Explicit argument wins over the configured resume path.
    resume_path = model_path if model_path is not None \
        else runner.configer.get('network', 'resume')

    if not os.path.exists(resume_path):
        Log.warn('Resume path: {} not exists...'.format(resume_path))
        return net

    Log.info('Resuming from {}'.format(resume_path))
    resume_dict = torch.load(resume_path, map_location="cpu")
    if 'state_dict' in resume_dict:
        checkpoint_dict = resume_dict['state_dict']
    elif 'model' in resume_dict:
        checkpoint_dict = resume_dict['model']
    elif isinstance(resume_dict, OrderedDict):
        # Raw OrderedDict checkpoint: the dict itself is the state dict.
        checkpoint_dict = resume_dict
    else:
        # Bug fix: report the path that was actually loaded (model_path may
        # differ from the configured 'network.resume' entry).
        raise RuntimeError(
            'No state_dict found in checkpoint file {}'.format(resume_path))

    # Unwrap (Distributed)DataParallel before loading the state dict.
    target = net.module if hasattr(net, 'module') else net
    RunnerHelper.load_state_dict(target, checkpoint_dict,
                                 runner.configer.get('network', 'resume_strict'))

    if runner.configer.get('network', 'resume_continue'):
        runner.runner_state = resume_dict['runner_state']

    return net
Example #2
0
    def __getitem__(self, index):
        """Fetch the image/label pair at ``index``.

        Unreadable or invalid images are skipped by advancing (with
        wrap-around) to the next index; ``valid`` records whether any
        skip happened.

        Returns:
            dict with a ``valid`` flag and DataContainer-wrapped
            ``img`` and ``label``.
        """
        img = None
        valid = True
        # NOTE(review): if every entry in img_list is unreadable this loop
        # never terminates — presumably the dataset is pre-validated.
        while img is None:
            try:
                img = ImageHelper.read_image(
                    self.img_list[index],
                    tool=self.configer.get('data', 'image_tool'),
                    mode=self.configer.get('data', 'input_mode'))
                assert isinstance(img, (np.ndarray, Image.Image))
            # Bug fix: narrowed from a bare `except:` so KeyboardInterrupt
            # and SystemExit are no longer swallowed.
            except Exception:
                Log.warn('Invalid image path: {}'.format(self.img_list[index]))
                img = None
                valid = False
                index = (index + 1) % len(self.img_list)

        label = torch.from_numpy(np.array(self.label_list[index]))
        if self.aug_transform is not None:
            img = self.aug_transform(img)

        if self.img_transform is not None:
            img = self.img_transform(img)

        return dict(valid=valid,
                    img=DataContainer(img, stack=True),
                    label=DataContainer(label, stack=True))
Example #3
0
    def __read_file(self, data_dir, dataset):
        """Read the label file(s) for ``dataset`` and collect valid images.

        Each non-empty line is ``<relative_path> <label_1> ... <label_k>``.
        Lines whose image is missing on disk or not a valid image file are
        skipped with a warning.

        Args:
            data_dir: Root directory the relative image paths are joined to.
            dataset: Split name ('train', 'val', ...); selects the
                ``data.{split}_label_path`` config entry.

        Returns:
            (img_list, mlabel_list): absolute image paths and the
            corresponding multi-label integer lists.
        """
        img_list = list()
        mlabel_list = list()

        all_img_list = []
        with open(self.configer.get('data.{}_label_path'.format(dataset)),
                  'r') as file_stream:
            all_img_list += file_stream.readlines()

        # Optionally fold the validation split into the training set.
        if dataset == 'train' and self.configer.get('data.include_val',
                                                    default=False):
            with open(self.configer.get('data.val_label_path'),
                      'r') as file_stream:
                all_img_list += file_stream.readlines()

        # Improvement: iterate lines directly instead of range(len(...)),
        # and drop the unused `img_dict` local of the original.
        for line in all_img_list:
            line_items = line.strip().split()
            if not line_items:
                continue

            path = line_items[0]
            full_path = os.path.join(data_dir, path)
            if not os.path.exists(full_path) or not ImageHelper.is_img(path):
                Log.warn('Invalid Image Path: {}'.format(full_path))
                continue

            img_list.append(full_path)
            mlabel_list.append([int(item) for item in line_items[1:]])

        assert len(img_list) > 0
        Log.info('Length of {} imgs is {}...'.format(dataset, len(img_list)))
        return img_list, mlabel_list
Example #4
0
    def __getitem__(self, index):
        """Fetch the image and its meta information at ``index``.

        Unreadable images are skipped by advancing (with wrap-around) to
        the next index; ``meta['valid']`` records whether a skip happened.

        Returns:
            dict with DataContainer-wrapped ``img`` and ``meta``
            (meta stays on CPU and is not stacked).
        """
        img = None
        valid = True
        while img is None:
            try:
                img = ImageHelper.read_image(self.item_list[index][0],
                                             tool=self.configer.get('data', 'image_tool'),
                                             mode=self.configer.get('data', 'input_mode'))
                assert isinstance(img, (np.ndarray, Image.Image))
            # Bug fix: narrowed from a bare `except:` so KeyboardInterrupt
            # and SystemExit are no longer swallowed.
            except Exception:
                Log.warn('Invalid image path: {}'.format(self.item_list[index][0]))
                img = None
                valid = False
                index = (index + 1) % len(self.item_list)

        # Size before augmentation, and post-augmentation size reversed
        # (presumably (h, w) — ImageHelper.get_size order not visible here).
        ori_img_size = ImageHelper.get_size(img)
        if self.aug_transform is not None:
            img = self.aug_transform(img)

        border_hw = ImageHelper.get_size(img)[::-1]
        if self.img_transform is not None:
            img = self.img_transform(img)

        meta = dict(
            valid=valid,
            ori_img_size=ori_img_size,
            border_hw=border_hw,
            img_path=self.item_list[index][0],
            filename=self.item_list[index][1],
            label=self.item_list[index][2]
        )
        return dict(
            img=DataContainer(img, stack=True),
            meta=DataContainer(meta, stack=False, cpu_only=True)
        )
Example #5
0
 def __init__(self, max_degree, rotate_ratio=0.5, mean=(104, 117, 123)):
     """Random rotation transform.

     Args:
         max_degree (int): maximum rotation angle in degrees.
         rotate_ratio (float): probability of applying the rotation.
         mean (tuple): per-channel value, presumably used to fill the
             borders exposed by rotation — usage not visible here.
     """
     assert isinstance(max_degree, int)
     self.mean = mean
     self.ratio = rotate_ratio
     self.max_degree = max_degree
     Log.warn(
         'Currently `RandomRotate` is only implemented for `img`, `labelmap` and `maskmap.'
         if False else
         'Currently `RandomRotate` is only implemented for `img`, `labelmap` and `maskmap`.'
     )
Example #6
0
        def _set_value(key, value):
            """
            Set (or append to) a dotted `key`, operating directly on
            `params_root`. Missing intermediate dicts are created on the
            fly; a non-dict intermediate aborts the program. A trailing
            `+` on the last component appends `value` to an existing list
            instead of overwriting.
            """
            parts = key.split('.')

            # Walk/create intermediate dicts down to the parent of the leaf.
            parent_dict = self.params_root
            for depth, cur_key in enumerate(parts[:-1]):
                prefix = '.'.join(parts[:depth + 1])
                if cur_key not in parent_dict:
                    parent_dict[cur_key] = dict()
                    Log.info('{} not exists, set as `dict()`.'.format(prefix))
                elif not isinstance(parent_dict[cur_key], dict):
                    Log.error(
                        'Cannot set {child_name} on {root_name}, as {root_name} is `{root_type}`.'
                        .format(root_name=prefix,
                                child_name='.'.join(parts[depth + 1:]),
                                root_type=type(parent_dict[cur_key])))
                    sys.exit(1)
                parent_dict = parent_dict[cur_key]

            cur_key = parts[-1]

            # Append mode: `foo.bar+` appends to the list at `foo.bar`.
            if cur_key.endswith('+'):
                cur_key = cur_key[:-1]
                target = parent_dict.get(cur_key)
                if not isinstance(target, list):
                    Log.error(
                        'Cannot append to {key}, as its type is {target_type}.'
                        .format(key=key[:-1], target_type=type(target)))
                    sys.exit(1)
                target.append(value)
                Log.info('Append {value} to {key}. Current: {target}.'.format(
                    key=key[:-1],
                    value=value,
                    target=target,
                ))
                return

            existing_value = parent_dict.get(cur_key)
            if existing_value is not None:
                Log.warn(
                    'Override {key} using {value}. Previous value: {old_value}.'
                    .format(key=key, value=value, old_value=existing_value))
            else:
                Log.info('Set {key} as {value}.'.format(key=key, value=value))
            parent_dict[cur_key] = value
    def get(self, *key, **kwargs):
        """Look up a config entry; positional parts are joined with '.'.

        Returns the stored value, or `default` when supplied via kwargs.
        A missing key without a default is fatal (logged and exits).
        """
        full_key = '.'.join(key)
        if full_key not in self.params_root:
            Log.warn('Key: {} not exists'.format(full_key))

        # Fatal only when the key is absent AND no default was provided.
        if full_key not in self.params_root and 'default' not in kwargs:
            Log.error('{} KeyError: {}.'.format(self._get_caller(), full_key))
            exit(1)

        return self.params_root.get(full_key, **kwargs)
    def load_state_dict(module, state_dict, strict=False):
        """Load state_dict to a module.

        This method is modified from :meth:`torch.nn.Module.load_state_dict`.
        Default value for ``strict`` is set to ``False`` and the message for
        param mismatch will be shown even if strict is False.

        Args:
            module (Module): Module that receives the state_dict.
            state_dict (OrderedDict): Weights.
            strict (bool): whether to strictly enforce that the keys
                in :attr:`state_dict` match the keys returned by this module's
                :meth:`~torch.nn.Module.state_dict` function. Default: ``False``.

        Raises:
            RuntimeError: in strict mode, on a copy failure (shape
                mismatch) or on any unexpected/missing/unmatched keys.
        """
        # Strip the 'module.' prefix left by (Distributed)DataParallel.
        # Bug fix: guard against an empty state_dict before peeking at
        # the first key (previously raised IndexError).
        if state_dict and next(iter(state_dict)).startswith('module.'):
            state_dict = {k[7:]: v for k, v in state_dict.items()}

        unexpected_keys = []   # in checkpoint but not in the module
        unmatched_keys = []    # present in both but copy failed (shape)
        own_state = module.state_dict()
        for name, param in state_dict.items():
            if name not in own_state:
                unexpected_keys.append(name)
                continue
            if isinstance(param, torch.nn.Parameter):
                # backwards compatibility for serialized parameters
                param = param.data

            try:
                own_state[name].copy_(param)
            except Exception:
                if strict:
                    raise RuntimeError('While copying the parameter named {}, '
                                       'whose dimensions in the model are {} and '
                                       'whose dimensions in the checkpoint are {}.'
                                       .format(name, own_state[name].size(),
                                               param.size()))
                else:
                    unmatched_keys.append(name)
        missing_keys = set(own_state.keys()) - set(state_dict.keys())

        # Mismatch summary: raised in strict mode, warned otherwise.
        err_msg = []
        if unexpected_keys:
            err_msg.append('unexpected key in source state_dict: {}'.format(', '.join(unexpected_keys)))
        if missing_keys:
            err_msg.append('missing keys in source state_dict: {}'.format(', '.join(missing_keys)))
        if unmatched_keys:
            err_msg.append('unmatched keys in source state_dict: {}'.format(', '.join(unmatched_keys)))
        err_msg = '\n'.join(err_msg)
        if err_msg:
            if strict:
                raise RuntimeError(err_msg)
            else:
                Log.warn(err_msg)
Example #9
0
    def update_performance(self):
        """Refresh the tracked validation performance metric.

        Reads the running score selected by ``save_net_main_key`` /
        ``save_net_metric``, stores it under the 'performance' config key
        and logs when it beats the previous maximum. Best-effort by
        design: any failure is logged as a warning, never raised.
        """
        try:
            rs = self.running_scores[self.save_net_main_key]
            if self.save_net_metric == 'miou':
                perf = rs.get_mean_iou()
            elif self.save_net_metric == 'acc':
                perf = rs.get_pixel_acc()
            else:
                # Bug fix: previously an unknown metric fell through with
                # `perf` unbound — the resulting NameError was silently
                # swallowed below. Fail with an explicit message instead.
                raise ValueError(
                    'Unknown save_net_metric: {}'.format(self.save_net_metric))

            max_perf = self.configer.get('max_performance')
            self.configer.update(['performance'], perf)
            if perf > max_perf:
                Log.info('Performance {} -> {}'.format(max_perf, perf))
        except Exception as e:
            # Deliberate best-effort: metric bookkeeping must not abort
            # the training run.
            Log.warn(e)
Example #10
0
    def __read_file(self, root_dir, dataset, label_path):
        """Parse a label file into image paths and multi-label lists.

        Each line is ``<relative_path> <label_1> ... <label_k>``. Lines
        whose image is missing on disk or is not a valid image file are
        skipped with a warning.

        Returns:
            (img_list, mlabel_list): absolute image paths and their
            integer label lists.
        """
        img_list = []
        mlabel_list = []

        with open(label_path, 'r') as file_stream:
            for line in file_stream:
                items = line.rstrip().split()
                rel_path = items[0]
                abs_path = os.path.join(root_dir, rel_path)
                if not os.path.exists(abs_path) or not ImageHelper.is_img(rel_path):
                    Log.warn('Invalid Image Path: {}'.format(abs_path))
                    continue

                img_list.append(abs_path)
                mlabel_list.append([int(v) for v in items[1:]])

        assert len(img_list) > 0
        Log.info('Length of {} imgs is {}...'.format(dataset, len(img_list)))
        return img_list, mlabel_list
    def __init__(self,
                 args_parser=None,
                 config_file=None,
                 config_dict=None,
                 valid_flag=None):
        """Build the config tree and overlay command-line arguments.

        Sources are tried in priority order: ``config_dict``, then
        ``config_file``, then ``args_parser.config_file``; otherwise an
        empty tree is used. Afterwards every attribute of ``args_parser``
        is added/updated into the tree.

        Args:
            args_parser: Parsed command-line namespace (optional).
            config_file: Path to a config file (optional).
            config_dict: Config as a plain dict (optional; mutually
                exclusive with ``config_file``).
            valid_flag: When set, only CLI keys whose first dotted
                component equals this flag are applied.
        """
        self.params_root = None
        if config_dict is not None:
            assert config_file is None
            self.params_root = ConfigFactory.from_dict(config_dict)

        elif config_file is not None:
            if not os.path.exists(config_file):
                Log.error('Json Path:{} not exists!'.format(config_file))
                exit(1)

            self.params_root = ConfigFactory.parse_file(config_file)

        # Bug fix: previously `'config_file' in args_parser` raised a
        # TypeError when args_parser was None and neither a dict nor a
        # file was supplied; now that case falls through to the warning.
        elif args_parser is not None and 'config_file' in args_parser \
                and args_parser.config_file is not None:
            if not os.path.exists(args_parser.config_file):
                Log.error('Json Path:{} not exists!'.format(
                    args_parser.config_file))
                exit(1)

            self.params_root = ConfigFactory.parse_file(
                args_parser.config_file)

        else:
            Log.warn('Base settings not set!')
            self.params_root = ConfigFactory.from_dict({})

        if args_parser is not None:
            for key, value in args_parser.__dict__.items():
                # Optionally restrict the overlay to keys under valid_flag.
                if valid_flag is not None and key.split('.')[0] != valid_flag:
                    continue

                if key not in self.params_root:
                    self.add(key, value)
                elif value is not None:
                    self.update(key, value)
Example #12
0
    def __read_and_split_file(self, root_dir, dataset, label_path):
        """Read a label file and split it into train/val deterministically.

        Lines are grouped by class label (sorted by label), then every
        ``1 / val_ratio``-th line is assigned to the validation split —
        a stratified, reproducible split without shuffling.
        """
        img_list = []
        mlabel_list = []
        select_interval = int(1 / self.configer.get('data', 'val_ratio'))

        # Group lines by their integer class label.
        img_dict = dict()
        with open(label_path, 'r') as file_stream:
            for line in file_stream:
                label = int(line.strip().split()[1])
                img_dict.setdefault(label, []).append(line)

        # Flatten in sorted-label order so the split is stable.
        all_img_list = []
        for label in sorted(img_dict):
            all_img_list += img_dict[label]

        for line_cnt, line in enumerate(all_img_list):
            selected_for_val = (line_cnt % select_interval == 0)
            if dataset == 'train' and selected_for_val \
                    and not self.configer.get('data', 'include_val'):
                continue
            if dataset == 'val' and not selected_for_val:
                continue

            line_items = line.strip().split()
            path = line_items[0]
            full_path = os.path.join(root_dir, path)
            if not os.path.exists(full_path) or not ImageHelper.is_img(path):
                Log.warn('Invalid Image Path: {}'.format(full_path))
                continue

            img_list.append(full_path)
            mlabel_list.append([int(item) for item in line_items[1:]])

        assert len(img_list) > 0
        Log.info('Length of {} imgs is {} after split trainval...'.format(dataset, len(img_list)))
        return img_list, mlabel_list
    def load_tf_efficientnet_model(model, pretrained=None, strict=False):
        """Load TensorFlow EfficientNet checkpoint weights into `model`.

        Translates TF variable names ('<model_name>/stem/conv2d/kernel',
        '<model_name>/blocks_N/...') into the PyTorch state-dict keys
        ('_conv_stem.weight', '_blocks.N....'), transposing conv kernel
        axes to PyTorch's ordering, then copies the mapped tensors.

        Args:
            model: Target PyTorch EfficientNet module (modified in place).
            pretrained: TF checkpoint path prefix; when None or not found
                on disk, the model is returned untouched.
            strict (bool): raise on copy/shape errors and on key
                mismatches instead of only logging a warning.

        Returns:
            The model, with every parameter that could be mapped loaded.
        """
        if pretrained is None:
            return model

        # TF checkpoints are sharded on disk ('<prefix>.index', ...), so
        # existence is checked by globbing the prefix.
        if len(glob.glob(pretrained + '*')) == 0 or not os.path.exists(
                glob.glob(pretrained + '*')[0]):
            Log.info('{} not exists.'.format(pretrained))
            return model

        import pdb  # NOTE(review): imported but never used — leftover debug aid.
        from tensorflow.python import pywrap_tensorflow
        Log.info('Loading pretrained model:{}'.format(pretrained))
        tf_reader = pywrap_tensorflow.NewCheckpointReader(pretrained)
        # Derive the checkpoint's top-level scope name (e.g. 'efficientnet-b0').
        # NOTE(review): model_name stays unbound if no variable starts with
        # 'efficientnet' — presumably every supported checkpoint has one.
        for tf_key in tf_reader.get_variable_to_shape_map():
            if tf_key.startswith('efficientnet'):
                model_name = tf_key.split('/')[0]
                break
        model_dict = model.state_dict()
        load_dict = dict()
        # [pytorch_suffix, tf_suffix] name pairs for batch-norm parameters.
        bn_list = [['running_mean', 'moving_mean'],
                   ['running_var', 'moving_variance'], ['weight', 'gamma'],
                   ['bias', 'beta']]
        # stem and head block
        for block_name in ['stem', 'head']:
            key = '_conv_{}.weight'.format(block_name)
            tf_key = '{}/{}/conv2d/kernel'.format(model_name, block_name)
            assert (key in model_dict)
            tf_value = tf_reader.get_tensor(tf_key)
            # Reorder TF conv kernel axes to PyTorch's weight layout.
            load_dict[key] = torch.from_numpy(tf_value.transpose(3, 2, 0, 1))
            for bn_name in bn_list:
                key = '_bn_{}.{}'.format(block_name, bn_name[0])
                tf_key = '{}/{}/tpu_batch_normalization/{}'.format(
                    model_name, block_name, bn_name[1])
                assert (key in model_dict)
                tf_value = tf_reader.get_tensor(tf_key)
                load_dict[key] = torch.from_numpy(tf_value)
        # MBConvBlocks
        # Each entry: [[pytorch sub-module names...], tf scope name].
        module_list = [[['_expand_conv', '_project_conv'], 'conv2d'],
                       [['_depthwise_conv'], 'depthwise_conv2d'],
                       [['_se_reduce', '_se_expand'], 'se/conv2d'],
                       [['_expand_bn', '_depthwise_bn', '_project_bn'],
                        'tpu_batch_normalization']]
        mb_block_id = 0
        mb_block_flag = True
        # Walk blocks_0, blocks_1, ... until the depthwise-conv probe below
        # finds a block id that no longer exists in the PyTorch model.
        while mb_block_flag:
            for module in module_list:
                # key_id numbers repeated TF scopes within a block
                # (e.g. 'conv2d', 'conv2d_1').
                key_id = 0
                tf_key_part = module[1]
                for key_part in module[0]:
                    if key_part.startswith('_depthwise_conv'):
                        key = '_blocks.{}.{}.weight'.format(
                            mb_block_id, key_part)
                        tf_key = '{}/blocks_{}/{}/depthwise_kernel'.format(
                            model_name, mb_block_id, tf_key_part)
                        # A missing depthwise key means this block id does
                        # not exist: terminates the outer while loop.
                        if key not in model_dict:
                            mb_block_flag = False
                            Log.info('Ignore parameter: {} <---> {}'.format(
                                key, tf_key))
                            continue
                        tf_value = tf_reader.get_tensor(tf_key)
                        # Depthwise kernels use a different TF axis order
                        # than plain convs — note transpose(2, 3, 0, 1).
                        load_dict[key] = torch.from_numpy(
                            tf_value.transpose(2, 3, 0, 1))
                    elif key_part.endswith('_conv'):
                        key = '_blocks.{}.{}.weight'.format(
                            mb_block_id, key_part)
                        if key_id > 0:
                            tf_key = '{}_{}'.format(tf_key_part, key_id)
                        else:
                            tf_key = '{}'.format(tf_key_part)
                        tf_key = '{}/blocks_{}/{}/kernel'.format(
                            model_name, mb_block_id, tf_key)
                        if key not in model_dict:
                            Log.info('Ignore parameter: {} <---> {}'.format(
                                key, tf_key))
                            continue
                        tf_value = tf_reader.get_tensor(tf_key)
                        load_dict[key] = torch.from_numpy(
                            tf_value.transpose(3, 2, 0, 1))
                        key_id += 1
                    elif key_part.startswith('_se'):
                        # Squeeze-excite convs carry biases as well as kernels.
                        key = '_blocks.{}.{}.weight'.format(
                            mb_block_id, key_part)
                        if key_id > 0:
                            tf_key = '{}_{}'.format(tf_key_part, key_id)
                        else:
                            tf_key = '{}'.format(tf_key_part)
                        tf_key = '{}/blocks_{}/{}/kernel'.format(
                            model_name, mb_block_id, tf_key)
                        if key not in model_dict:
                            Log.info('Ignore parameter: {} <---> {}'.format(
                                key, tf_key))
                            continue
                        tf_value = tf_reader.get_tensor(tf_key)
                        load_dict[key] = torch.from_numpy(
                            tf_value.transpose(3, 2, 0, 1))

                        key = '_blocks.{}.{}.bias'.format(
                            mb_block_id, key_part)
                        if key_id > 0:
                            tf_key = '{}_{}'.format(tf_key_part, key_id)
                        else:
                            tf_key = '{}'.format(tf_key_part)
                        tf_key = '{}/blocks_{}/{}/bias'.format(
                            model_name, mb_block_id, tf_key)
                        tf_value = tf_reader.get_tensor(tf_key)
                        load_dict[key] = torch.from_numpy(tf_value)
                        key_id += 1
                    elif key_part.endswith('_bn'):
                        # bn_flag ensures key_id only advances when at least
                        # one parameter of this batch-norm actually existed.
                        bn_flag = False
                        for bn_name in bn_list:
                            key = '_blocks.{}.{}.{}'.format(
                                mb_block_id, key_part, bn_name[0])
                            if key_id > 0:
                                tf_key = '{}_{}'.format(tf_key_part, key_id)
                            else:
                                tf_key = '{}'.format(tf_key_part)
                            tf_key = '{}/blocks_{}/{}/{}'.format(
                                model_name, mb_block_id, tf_key, bn_name[1])
                            if key not in model_dict:
                                Log.info(
                                    'Ignore parameter: {} <---> {}'.format(
                                        key, tf_key))
                                continue
                            bn_flag = True
                            tf_value = tf_reader.get_tensor(tf_key)
                            load_dict[key] = torch.from_numpy(tf_value)
                        if bn_flag:
                            key_id += 1
            mb_block_id += 1
        # Copy the mapped tensors into the model, collecting mismatches
        # (same pattern as the state-dict loader earlier in this file).
        unexpected_keys = []
        unmatched_keys = []
        own_state = model.state_dict()
        for name, param in load_dict.items():
            if name not in own_state:
                unexpected_keys.append(name)
                continue
            if isinstance(param, torch.nn.Parameter):
                # backwards compatibility for serialized parameters
                param = param.data

            try:
                own_state[name].copy_(param)
            except Exception:
                if strict:
                    raise RuntimeError(
                        'While copying the parameter named {}, '
                        'whose dimensions in the model are {} and '
                        'whose dimensions in the checkpoint are {}.'.format(
                            name, own_state[name].size(), param.size()))
                else:
                    unmatched_keys.append(name)
        missing_keys = set(own_state.keys()) - set(load_dict.keys())

        # Mismatch summary: raised in strict mode, warned otherwise.
        err_msg = []
        if unexpected_keys:
            err_msg.append('unexpected key in source state_dict: {}'.format(
                ', '.join(unexpected_keys)))
        if missing_keys:
            err_msg.append('missing keys in source state_dict: {}'.format(
                ', '.join(missing_keys)))
        if unmatched_keys:
            err_msg.append('unmatched keys in source state_dict: {}'.format(
                ', '.join(unmatched_keys)))
        err_msg = '\n'.join(err_msg)
        if err_msg:
            if strict:
                raise RuntimeError(err_msg)
            else:
                Log.warn(err_msg)
        return model