Example #1
from src.mobilenetV2 import MobileNetV2Backbone, MobileNetV2Head, mobilenet_v2


def define_net(config, is_training):
    """Build the MobileNetV2 backbone, the classification head and the full network."""
    backbone_net = MobileNetV2Backbone()
    # Use the configured activation (e.g. Softmax) only at inference time; during
    # training the loss applies softmax itself, so the head activation stays "None".
    activation = config.activation if not is_training else "None"
    head_net = MobileNetV2Head(input_channel=backbone_net.out_channels,
                               num_classes=config.num_classes,
                               activation=activation)
    net = mobilenet_v2(backbone_net, head_net)
    return backbone_net, head_net, net


def create_network(name, *args, **kwargs):
    """Create MobileNetV2 by name, with or without the classification head."""
    if name == "mobilenetv2":
        backbone_net = MobileNetV2Backbone()
        include_top = kwargs["include_top"]
        if include_top is None:
            include_top = True
        if include_top:
            activation = kwargs["activation"]
            head_net = MobileNetV2Head(input_channel=backbone_net.out_channels,
                                       num_classes=int(kwargs["num_classes"]),
                                       activation=activation)
            net = mobilenet_v2(backbone_net, head_net)
            return net
        # include_top=False: return only the feature-extraction backbone.
        return backbone_net
    raise NotImplementedError(f"{name} is not implemented in the repo")
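A minimal usage sketch, not taken from the repo: the `cfg` object built with SimpleNamespace and its field values are assumptions chosen only to show how the two entry points above would be called.

# Illustrative only; cfg and its field values are assumptions, not repo code.
from types import SimpleNamespace

cfg = SimpleNamespace(num_classes=1000, activation="Softmax")
backbone, head, full_net = define_net(cfg, is_training=True)        # training: head activation is "None"

net = create_network("mobilenetv2", include_top=True,
                     activation="Softmax", num_classes=1000)         # full classifier
features_only = create_network("mobilenetv2", include_top=False)     # backbone only, no head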
Example #3
def define_net(args, config):
    """Build backbone, head and full MobileNetV2, optionally initialized from a checkpoint."""
    backbone_net = MobileNetV2Backbone(platform=args.platform)
    head_net = MobileNetV2Head(input_channel=backbone_net.out_channels,
                               num_classes=config.num_classes)
    net = mobilenet_v2(backbone_net, head_net)

    # Load the checkpoint into the network for fine-tuning or incremental learning.
    if args.pretrain_ckpt:
        if args.train_method == "fine_tune":
            # Fine-tune: initialize the full network (backbone + head) from the checkpoint.
            load_ckpt(net, args.pretrain_ckpt)
        elif args.train_method == "incremental_learn":
            # Incremental learning: load only the backbone and freeze its parameters.
            load_ckpt(backbone_net, args.pretrain_ckpt, trainable=False)
        elif args.train_method == "train":
            pass
        else:
            raise ValueError("train_method must be 'fine_tune', 'incremental_learn' or 'train' "
                             "when pretrain_ckpt is given")

    return backbone_net, head_net, net
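A hedged sketch, not taken from the repo, of the command-line wiring that could drive this variant; the flag names and the SimpleNamespace stand-in for `config` are assumptions chosen to match the attributes the function reads.

# Illustrative CLI wiring; argument and config names mirror the attributes used above.
import argparse
from types import SimpleNamespace

parser = argparse.ArgumentParser(description='MobileNetV2 training entry (illustrative)')
parser.add_argument('--platform', type=str, default='GPU', choices=['Ascend', 'GPU', 'CPU'])
parser.add_argument('--pretrain_ckpt', type=str, default='',
                    help='checkpoint to initialize from; empty means train from scratch')
parser.add_argument('--train_method', type=str, default='train',
                    choices=['train', 'fine_tune', 'incremental_learn'])
args = parser.parse_args()

config = SimpleNamespace(num_classes=1000)  # stand-in for the repo's config object
backbone_net, head_net, net = define_net(args, config)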
Example #4
        print(
            "epoch: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:5.3f}/{:5.3f}], time:[{:5.3f}], lr:[{:5.3f}]"
            .format(cb_params.cur_epoch_num - 1, cb_params.epoch_num,
                    cur_step_in_epoch, cb_params.batch_num, step_loss,
                    np.mean(self.losses), step_mseconds,
                    self.lr_init[cb_params.cur_step_num - 1]))


if __name__ == '__main__':
    if args_opt.platform == "GPU":
        # train on gpu
        print("train args: ", args_opt)
        print("cfg: ", config_gpu)

        # define net
        net = mobilenet_v2(num_classes=config_gpu.num_classes, platform="GPU")
        # define loss
        if config_gpu.label_smooth > 0:
            loss = CrossEntropyWithLabelSmooth(
                smooth_factor=config_gpu.label_smooth,
                num_classes=config_gpu.num_classes)
        else:
            loss = SoftmaxCrossEntropyWithLogits(is_grad=False,
                                                 sparse=True,
                                                 reduction='mean')
        # define dataset
        epoch_size = config_gpu.epoch_size
        dataset = create_dataset(dataset_path=args_opt.dataset_path,
                                 do_train=True,
                                 config=config_gpu,
                                 platform=args_opt.platform,
Example #5
        cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num

        print("epoch: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:5.3f}/{:5.3f}], time:[{:5.3f}], lr:[{:5.3f}]".format(
            cb_params.cur_epoch_num -
            1, cb_params.epoch_num, cur_step_in_epoch, cb_params.batch_num, step_loss,
            np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1]))


if __name__ == '__main__':
    if args_opt.device_target == "GPU":
        # train on gpu
        print("train args: ", args_opt)
        print("cfg: ", config_gpu)

        # define network
        net = mobilenet_v2(num_classes=config_gpu.num_classes, device_target="GPU")
        # define loss
        if config_gpu.label_smooth > 0:
            loss = CrossEntropyWithLabelSmooth(smooth_factor=config_gpu.label_smooth,
                                               num_classes=config_gpu.num_classes)
        else:
            loss = SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction='mean')
        # define dataset
        epoch_size = config_gpu.epoch_size
        dataset = create_dataset(dataset_path=args_opt.dataset_path,
                                 do_train=True,
                                 config=config_gpu,
                                 device_target=args_opt.device_target,
                                 repeat_num=1,
                                 batch_size=config_gpu.batch_size)
        step_size = dataset.get_dataset_size()
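The excerpt stops after the dataset and step count are built. Below is a hedged sketch of how such a GPU training branch typically continues with MindSpore's Model API; the optimizer hyper-parameters, checkpoint prefix and callback choices are illustrative assumptions, not values from the snippet.

# Illustrative continuation only; learning rate, momentum and weight decay are assumed values.
from mindspore import nn
from mindspore.train.model import Model
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor

opt = nn.Momentum(filter(lambda p: p.requires_grad, net.get_parameters()),
                  learning_rate=0.1, momentum=0.9, weight_decay=4e-5)
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'})

ckpt_cb = ModelCheckpoint(prefix="mobilenetv2",
                          config=CheckpointConfig(save_checkpoint_steps=step_size))
model.train(epoch_size, dataset, callbacks=[ckpt_cb, TimeMonitor(data_size=step_size)])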
Example #6
"""
export .mindir format file for MindSpore Lite reasoning.
"""
from mindspore.train.serialization import export, load_checkpoint, load_param_into_net
from mindspore import Tensor
from src.mobilenetV2 import MobileNetV2Backbone, MobileNetV2Head, mobilenet_v2
import numpy as np
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Export a .mindir model file on the training side.')
    parser.add_argument('--platform', type=str, default='GPU', choices=['Ascend', 'GPU', 'CPU'],
                        help='run platform, only supports CPU, GPU and Ascend')
    parser.add_argument('--ckpt_path', type=str, required=True,
                        help="pretrained checkpoint path, e.g. './mobilenetV2-10_1562.ckpt'")
    parser.add_argument('--mindir_name', type=str, default='mobilenetv2.mindir',
                        help='.mindir model file name')
    args = parser.parse_args()
    backbone_net = MobileNetV2Backbone()
    head_net = MobileNetV2Head(input_channel=backbone_net.out_channels,
                               num_classes=10,
                               activation="Softmax")
    mobilenet = mobilenet_v2(backbone_net, head_net)
    # load_checkpoint returns a dict of parameter name -> Parameter
    param_dict = load_checkpoint(args.ckpt_path)
    # load the parameters into the network
    load_param_into_net(mobilenet, param_dict)
    # dummy NCHW input matching the expected batch size and image shape
    input_data = np.random.uniform(0.0, 1.0, size=[32, 3, 224, 224]).astype(np.float32)
    export(mobilenet, Tensor(input_data), file_name=args.mindir_name, file_format='MINDIR')
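An assumed way to drive and sanity-check the script: the script name export.py is a guess, and the reload below assumes a MindSpore release that provides mindspore.load and nn.GraphCell.

# Example invocation (script name assumed):
#   python export.py --ckpt_path ./mobilenetV2-10_1562.ckpt --mindir_name mobilenetv2.mindir

# Optional sanity check: reload the exported MINDIR graph and run a dummy batch.
import numpy as np
import mindspore as ms
import mindspore.nn as nn

graph = ms.load('mobilenetv2.mindir')          # parse the exported graph
infer_net = nn.GraphCell(graph)                # wrap it so it can be called like a Cell
dummy = ms.Tensor(np.ones([32, 3, 224, 224], np.float32))
print(infer_net(dummy).shape)                  # expected (32, 10): one softmax row per image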