Example no. 1
# Note: `mu` below is a project-local utilities module from HeartDeepLearning.
from collections import OrderedDict
import mxnet as mx

def fetch_internal(net, val, perfix, epoch, is_rnn=False):
    def verify(name):

        exclude = [
            'weight', 'bias', 'gamma', 'beta', 'blockgrad', 'data', 'label'
        ]

        for _ in exclude:
            if _ in name:
                print 'Abandoned:', name
                return False
        # if name.startswith('_'):
        #    return False
        for _ in ['c', 'h']:
            if name == _:
                print 'Abandoned:', name
                return False
        return True

    net = net.get_internals()
    print '\n', net.list_outputs(), '\n'
    features = [
        net[i] for i in range(len(net.list_outputs())) if verify(net[i].name)
    ]
    names = [_.name for _ in features]
    net = mx.sym.Group(features)

    from mxnet.model import load_checkpoint

    sym, arg, aux = load_checkpoint(perfix, epoch)
    if not is_rnn:
        model = mx.model.FeedForward(net,
                                     ctx=mu.gpu(1),
                                     num_epoch=1,
                                     begin_epoch=0)
    else:
        from HeartDeepLearning.RNN import rnn_feed
        model = rnn_feed.Feed(net, ctx=mu.gpu(1), num_epoch=1, begin_epoch=0)

    shape = OrderedDict(val.provide_data + val.provide_label)
    model._init_params(shape)
    model.arg_params.update(arg)
    model.aux_params.update(aux)
    print '\nStart Predict'
    outputs, img, label = mu.predict_draw(model, val)
    outputs = dict(zip(names, outputs))

    arg = {k: v.asnumpy() for k, v in arg.items()}
    aux = {k: v.asnumpy() for k, v in aux.items()}

    for _ in ['outputs', 'arg', 'aux']:
        print '\nKey of %s' % _
        o = locals()[_]
        for k in o:
            print k, o[k].shape, o[k].mean(), o[k].std()

    print 'Done'
    return outputs, img, label, arg, aux
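For reference, here is a minimal, self-contained sketch of the load_checkpoint call the example above builds on. The prefix 'net' and epoch 10 are hypothetical placeholders for a checkpoint saved with mx.model.save_checkpoint (files "net-symbol.json" and "net-0010.params").

# Minimal sketch with hypothetical paths: load_checkpoint returns the symbol
# plus two dicts of NDArrays (trainable arguments and auxiliary states).
from mxnet.model import load_checkpoint

sym, arg_params, aux_params = load_checkpoint('net', 10)
print(sym.list_outputs()[-1])
print(len(arg_params), len(aux_params))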
Example no. 3
    def load(prefix, epoch, load_optimizer_states=False, **kwargs):
        """Create a model from previously saved checkpoint.

        Parameters
        ----------
        prefix : str
            path prefix of saved model files. You should have
            "prefix-symbol.json", "prefix-xxxx.params", and
            optionally "prefix-xxxx.states", where xxxx is the
            epoch number.
        epoch : int
            epoch to load.
        load_optimizer_states : bool
            whether to load optimizer states. Checkpoint needs
            to have been made with save_optimizer_states=True.
        data_names : list of str
            Default is `('data')` for a typical model used in image classification.
        label_names : list of str
            Default is `('softmax_label')` for a typical model used in image
            classification.
        logger : Logger
            Default is `logging`.
        context : Context or list of Context
            Default is `cpu()`.
        work_load_list : list of number
            Default `None`, indicating uniform workload.
        fixed_param_names: list of str
            Default `None`, indicating no network parameters are fixed.
        """
        sym, args, auxs = load_checkpoint(prefix, epoch)
        # if loading old trained model
        new_args = copy.deepcopy(args)
        for key in args.keys():
            if "i2r" in key:
                new_key = key.replace("i2r_", "")
                print(key, new_key)
                new_args.pop(key)
                new_args[new_key] = args[key]

        mod = Module(symbol=sym, **kwargs)
        mod._arg_params = new_args
        mod._aux_params = auxs
        mod.params_initialized = True
        if load_optimizer_states:
            mod._preload_opt_states = "%s-%04d.states" % (prefix, epoch)
        return mod
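The block above migrates an older checkpoint by stripping an "i2r_" prefix from the argument names so they match the current symbol. A standalone sketch of that renaming step, with made-up keys:

# Standalone sketch of the key migration above; the keys are made up.
old_args = {'i2r_fc1_weight': 1, 'i2r_fc1_bias': 2, 'conv0_weight': 3}
new_args = {k.replace('i2r_', ''): v for k, v in old_args.items()}
print(new_args)  # {'fc1_weight': 1, 'fc1_bias': 2, 'conv0_weight': 3}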
Example no. 5
    def load(prefix, epoch, load_optimizer_states=False, symbol=None, **kwargs):
        """Creates a model from previously saved checkpoint.

        Parameters
        ----------
        prefix : str
            path prefix of saved model files. You should have
            "prefix-symbol.json", "prefix-xxxx.params", and
            optionally "prefix-xxxx.states", where xxxx is the
            epoch number.
        epoch : int
            epoch to load.
        load_optimizer_states : bool
            whether to load optimizer states. Checkpoint needs
            to have been made with save_optimizer_states=True.
        data_names : list of str
            Default is `('data')` for a typical model used in image classification.
        label_names : list of str
            Default is `('softmax_label')` for a typical model used in image
            classification.
        logger : Logger
            Default is `logging`.
        context : Context or list of Context
            Default is ``cpu()``.
        work_load_list : list of number
            Default ``None``, indicating uniform workload.
        fixed_param_names: list of str
            Default ``None``, indicating no network parameters are fixed.
        """
        sym, args, auxs = load_checkpoint(prefix, epoch)
        sym = sym if symbol is None else symbol
        mod = CustomModule(symbol=sym, **kwargs)
        mod._arg_params = args
        mod._aux_params = auxs
        mod.params_initialized = True
        if load_optimizer_states:
            mod._preload_opt_states = '%s-%04d.states'%(prefix, epoch)
        return mod
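This variant mirrors the stock mx.mod.Module.load classmethod. A hedged usage sketch, assuming a checkpoint saved under the hypothetical prefix 'model' at epoch 20 and a 1x3x224x224 input:

# Hedged usage sketch; the prefix, epoch and input shape are assumptions.
import mxnet as mx

mod = mx.mod.Module.load('model', 20, load_optimizer_states=False,
                         data_names=('data',), label_names=None,
                         context=mx.cpu())
mod.bind(data_shapes=[('data', (1, 3, 224, 224))], for_training=False)
batch = mx.io.DataBatch(data=[mx.nd.zeros((1, 3, 224, 224))])
mod.forward(batch, is_train=False)
print(mod.get_outputs()[0].shape)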
Example no. 6
download(model_url, model_file_path)
download(image_url, test_image_path)
download(inference_symbol_url, inference_symbol_path)

zip_ref = zipfile.ZipFile(model_file_path, 'r')
zip_ref.extractall(dir)
zip_ref.close()
zip_ref = zipfile.ZipFile(inference_symbol_path)
zip_ref.extractall(dir)
zip_ref.close()

######################################################################
# Convert and compile model with NNVM or Relay for CPU.

sym = mx.sym.load("%s/%s/ssd_resnet50_inference.json" % (dir, inference_symbol_folder))
_, arg_params, aux_params = load_checkpoint("%s/%s" % (dir, model_name), 0)

import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
    "-f", "--frontend",
    help="Frontend for compilation, nnvm or relay",
    type=str,
    default="nnvm")
args = parser.parse_args()
if args.frontend == "relay":
    net, params = relay.frontend.from_mxnet(sym, {"data": dshape}, arg_params=arg_params, aux_params=aux_params)
    with relay.build_config(opt_level=3):
        graph, lib, params = relay.build(net, target, params=params)
elif args.frontend == "nnvm":
    net, params = from_mxnet(sym, arg_params, aux_params)
Example no. 7
download(model_url, model_file_path)
download(image_url, test_image_path)
download(inference_symbol_url, inference_symbol_path)

zip_ref = zipfile.ZipFile(model_file_path, 'r')
zip_ref.extractall(dir)
zip_ref.close()
zip_ref = zipfile.ZipFile(inference_symbol_path)
zip_ref.extractall(dir)
zip_ref.close()

######################################################################
# Convert and compile model with NNVM for CPU.

sym = mx.sym.load("%s/%s/ssd_resnet50_inference.json" % (dir, inference_symbol_folder))
_, arg_params, aux_params = load_checkpoint("%s/%s" % (dir, model_name), 0)
net, params = from_mxnet(sym, arg_params, aux_params)
with compiler.build_config(opt_level=3):
    graph, lib, params = compiler.build(net, target, {"data": dshape}, params=params)

######################################################################
# Create TVM runtime and do inference

# Preprocess image: resize to the network input size, convert BGR->RGB,
# subtract the per-channel mean, transpose HWC->CHW, and add a batch axis
image = cv2.imread(test_image_path)
img_data = cv2.resize(image, (dshape[2], dshape[3]))
img_data = img_data[:, :, (2, 1, 0)].astype(np.float32)
img_data -= np.array([123, 117, 104])
img_data = np.transpose(np.array(img_data), (2, 0, 1))
img_data = np.expand_dims(img_data, axis=0)
# Build TVM runtime
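The listing cuts off here; a hedged sketch of the runtime step that typically follows, using the older graph_runtime API and assuming graph, lib, params and img_data come from the code above:

# Hedged sketch (older nnvm/TVM graph_runtime API), not part of the original listing.
import tvm
from tvm.contrib import graph_runtime

ctx = tvm.cpu()
m = graph_runtime.create(graph, lib, ctx)
m.set_input('data', tvm.nd.array(img_data.astype('float32')))
m.set_input(**params)
m.run()
tvm_output = m.get_output(0)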
Example no. 8
inf_json = "deploy_ssd_inceptionv3_512/deploy_ssd_inceptionv3_512-symbol.json"
#inf_json = "deploy_ssd_vgg16_reduced_512/deploy_ssd_vgg16_reduced_512-symbol.json"
#inf_json = "deploy_ssd_vgg16_reduced_300/deploy_ssd_vgg16_reduced_300-symbol.json"
#inf_json = "deploy_ssd_mobilenet_512/deploy_ssd_mobilenet_512-symbol.json"
#inf_json = "deploy_ssd_mobilenet_608/deploy_ssd_mobilenet_608-symbol.json"
print("mx.sym.load: " + inf_json)
sym = mx.sym.load(inf_json)

#checkp = "deploy_ssd_resnet50_512/deploy_ssd_resnet50_512"
checkp = "deploy_ssd_inceptionv3_512/deploy_ssd_inceptionv3_512"
#checkp = "deploy_ssd_vgg16_reduced_512/deploy_ssd_vgg16_reduced_512"
#checkp = "deploy_ssd_vgg16_reduced_300/deploy_ssd_vgg16_reduced_300"
#checkp = "deploy_ssd_mobilenet_512/deploy_ssd_mobilenet_512"
#checkp = "deploy_ssd_mobilenet_608/deploy_ssd_mobilenet_608"
print("load_checkpoint: " + checkp)
_, arg_params, aux_params = load_checkpoint(checkp, 0)

import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-f",
                    "--frontend",
                    help="Frontend for compilation, nnvm or relay",
                    type=str,
                    default="nnvm")
args = parser.parse_args()
if args.frontend == "relay":
    net, params = relay.frontend.from_mxnet(sym, {"data": dshape}, arg_params=arg_params, \
                                            aux_params=aux_params)
    with relay.build_config(opt_level=3):
        graph, lib, params = relay.build(net,
                                         target,
Example no. 9
    def test_ssd():
        model_name = "ssd_resnet50_512"
        model_file = "%s.zip" % model_name
        test_image = "dog.jpg"
        dshape = (1, 3, 512, 512)

        ######################################################################
        # Download MXNet SSD pre-trained model and demo image
        # ---------------------------------------------------
        # Pre-trained model available at
        # https://github.com/apache/incubator-mxnet/tree/master/example/ssd

        model_url = "https://github.com/zhreshold/mxnet-ssd/releases/download/v0.6/" \
                    "resnet50_ssd_512_voc0712_trainval.zip"
        image_url = "https://cloud.githubusercontent.com/assets/3307514/20012567/" \
                    "cbb60336-a27d-11e6-93ff-cbc3f09f5c9e.jpg"
        inference_symbol_folder = "c1904e900848df4548ce5dfb18c719c7-a28c4856c827fe766aa3da0e35bad41d44f0fb26"
        inference_symbol_url = "https://gist.github.com/kevinthesun/c1904e900848df4548ce5dfb18c719c7/" \
                               "archive/a28c4856c827fe766aa3da0e35bad41d44f0fb26.zip"

        dir = "ssd_model"
        if not os.path.exists(dir):
            os.makedirs(dir)
        model_file_path = "%s/%s" % (dir, model_file)
        test_image_path = "%s/%s" % (dir, test_image)
        inference_symbol_path = "%s/inference_model.zip" % dir
        download(model_url, model_file_path)
        download(image_url, test_image_path)
        download(inference_symbol_url, inference_symbol_path)

        zip_ref = zipfile.ZipFile(model_file_path, 'r')
        zip_ref.extractall(dir)
        zip_ref.close()
        zip_ref = zipfile.ZipFile(inference_symbol_path)
        zip_ref.extractall(dir)
        zip_ref.close()

        ######################################################################
        # Convert and compile model with NNVM for CPU.
        sym = mx.sym.load("%s/%s/ssd_resnet50_inference.json" %
                          (dir, inference_symbol_folder))
        _, arg_params, aux_params = load_checkpoint(
            "%s/%s" % (dir, model_name), 0)
        net, params = from_mxnet(sym, arg_params, aux_params)

        shape_dict = {"data": dshape}
        with nnvm.compiler.build_config(opt_level=3):
            image, tvm_output = heterogeneous_ssd(net, ['nms'],
                                                  shape_dict,
                                                  params, test_image_path)

        #####################################################################

        # Display result

        class_names = ["aeroplane", "bicycle", "bird", "boat", "bottle", "bus",
                       "car", "cat", "chair",
                       "cow", "diningtable", "dog", "horse", "motorbike",
                       "person", "pottedplant",
                       "sheep", "sofa", "train", "tvmonitor"]

        def display(img, out, thresh=0.5):
            import random
            import matplotlib as mpl
            import matplotlib.pyplot as plt
            mpl.rcParams['figure.figsize'] = (10, 10)
            pens = dict()
            plt.clf()
            plt.imshow(img)
            for det in out:
                cid = int(det[0])
                if cid < 0:
                    continue
                score = det[1]
                if score < thresh:
                    continue
                if cid not in pens:
                    pens[cid] = (random.random(),
                                 random.random(), random.random())
                scales = [img.shape[1], img.shape[0]] * 2
                xmin, ymin, xmax, ymax = [
                    int(p * s) for p, s in zip(det[2:6].tolist(), scales)]
                rect = plt.Rectangle((xmin, ymin), xmax - xmin, ymax - ymin,
                                     fill=False,
                                     edgecolor=pens[cid], linewidth=3)
                plt.gca().add_patch(rect)
                text = class_names[cid]
                plt.gca().text(xmin, ymin - 2,
                               '{:s} {:.3f}'.format(text, score),
                               bbox=dict(facecolor=pens[cid], alpha=0.5),
                               fontsize=12, color='white')
            plt.show()

        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        display(image, tvm_output.asnumpy()[0], thresh=0.45)
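Each row of tvm_output decoded by display() above is [class_id, score, xmin, ymin, xmax, ymax], with the box coordinates normalized to [0, 1]. A tiny standalone sketch of that decoding, with made-up values:

# Standalone sketch of decoding one detection row; the numbers are made up.
det = [11.0, 0.92, 0.10, 0.20, 0.60, 0.80]   # [cid, score, xmin, ymin, xmax, ymax]
w, h = 512, 512                               # hypothetical image width/height
cid, score = int(det[0]), det[1]
xmin, ymin, xmax, ymax = [int(p * s) for p, s in zip(det[2:6], [w, h, w, h])]
print(cid, score, (xmin, ymin, xmax, ymax))   # 11 0.92 (51, 102, 307, 409)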