# Example 1
    def test_mask_rcnn(self):
        """Convert the Mask R-CNN Keras model to ONNX and check that
        onnxruntime reproduces the Keras predictions on a sample image.

        Relies on module-level state defined elsewhere in the file:
        ``model`` (the Mask R-CNN wrapper), ``tmp_path``, and the
        converter-registration helpers.  The saved model file is kept on
        disk when the comparison fails, for later diagnosis.
        """
        # CropAndResize has no built-in converter; register the custom one
        # before converting the graph.
        set_converter('CropAndResize', convert_tf_crop_and_resize)
        onnx_model = keras2onnx.convert_keras(model.keras_model)

        import skimage
        img_path = os.path.join(os.path.dirname(__file__), '../data',
                                'street.jpg')
        image = skimage.io.imread(img_path)
        images = [image]
        case_name = 'mask_rcnn'

        # Persist the converted model so onnxruntime can load it from disk.
        if not os.path.exists(tmp_path):
            os.mkdir(tmp_path)
        temp_model_file = os.path.join(tmp_path, 'temp_' + case_name + '.onnx')
        onnx.save_model(onnx_model, temp_model_file)
        try:
            import onnxruntime
            sess = onnxruntime.InferenceSession(temp_model_file)
        except ImportError:
            # onnxruntime is an optional dependency; treat its absence as a
            # (vacuous) pass rather than a failure.
            return True

        # preprocessing: resize/pad the image and build the anchor grid,
        # exactly as the Keras model expects (mold_inputs mirrors detect()).
        molded_images, image_metas, windows = model.mold_inputs(images)
        anchors = model.get_anchors(molded_images[0].shape)
        anchors = np.broadcast_to(anchors,
                                  (model.config.BATCH_SIZE, ) + anchors.shape)

        # Reference output straight from the original Keras model.
        expected = model.keras_model.predict([
            molded_images.astype(np.float32),
            image_metas.astype(np.float32), anchors
        ])

        actual = \
            sess.run(None, {"input_image": molded_images.astype(np.float32),
                            "input_anchors": anchors,
                            "input_image_meta": image_metas.astype(np.float32)})

        # Only outputs 0 and 3 are compared; the tolerances are loose enough
        # to absorb float32 conversion noise.
        rtol = 1.e-3
        atol = 1.e-6
        compare_idx = [0, 3]
        res = all(
            np.allclose(expected[n_], actual[n_], rtol=rtol, atol=atol)
            for n_ in compare_idx)
        if res and temp_model_file not in self.model_files:  # still keep the failed case files for the diagnosis.
            self.model_files.append(temp_model_file)
        if not res:
            # Dump the mismatching elements to aid debugging before failing.
            for n_ in compare_idx:
                expected_list = expected[n_].flatten()
                actual_list = actual[n_].flatten()
                print_mismatches(case_name, n_, expected_list, actual_list,
                                 atol, rtol)

        self.assertTrue(res)
    container.add_node("NonMaxSuppression",
                       [box_batch, score_batch, max_output_size, iou_threshold, score_threshold],
                       cast_name,
                       op_version=operator.target_opset,
                       name=nms_node.name)

    cast_batch = scope.get_unique_variable_name(operator.output_full_names[2] + '_btc')
    container.add_node("Unsqueeze", cast_name,
                       cast_batch, op_version=operator.target_opset, axes=[0])
    apply_cast(scope, cast_batch, operator.output_full_names[2], container, to=onnx_proto.TensorProto.INT32)

    apply_identity(scope, box_batch, operator.output_full_names[0], container)
    apply_identity(scope, score_batch, operator.output_full_names[1], container)


# Use the custom NMS converter for the YOLO non-max-suppression layer.
set_converter(YOLONMSLayer, convert_NMSLayer)

# Graph handles for the tiny-YOLO pipeline; these start as None and are
# expected to be populated before combine_model_tiny (below) is traced.
yolo_model_graph_tiny = None
evaluation_model_graph_tiny = None
nms_model_graph_tiny = None
num_classes = 20  # presumably the PASCAL VOC class count — TODO confirm

@Graph.trace(
    input_types=[_Ty.F(shape=['N', 3, 'M1', 'M2']), _Ty.F(shape=['N', 2])],
    output_types=[_Ty.F(shape=[1, 'M1', 4]), _Ty.F(shape=[1, num_classes, 'M2']), _Ty.I32(shape=[1, 'M3', 3])],
    outputs=["yolonms_layer_1", "yolonms_layer_1_1", "yolonms_layer_1_2"])
def combine_model_tiny(input_1, image_shape):
    global yolo_model_graph_tiny
    global evaluation_model_graph_tiny
    global nms_model_graph_tiny
    output_1 = yolo_model_graph_tiny(input_1)
# Example 3

def on_GatherNd(ctx, node, name, args):
    """tf2onnx rewrite hook: retarget a TF ``GatherNd`` node to the
    ``GatherND`` contrib op in the ``com.microsoft`` domain."""
    node.domain = "com.microsoft"
    node.type = "GatherND"


# Maps a TF op name to its (conversion handler, extra args) pair; handed to
# tf2onnx as the custom-op conversion table.
tf2onnx_contrib_op_conversion = {
        'GatherNd': (on_GatherNd, []),
        'CropAndResize': (on_CropAndResize, []),
        'Pad': (on_Pad, []),
        'PadV2': (on_Pad, [])  # PadV2 reuses the plain Pad handler
    }


# Register custom keras2onnx converters for the Mask R-CNN custom layers.
set_converter(DetectionLayer, convert_DetectionLayer)
set_converter(BatchNorm, convert_BatchNorm)


# Run detection
class_names = ['BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
               'bus', 'train', 'truck', 'boat', 'traffic light',
               'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird',
               'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear',
               'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
               'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
               'kite', 'baseball bat', 'baseball glove', 'skateboard',
               'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup',
               'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
               'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
               'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed',
# Example 4
    create_onnx_node(scope, operator, container, 'PyramidROIAlign')


def convert_DetectionTargetLayer(scope, operator, container):
    """Emit a custom 'DetectionTargetLayer' ONNX node for this operator."""
    node_type = 'DetectionTargetLayer'
    create_onnx_node(scope, operator, container, node_type)


def convert_DetectionLayer(scope, operator, container):
    """Emit a custom 'DetectionLayer' ONNX node for this operator."""
    node_type = 'DetectionLayer'
    create_onnx_node(scope, operator, container, node_type)


def convert_BatchNorm(scope, operator, container):
    """Delegate BatchNorm conversion to the stock Keras batch-norm handler."""
    return convert_keras_batch_normalization(scope, operator, container)


# Register a custom converter for every Mask R-CNN custom Keras layer so
# keras2onnx knows how to translate them.
set_converter(ProposalLayer, convert_ProposalLayer)
set_converter(PyramidROIAlign, convert_PyramidROIAlign)
set_converter(DetectionTargetLayer, convert_DetectionTargetLayer)
set_converter(DetectionLayer, convert_DetectionLayer)
set_converter(BatchNorm, convert_BatchNorm)

# Convert the full Keras graph and write the ONNX model next to the script.
oml = keras2onnx.convert_keras(model.keras_model)
onnx.save_model(oml, './mrcnn.onnx')

# class_names = ['BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
#                'bus', 'train', 'truck', 'boat', 'traffic light',
#                'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird',
#                'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear',
#                'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
#                'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
#                'kite', 'baseball bat', 'baseball glove', 'skateboard',
# Example 5
        ('_end', oopb.int64, np.array([DETECTION_MAX_INSTANCES],
                                      dtype='int64')),
        ('_axes', oopb.int64, np.array([0], dtype='int64'))
    ], nms_node.name + '_detection_final')

    attrs = {'axes': [0]}
    container.add_node("Unsqueeze",
                       detection_final,
                       operator.output_full_names[0],
                       op_version=operator.target_opset,
                       name=nms_node.name + '_concat_unsqueeze',
                       **attrs)
    # output shape: [1, num_top_K, 6]


# Register custom keras2onnx converters for the Mask R-CNN custom layers.
set_converter(DetectionLayer, convert_DetectionLayer)
set_converter(BatchNorm, convert_BatchNorm)

# Run detection
class_names = [
    'BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
    'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',
    'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
    'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag',
    'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball', 'kite',
    'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
    'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon',
    'bowl', 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot',
    'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', 'potted plant',
    'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
    'keyboard', 'cell phone', 'microwave', 'oven', 'toaster', 'sink',
# Example 6
                d = InstanceNormalization()(d)
            return d

        img = Input(shape=self.img_shape)

        d1 = d_layer(img, self.df, normalization=False)
        d2 = d_layer(d1, self.df*2)
        d3 = d_layer(d2, self.df*4)
        d4 = d_layer(d3, self.df*8)

        validity = Conv2D(1, kernel_size=4, strides=1, padding='same')(d4)

        return Model(img, validity)


# keras_contrib's InstanceNormalization is not known to keras2onnx; register
# the custom converter for it.
set_converter(keras_contrib.layers.InstanceNormalization, convert_InstanceNormalizationLayer)


class TestDiscoGAN(unittest.TestCase):

    def setUp(self):
        self.model_files = []

    def tearDown(self):
        for fl in self.model_files:
            os.remove(fl)

    def test_DiscoGAN(self):
        keras_model = DiscoGAN().combined
        batch = 5
        x = np.random.rand(batch, 128, 128, 3).astype(np.float32)