Code Example #1
    def process(item):
        i, image, label = item  # unpack (index, image, label); tuple parameters are not valid Python 3 syntax
        image1_caffe = image.reshape(1, *image.shape)
        data = np.asarray([ image1_caffe ])
        out = net.forward_all(data=data, blobs=[ args.layer ])
        pt1 = utils.flat_shape(out[args.layer][0])

        diffs = []
        dists = []
        for tr in trs:
            f, name = tr['f'], tr['name']

            image2 = f(image)
            image2_caffe = image2.reshape(1, *image.shape)
            data = np.asarray([ image2_caffe ])
            out = net.forward_all(data=data, blobs=[ args.layer ])
            pt2 = utils.flat_shape(out[args.layer][0])

            # difference vector and Euclidean distance between the two feature vectors
            norm = np.linalg.norm(pt1) * np.linalg.norm(pt2)  # cosine denominator; not used below
            diff = pt2 - pt1
            dist = np.linalg.norm(diff)

            diffs.append( (label, name, diff) )
            dists.append( (label, name, dist) )

        return (diffs, dists)
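Here (and in Code Example #4 below) `utils.flat_shape` is applied to a Caffe output blob and the result is then treated as a 1-D feature vector: `pt2 - pt1` and `np.linalg.norm(pt1)` both require a flat array. The helper itself is not part of this listing; a minimal sketch consistent with that usage (the implementation is inferred, not taken from the project) would be:

    import numpy as np

    def flat_shape(blob):
        # Flatten an N-D activation blob into a 1-D feature vector (assumed behavior).
        return np.asarray(blob).reshape(-1)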
Code Example #2
File: nn_layers.py  Project: nkgfirecream/cute-dl
    def init_params(self):

        # Flatten the input/output shapes into sizes and initialize the parameter values
        std = 0.01
        shape = (utils.flat_shape(self.inshape),
                 utils.flat_shape(self.outshape))
        wval = np.random.randn(shape[0], shape[1]) * std
        bval = np.zeros(shape[1])

        self.__W = LayerParam(self.name, 'W', wval)
        self.__b = LayerParam(self.name, 'b', bval)
Code Example #3
 def weight_initializers(self):
     initializers = {}
     initializers['uniform'] = lambda shape: np.random.uniform(
         -1, 1, shape) * 0.1
     initializers['normal'] = lambda shape: np.random.randn(
         utils.flat_shape(shape)).reshape(shape) * 0.1
     return initializers
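In the cute-dl examples (#2, #3, #5, and #7), `utils.flat_shape` is instead given a shape tuple and its result is used as an integer element count, e.g. as the argument to `np.random.randn` above or as the single output dimension in Code Example #7. A plausible sketch, assuming the helper simply multiplies the dimensions together, is:

    import numpy as np

    def flat_shape(shape):
        # Product of all dimensions, e.g. (3, 8, 8) -> 192 (assumed behavior).
        return int(np.prod(shape))

    # Usage in the style of Code Example #3: a normally distributed initializer.
    normal = lambda shape: np.random.randn(flat_shape(shape)).reshape(shape) * 0.1
    w = normal((4, 5))   # 20 samples drawn, reshaped to (4, 5)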
Code Example #4
    def process(item):
        i, image, label = item  # unpack (index, image, label); tuple parameters are not valid Python 3 syntax
        res = []
        for tr in trs:
            f, name = tr['f'], tr['name']

            image2 = f(image)
            image2_caffe = image2.reshape(1, *image.shape)
            data = np.asarray([ image2_caffe/255. ]) # normalize
            out = net.forward_all(data=data, blobs=[ args.layer ])
            pt = utils.flat_shape(out[args.layer][0])

            res.append((label, name, pt))

        return res
Code Example #5
File: cnn_layers.py  Project: brandonlyg/cute-dl
    def init_params(self):
        inshape = self.__inshape
        outshape = self.__outshape
        in_chnls = inshape[0]
        out_chnls = outshape[0]

        #pdb.set_trace()
        # Flatten the weights to shape (c*kh*kw, c_) so the convolution can be
        # computed as a matrix multiplication, which improves performance.
        shape = (in_chnls * utils.flat_shape(self.__ks), out_chnls)
        wval = self.__W(shape)
        bval = self.__b((shape[1], ))

        W = LayerParam(self.name, 'weight', wval)
        b = LayerParam(self.name, 'bias', bval)

        self.__W = W
        self.__b = b
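The flattened weight shape in Code Example #5 follows the usual im2col layout: one row per input-channel x kernel-height x kernel-width element, one column per output channel. As a hedged illustration (the surrounding class is not shown, and the concrete numbers are assumptions), with 3 input channels, a 3x3 kernel, and 16 output channels, using the product-of-dimensions sketch above:

    in_chnls, out_chnls, ks = 3, 16, (3, 3)
    shape = (in_chnls * flat_shape(ks), out_chnls)   # (3 * 9, 16) == (27, 16)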
Code Example #6
            relu_conv1x1_inter_res4_rgb = transf_block(inter_res4_rgb, dt, conv_kernel_constraint, training_phase, 'redux_rgb_inter_res4')
            relu_conv1x1_res4_rgb = transf_block(res4_rgb, dt, conv_kernel_constraint, training_phase, 'redux_rgb_res4')
            relu_conv1x1_inter_res5_rgb = transf_block(inter_res5_rgb, dt, conv_kernel_constraint, training_phase, 'redux_rgb_inter_res5')
            relu_conv1x1_res5_rgb = transf_block(res5_rgb, dt, conv_kernel_constraint, training_phase, 'redux_rgb_res5')
            
            relu_conv1x1_res1_depth = transf_block(res1_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_res1')
            relu_conv1x1_inter_res2_depth = transf_block(inter_res2_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_inter_res2')
            relu_conv1x1_res2_depth = transf_block(res2_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_res2')
            relu_conv1x1_inter_res3_depth = transf_block(inter_res3_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_inter_res3')
            relu_conv1x1_res3_depth = transf_block(res3_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_res3')
            relu_conv1x1_inter_res4_depth = transf_block(inter_res4_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_inter_res4')
            relu_conv1x1_res4_depth = transf_block(res4_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_res4')
            relu_conv1x1_inter_res5_depth = transf_block(inter_res5_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_inter_res5')
            relu_conv1x1_res5_depth = transf_block(res5_depth, dt, conv_kernel_constraint, training_phase, 'redux_depth_res5')
 
        relu_conv1x1_res1_rgb = tf.reshape(relu_conv1x1_res1_rgb, [-1, flat_shape(relu_conv1x1_res1_rgb)])
        relu_conv1x1_inter_res2_rgb = tf.reshape(relu_conv1x1_inter_res2_rgb, [-1, flat_shape(relu_conv1x1_inter_res2_rgb)])
        relu_conv1x1_res2_rgb = tf.reshape(relu_conv1x1_res2_rgb, [-1, flat_shape(relu_conv1x1_res2_rgb)])
        relu_conv1x1_inter_res3_rgb = tf.reshape(relu_conv1x1_inter_res3_rgb, [-1, flat_shape(relu_conv1x1_inter_res3_rgb)])
        relu_conv1x1_res3_rgb = tf.reshape(relu_conv1x1_res3_rgb, [-1, flat_shape(relu_conv1x1_res3_rgb)])
        relu_conv1x1_inter_res4_rgb = tf.reshape(relu_conv1x1_inter_res4_rgb, [-1, flat_shape(relu_conv1x1_inter_res4_rgb)])
        relu_conv1x1_res4_rgb = tf.reshape(relu_conv1x1_res4_rgb, [-1, flat_shape(relu_conv1x1_res4_rgb)])
        relu_conv1x1_inter_res5_rgb = tf.reshape(relu_conv1x1_inter_res5_rgb, [-1, flat_shape(relu_conv1x1_inter_res5_rgb)])
        relu_conv1x1_res5_rgb = tf.reshape(relu_conv1x1_res5_rgb, [-1, flat_shape(relu_conv1x1_res5_rgb)])

        relu_conv1x1_res1_depth = tf.reshape(relu_conv1x1_res1_depth, [-1, flat_shape(relu_conv1x1_res1_depth)])
        relu_conv1x1_inter_res2_depth = tf.reshape(relu_conv1x1_inter_res2_depth, [-1, flat_shape(relu_conv1x1_inter_res2_depth)])
        relu_conv1x1_res2_depth = tf.reshape(relu_conv1x1_res2_depth, [-1, flat_shape(relu_conv1x1_res2_depth)])
        relu_conv1x1_inter_res3_depth = tf.reshape(relu_conv1x1_inter_res3_depth, [-1, flat_shape(relu_conv1x1_inter_res3_depth)])
        relu_conv1x1_res3_depth = tf.reshape(relu_conv1x1_res3_depth, [-1, flat_shape(relu_conv1x1_res3_depth)])
        relu_conv1x1_inter_res4_depth = tf.reshape(relu_conv1x1_inter_res4_depth, [-1, flat_shape(relu_conv1x1_inter_res4_depth)])
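Code Example #6 calls `flat_shape` on a TensorFlow tensor and feeds the result into `tf.reshape(x, [-1, ...])`, so here it must return the number of features per example. The helper is again not shown; a sketch consistent with that call pattern, assuming static shapes and a leading batch dimension, could be:

    import numpy as np

    def flat_shape(tensor):
        # Product of all non-batch dimensions of a statically shaped tensor (assumed behavior).
        return int(np.prod(tensor.shape.as_list()[1:]))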
Code Example #7
File: nn_layers.py  Project: brandonlyg/cute-dl
 def set_prev(self, prev_layer):
     inshape = prev_layer.outshape
     self.__inshape = inshape
     self.__outshape = (utils.flat_shape(inshape), )
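This last example is the shape bookkeeping of a flatten-style layer: the previous layer's output shape becomes this layer's input shape, and the output shape collapses to a single dimension holding all of those elements. With the product-of-dimensions sketch above, a previous layer producing `(16, 4, 4)` would give:

    prev_outshape = (16, 4, 4)
    outshape = (flat_shape(prev_outshape), )   # (256,)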