Code example #1
    def next_valid_batch(self, x_, y_, x_skeleton_):
        # Refill the pool of validation positions once it has been exhausted.
        if len(self.pos_valid) == 0:
            self.shuffle_valid()
        pos = self.pos_valid.pop()
        # Cast to floatX only when Mean_CNN holds array statistics rather than
        # an int placeholder.
        if not isinstance(self.Mean_CNN, int):
            x_.set_value(normalize(self.x_valid[pos:pos + self.batch_size].astype(config.floatX),
                                   self.Mean_CNN, self.Std_CNN), borrow=True)
        else:
            x_.set_value(normalize(self.x_valid[pos:pos + self.batch_size],
                                   self.Mean_CNN, self.Std_CNN), borrow=True)
        y_.set_value(self.y_valid[pos:pos + self.batch_size], borrow=True)
        x_skeleton_.set_value(normalize(self.x_valid_skeleton_feature[pos:pos + self.batch_size],
                                        self.Mean1, self.Std1), borrow=True)
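
Every excerpt on this page calls a normalize helper that is not defined in the snippets themselves. As a point of reference only, here is a minimal sketch of such a helper, assuming it performs ordinary mean/standard-deviation scaling with broadcastable statistics (the eps guard and the exact signature are assumptions, not taken from the original code):

import numpy

def normalize(data, mean, std, eps=1e-8):
    # Hypothetical stand-in: standardize `data` with precomputed statistics.
    # `mean` and `std` may be scalars or arrays broadcastable against `data`.
    return (data - mean) / (std + eps)
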
            video, Feature_gesture = sample.get_test_data_wudi_lio(used_joints)
            assert video.shape[0] == Feature_gesture.shape[0]
            
            print "finish preprocessing"
            out_file = open(save_path, 'wb')
            cPickle.dump({"video":video, "Feature_gesture":Feature_gesture}, out_file, protocol=cPickle.HIGHEST_PROTOCOL)
            out_file.close()

        print "start computing likelihood"
        observ_likelihood = numpy.empty(shape=(video.shape[0], 20*STATE_NO+1))  # 20 classes * 5 states + 1 ergodic state
        for batchnumber in xrange(video.shape[0]/batch.micro):

            video_temp = video[batch.micro*batchnumber:batch.micro*(batchnumber+1),:]
            skel_temp =  Feature_gesture[batch.micro*batchnumber:batch.micro*(batchnumber+1),:]  

            x_.set_value(normalize(video_temp, Mean_CNN, Std_CNN).astype("float32"),borrow=True)
            x_skeleton_.set_value(normalize(skel_temp,Mean_skel, Std_skel).astype("float32"), borrow=True)
            
            observ_likelihood[batch.micro*batchnumber:batch.micro*(batchnumber+1),:] =  p_y_given_x()

        # the network expects a fixed batch size of 64, so the last partial batch is zero-padded (a bit of a hack):
        video_temp_1 = video[batch.micro* (batchnumber+1):,:]   
        video_temp_2 = numpy.zeros(shape=(64-video_temp_1.shape[0], 2, 2, 4, 64, 64))
        video_temp = numpy.concatenate((video_temp_1, video_temp_2), axis=0)
        skel_temp_1 = Feature_gesture[batch.micro* (batchnumber+1):,:]  
        skel_temp_2 = numpy.zeros(shape=(64-skel_temp_1.shape[0],891))
        skel_temp = numpy.concatenate((skel_temp_1, skel_temp_2), axis=0)
        x_.set_value(normalize(video_temp, Mean_CNN, Std_CNN).astype("float32"),borrow=True)
        x_skeleton_.set_value(normalize(skel_temp,Mean_skel, Std_skel).astype("float32"), borrow=True)

        ob_temp = p_y_given_x()
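
The tail of the block above zero-pads the last, smaller batch so that the compiled prediction function always receives exactly 64 samples; only the rows that correspond to real frames are then written back into observ_likelihood (see the slice ob_temp[:video_temp_1.shape[0], :] in the later excerpts). A standalone sketch of that padding step, with pad_to_batch as an illustrative name that does not appear in the original code:

import numpy

def pad_to_batch(x, batch_size=64):
    # Zero-pad the first axis of `x` up to `batch_size` rows (assumes `x`
    # has at most `batch_size` rows) and also return the number of valid rows.
    valid = x.shape[0]
    pad = numpy.zeros((batch_size - valid,) + x.shape[1:], dtype=x.dtype)
    return numpy.concatenate((x, pad), axis=0), valid
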
                "video": video,
                "Feature_gesture": Feature_gesture
            },
                         out_file,
                         protocol=cPickle.HIGHEST_PROTOCOL)
            out_file.close()

        print "start computing likelihood"
        observ_likelihood = numpy.empty(
            shape=(video.shape[0], 20 * STATE_NO + 1))  # 20 classes * 5 states + 1 ergodic state
        for batchnumber in xrange(video.shape[0] / batch.micro):

            skel_temp = Feature_gesture[batch.micro * batchnumber:batch.micro *
                                        (batchnumber + 1), :]
            x_skeleton_.set_value(normalize(skel_temp, Mean_skel,
                                            Std_skel).astype("float32"),
                                  borrow=True)
            observ_likelihood[batch.micro * batchnumber:batch.micro *
                              (batchnumber + 1), :] = test_model()

        # the network expects a fixed batch size of 64, so the last partial batch is zero-padded (a bit of a hack):
        skel_temp_1 = Feature_gesture[batch.micro * (batchnumber + 1):, :]
        skel_temp_2 = numpy.zeros(shape=(64 - skel_temp_1.shape[0], 891))
        skel_temp = numpy.concatenate((skel_temp_1, skel_temp_2), axis=0)
        x_skeleton_.set_value(normalize(skel_temp, Mean_skel,
                                        Std_skel).astype("float32"),
                              borrow=True)
        ob_temp = test_model()
        observ_likelihood[batch.micro *
                          (batchnumber +
                           1):, :] = ob_temp[:video_temp_1.shape[0], :]
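
Several of these excerpts first pickle the preprocessed video and skeleton features to save_path so that the expensive preprocessing runs only once per sample. A hedged sketch of the matching load-or-compute pattern: the dictionary keys and the get_test_data_wudi_lio call are taken from the code above, while the os.path.isfile check and the name load_or_preprocess are assumptions.

import os
import cPickle

def load_or_preprocess(sample, used_joints, save_path):
    # Reuse cached features when available; otherwise preprocess and cache.
    if os.path.isfile(save_path):
        with open(save_path, 'rb') as f:
            cached = cPickle.load(f)
        return cached["video"], cached["Feature_gesture"]
    video, Feature_gesture = sample.get_test_data_wudi_lio(used_joints)
    with open(save_path, 'wb') as f:
        cPickle.dump({"video": video, "Feature_gesture": Feature_gesture},
                     f, protocol=cPickle.HIGHEST_PROTOCOL)
    return video, Feature_gesture
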
Code example #4
                "Feature_gesture": Feature_gesture
            },
                         out_file,
                         protocol=cPickle.HIGHEST_PROTOCOL)
            out_file.close()

        print "start computing likelihood"
        observ_likelihood = numpy.empty(
            shape=(video.shape[0], 20 * STATE_NO + 1))  # 20 classes * 5 states + 1 ergodic state
        for batchnumber in xrange(video.shape[0] / batch.micro):

            video_temp = video[batch.micro * batchnumber:batch.micro *
                               (batchnumber + 1), :]

            x_.set_value(normalize(video_temp, Mean_CNN,
                                   Std_CNN).astype("float32"),
                         borrow=True)

            observ_likelihood[batch.micro * batchnumber:batch.micro *
                              (batchnumber + 1), :] = test_model()

        # the network expects a fixed batch size of 64, so the last partial batch is zero-padded (a bit of a hack):
        video_temp_1 = video[batch.micro * (batchnumber + 1):, :]
        video_temp_2 = numpy.zeros(shape=(64 - video_temp_1.shape[0], 2, 2, 4,
                                          64, 64))
        video_temp = numpy.concatenate((video_temp_1, video_temp_2), axis=0)
        x_.set_value(normalize(video_temp, Mean_CNN,
                               Std_CNN).astype("float32"),
                     borrow=True)

        ob_temp = test_model()
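
Code example #4 runs the same batched-inference loop using only the video stream. Folded into a single helper the pattern looks roughly as follows; batched_predict, predict_fn and n_outputs are illustrative names, and unlike the original code, which feeds each batch to a compiled Theano function through shared variables, this sketch passes the batch to predict_fn directly:

import numpy

def batched_predict(predict_fn, inputs, n_outputs, batch_size=64):
    # Run `predict_fn` over `inputs` in fixed-size batches, zero-padding the
    # final batch and trimming the padded rows from the result.
    n = inputs.shape[0]
    out = numpy.empty((n, n_outputs), dtype="float32")
    for start in range(0, n, batch_size):
        chunk = inputs[start:start + batch_size]
        valid = chunk.shape[0]
        if valid < batch_size:
            pad = numpy.zeros((batch_size - valid,) + chunk.shape[1:],
                              dtype=chunk.dtype)
            chunk = numpy.concatenate((chunk, pad), axis=0)
        out[start:start + valid, :] = predict_fn(chunk)[:valid, :]
    return out
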
Code example #5
    def next_valid_batch(self, x_, y_, x_skeleton_):
        if len(self.pos_valid) == 0:
            self.shuffle_valid()
        pos = self.pos_valid.pop()
        x_.set_value(normalize(self.x_valid[pos:pos + self.batch_size], self.Mean_CNN, self.Std_CNN), borrow=True)
        y_.set_value(self.y_valid[pos:pos + self.batch_size], borrow=True)
        x_skeleton_.set_value(normalize(self.x_valid_skeleton_feature[pos:pos + self.batch_size], self.Mean1, self.Std1), borrow=True)

        cPickle.dump({
            "video": video,
            "Feature_gesture": Feature_gesture
        },
                     out_file,
                     protocol=cPickle.HIGHEST_PROTOCOL)
        out_file.close()

        print "start computing likelihood"
        observ_likelihood = numpy.empty(
            shape=(video.shape[0], 20 * STATE_NO + 1))  # 20 classes * 5 states + 1 ergodic state
        for batchnumber in xrange(video.shape[0] / batch.micro):

            video_temp = video[batch.micro * batchnumber:batch.micro *
                               (batchnumber + 1), :]
            skel_temp = Feature_gesture[batch.micro * batchnumber:batch.micro *
                                        (batchnumber + 1), :]

            if not isinstance(Mean_CNN, int):
                x_.set_value(normalize(video_temp.astype("float32"), Mean_CNN,
                                       Std_CNN).astype("float32"),
                             borrow=True)
            else:
                x_.set_value(normalize(video_temp, Mean_CNN,
                                       Std_CNN).astype("float32"),
                             borrow=True)
            if not isinstance(Mean_skel, int):
                x_skeleton_.set_value(normalize(skel_temp.astype("float32"),
                                                Mean_skel,
                                                Std_skel).astype("float32"),
                                      borrow=True)
            else:
                x_skeleton_.set_value(normalize(skel_temp, Mean_skel,
                                                Std_skel).astype("float32"),
                                      borrow=True)
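
Code example #5 wraps every normalize call in an isinstance(..., int) check: the mean/std slots apparently hold plain integer placeholders until real array statistics are loaded, and only array inputs are cast to float before normalization. The same guard written once as a helper; set_normalized is a hypothetical name and normalize is assumed to be the helper used throughout these excerpts:

def set_normalized(shared_var, data, mean, std):
    # Cast to float32 before normalizing only when real array statistics are
    # available; an int `mean` is treated as a placeholder.
    if not isinstance(mean, int):
        data = data.astype("float32")
    shared_var.set_value(normalize(data, mean, std).astype("float32"),
                         borrow=True)

# e.g. set_normalized(x_, video_temp, Mean_CNN, Std_CNN)
# and  set_normalized(x_skeleton_, skel_temp, Mean_skel, Std_skel)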