# Code example 1 (score: 0)
# File: train.py — project: shidephen/merlin
    def predict(self, test_x, out_scaler, gen_test_file_list):
        """Generate acoustic features for held-out test data.

        Restores the seq2seq model saved under ``self.ckpt_dir`` and decodes
        each test utterance one frame at a time (autoregressively), then
        denormalises the predictions and writes them to the corresponding
        path in ``gen_test_file_list``.

        Args:
            test_x: dict mapping utterance id -> 2-D input matrix of shape
                (num_frames, self.n_in).  # assumed from the reshape below
            out_scaler: scaler passed through to ``data_utils.denorm_data``.
            gen_test_file_list: output file path per utterance, ordered to
                match the sorted utterance ids.
        """
        io_funcs = BinaryIOCollection()

        # dict.keys() is a view in Python 3 and has no .sort(); build a
        # sorted list instead (was: test_x.keys() followed by .sort()).
        test_id_list = sorted(test_x.keys())
        test_file_number = len(test_id_list)

        # Longest utterance fixes the time dimension of the decoder target
        # buffer.  This computation was commented out, leaving ``max_step``
        # undefined at its use site below.
        utt_length = [len(utt) for utt in test_x.values()]
        max_step = max(utt_length)

        with tf.Session(graph=self.graph) as sess:
            # import_meta_graph expects the full path to the .meta file; the
            # second positional parameter is ``clear_devices``, so passing
            # the filename there was a bug.
            new_saver = tf.train.import_meta_graph(
                os.path.join(self.ckpt_dir, "mymodel.ckpt.meta"))
            # These two tensors were only mentioned in stray "Notice change"
            # docstrings, leaving ``targets`` and ``decoder_outputs``
            # undefined where they are used in the decode loop.
            targets = self.graph.get_collection("targets")[0]
            decoder_outputs = self.graph.get_collection("decoder_outputs")[0]
            inputs_data = self.graph.get_collection("inputs_data")[0]
            inputs_sequence_length = self.graph.get_collection(
                "inputs_sequence_length")[0]
            target_sequence_length = self.graph.get_collection(
                "target_sequence_length")[0]
            print("loading the model parameters...")
            new_saver.restore(sess, os.path.join(self.ckpt_dir,
                                                 "mymodel.ckpt"))
            print("Model parameters are successfully restored")
            print("generating features on held-out test data...")
            for utt_index in range(test_file_number):
                gen_test_file_name = gen_test_file_list[utt_index]
                temp_test_x = test_x[test_id_list[utt_index]]
                num_of_rows = temp_test_x.shape[0]

                # Reshape with numpy: tf.reshape returns a symbolic tensor,
                # which cannot be used as a feed_dict *value*.
                temp_test_x = np.reshape(temp_test_x,
                                         [1, num_of_rows, self.n_in])

                outputs = np.zeros(shape=[len(test_x), max_step, self.n_out],
                                   dtype=np.float32)
                print("Generating speech parameters ...")
                # Autoregressive decoding: prediction for frame t is written
                # back into ``outputs`` and fed as the target history when
                # decoding frame t+1.
                for t in range(num_of_rows):
                    _outputs = sess.run(
                        decoder_outputs,
                        feed_dict={inputs_data: temp_test_x,
                                   targets: outputs,
                                   inputs_sequence_length: [num_of_rows],
                                   target_sequence_length: [num_of_rows]})
                    outputs[:, t, :] = _outputs[:, t, :]

                data_utils.denorm_data(outputs, out_scaler)
                io_funcs.array_to_binary_file(outputs, gen_test_file_name)
                data_utils.drawProgressBar(utt_index + 1, test_file_number)
# Code example 2 (score: 0)
# File: train.py — project: CSTR-Edinburgh/merlin
    def predict(self, test_x, out_scaler, gen_test_file_list, sequential_training=False, stateful=False):
        """Generate acoustic features for held-out test data.

        Restores the feed-forward (or recurrent, when ``sequential_training``
        is set) model saved under ``self.ckpt_dir``, runs every test
        utterance through it, denormalises the predictions and writes them
        to the matching path in ``gen_test_file_list``.

        Args:
            test_x: dict mapping utterance id -> 2-D input matrix of shape
                (num_frames, self.n_in).  # assumed from the reshape below
            out_scaler: scaler passed through to ``data_utils.denorm_data``.
            gen_test_file_list: output file path per utterance, ordered to
                match the sorted utterance ids.
            sequential_training: when True, inputs are reshaped to a single
                batch of shape [1, num_frames, n_in] and a sequence-length
                placeholder is fed.
            stateful: accepted for interface compatibility; not used here.
        """
        io_funcs = BinaryIOCollection()

        # Python 3 fix: dict.keys() returns a view with no .sort();
        # the py2 keys()+sort() idiom raises AttributeError on py3.
        test_id_list = sorted(test_x.keys())

        test_file_number = len(test_id_list)

        # Python 3 fixes throughout: print statements -> print() calls,
        # xrange -> range (behaviour is unchanged on CPython 2 as well,
        # apart from range materialising a list there).
        print("generating features on held-out test data...")
        with tf.Session() as sess:
            new_saver = tf.train.import_meta_graph(
                os.path.join(self.ckpt_dir, "mymodel.ckpt.meta"))
            print("loading the model parameters...")
            output_layer = tf.get_collection("output_layer")[0]
            input_layer = tf.get_collection("input_layer")[0]
            new_saver.restore(sess, os.path.join(self.ckpt_dir, "mymodel.ckpt"))
            print("The model parameters are successfully restored")
            for utt_index in range(test_file_number):
                gen_test_file_name = gen_test_file_list[utt_index]
                temp_test_x = test_x[test_id_list[utt_index]]
                num_of_rows = temp_test_x.shape[0]
                if not sequential_training:
                    is_training_batch = tf.get_collection("is_training_batch")[0]
                    if self.dropout_rate != 0.0:
                        is_training_drop = tf.get_collection("is_training_drop")[0]
                        y_predict = sess.run(
                            output_layer,
                            feed_dict={input_layer: temp_test_x,
                                       is_training_drop: False,
                                       is_training_batch: False})
                    else:
                        y_predict = sess.run(
                            output_layer,
                            feed_dict={input_layer: temp_test_x,
                                       is_training_batch: False})
                else:
                    temp_test_x = np.reshape(temp_test_x,
                                             [1, num_of_rows, self.n_in])
                    # ``hybrid`` marks architectures mixing tanh layers with
                    # recurrent ones; only those define the batch-norm
                    # training flag placeholder.
                    hybrid = 0
                    utt_length_placeholder = tf.get_collection("utt_length")[0]
                    if "tanh" in self.hidden_layer_type:
                        hybrid = 1
                        is_training_batch = tf.get_collection("is_training_batch")[0]
                    if self.dropout_rate != 0.0:
                        is_training_drop = tf.get_collection("is_training_drop")[0]
                        if hybrid:
                            y_predict = sess.run(
                                output_layer,
                                feed_dict={input_layer: temp_test_x,
                                           utt_length_placeholder: [num_of_rows],
                                           is_training_drop: False,
                                           is_training_batch: False})
                        else:
                            y_predict = sess.run(
                                output_layer,
                                feed_dict={input_layer: temp_test_x,
                                           utt_length_placeholder: [num_of_rows],
                                           is_training_drop: False})
                    elif hybrid:
                        y_predict = sess.run(
                            output_layer,
                            feed_dict={input_layer: temp_test_x,
                                       utt_length_placeholder: [num_of_rows],
                                       is_training_batch: False})
                    else:
                        y_predict = sess.run(
                            output_layer,
                            feed_dict={input_layer: temp_test_x,
                                       utt_length_placeholder: [num_of_rows]})
                data_utils.denorm_data(y_predict, out_scaler)
                io_funcs.array_to_binary_file(y_predict, gen_test_file_name)
                data_utils.drawProgressBar(utt_index + 1, test_file_number)
# Code example 3 (score: 0)
# File: train.py — project: CSTR-Edinburgh/merlin
      def predict(self, test_x, out_scaler, gen_test_file_list):
          """Generate acoustic features for held-out test data.

          Restores the seq2seq model saved under ``self.ckpt_dir`` and
          decodes each test utterance one frame at a time, then denormalises
          the predictions and writes them to the corresponding path in
          ``gen_test_file_list``.

          Args:
              test_x: dict mapping utterance id -> 2-D input matrix of shape
                  (num_frames, self.n_in).  # assumed from the reshape below
              out_scaler: scaler passed to ``data_utils.denorm_data``.
              gen_test_file_list: output file path per utterance, ordered to
                  match the sorted utterance ids.
          """
          io_funcs = BinaryIOCollection()

          # Python 3 fixes throughout: keys()+sort() -> sorted(), print
          # statements -> print() calls, xrange -> range.
          test_id_list = sorted(test_x.keys())
          test_file_number = len(test_id_list)

          # Longest utterance fixes the time dimension of the decoder target
          # buffer; this was commented out, leaving ``max_step`` undefined.
          utt_length = [len(utt) for utt in test_x.values()]
          max_step = max(utt_length)

          with tf.Session(graph=self.graph) as sess:
              # import_meta_graph expects the full .meta path; its second
              # positional parameter is ``clear_devices``, so passing the
              # filename there was a bug.
              new_saver = tf.train.import_meta_graph(
                  os.path.join(self.ckpt_dir, "mymodel.ckpt.meta"))
              # Previously only mentioned in stray "Notice change"
              # docstrings, which left ``targets`` and ``decoder_outputs``
              # undefined at their use sites below.
              targets = self.graph.get_collection("targets")[0]
              decoder_outputs = self.graph.get_collection("decoder_outputs")[0]
              inputs_data = self.graph.get_collection("inputs_data")[0]
              inputs_sequence_length = self.graph.get_collection(
                  "inputs_sequence_length")[0]
              target_sequence_length = self.graph.get_collection(
                  "target_sequence_length")[0]
              print("loading the model parameters...")
              new_saver.restore(sess, os.path.join(self.ckpt_dir,
                                                   "mymodel.ckpt"))
              print("Model parameters are successfully restored")
              print("generating features on held-out test data...")
              for utt_index in range(test_file_number):
                  gen_test_file_name = gen_test_file_list[utt_index]
                  temp_test_x = test_x[test_id_list[utt_index]]
                  num_of_rows = temp_test_x.shape[0]

                  # Reshape with numpy: tf.reshape yields a symbolic tensor,
                  # which cannot be used as a feed_dict *value*.
                  temp_test_x = np.reshape(temp_test_x,
                                           [1, num_of_rows, self.n_in])

                  outputs = np.zeros(
                      shape=[len(test_x), max_step, self.n_out],
                      dtype=np.float32)
                  print("Generating speech parameters ...")
                  # Autoregressive decoding: frame t of the prediction is
                  # written back into ``outputs`` and serves as target
                  # history when decoding frame t+1.
                  for t in range(num_of_rows):
                      _outputs = sess.run(
                          decoder_outputs,
                          feed_dict={inputs_data: temp_test_x,
                                     targets: outputs,
                                     inputs_sequence_length: [num_of_rows],
                                     target_sequence_length: [num_of_rows]})
                      outputs[:, t, :] = _outputs[:, t, :]

                  data_utils.denorm_data(outputs, out_scaler)
                  io_funcs.array_to_binary_file(outputs, gen_test_file_name)
                  data_utils.drawProgressBar(utt_index + 1, test_file_number)
# Code example 4 (score: 0)
# File: train.py — project: shidephen/merlin
    def predict(self,
                test_x,
                out_scaler,
                gen_test_file_list,
                sequential_training=False,
                stateful=False):
        """Generate features for held-out test data.

        Restores the trained model from ``self.ckpt_dir``, predicts each
        test utterance, denormalises the result with ``out_scaler`` and
        writes it to the matching path in ``gen_test_file_list``.

        Args:
            test_x: dict of utterance id -> input feature matrix.
            out_scaler: scaler handed to ``data_utils.denorm_data``.
            gen_test_file_list: one output path per sorted utterance id.
            sequential_training: feed inputs as a single [1, T, n_in] batch
                with a sequence-length placeholder when True.
            stateful: accepted for interface compatibility; unused here.
        """
        #### compute predictions ####

        io_funcs = BinaryIOCollection()

        sorted_ids = sorted(test_x.keys())
        total_utts = len(sorted_ids)

        print("generating features on held-out test data...")
        with tf.Session() as sess:
            saver = tf.train.import_meta_graph(
                os.path.join(self.ckpt_dir, "mymodel.ckpt.meta"))
            print("loading the model parameters...")
            output_layer = tf.get_collection("output_layer")[0]
            input_layer = tf.get_collection("input_layer")[0]
            saver.restore(sess, os.path.join(self.ckpt_dir, "mymodel.ckpt"))
            print("The model parameters are successfully restored")
            for idx, utt_id in enumerate(sorted_ids):
                out_path = gen_test_file_list[idx]
                features = test_x[utt_id]
                frame_count = features.shape[0]

                if not sequential_training:
                    # Feed-forward case: 2-D input, batch-norm flag off, and
                    # the dropout flag only when the graph defines one.
                    is_training_batch = tf.get_collection(
                        "is_training_batch")[0]
                    feeds = {input_layer: features,
                             is_training_batch: False}
                    if self.dropout_rate != 0.0:
                        is_training_drop = tf.get_collection(
                            "is_training_drop")[0]
                        feeds[is_training_drop] = False
                else:
                    # Recurrent case: single-utterance batch plus the
                    # sequence-length placeholder; the batch-norm flag
                    # exists only for hybrid (tanh + recurrent) stacks.
                    features = np.reshape(features,
                                          [1, frame_count, self.n_in])
                    utt_length_placeholder = tf.get_collection(
                        "utt_length")[0]
                    feeds = {input_layer: features,
                             utt_length_placeholder: [frame_count]}
                    if "tanh" in self.hidden_layer_type:
                        is_training_batch = tf.get_collection(
                            "is_training_batch")[0]
                        feeds[is_training_batch] = False
                    if self.dropout_rate != 0.0:
                        is_training_drop = tf.get_collection(
                            "is_training_drop")[0]
                        feeds[is_training_drop] = False

                y_predict = sess.run(output_layer, feed_dict=feeds)

                data_utils.denorm_data(y_predict, out_scaler)
                io_funcs.array_to_binary_file(y_predict, out_path)
                data_utils.drawProgressBar(idx + 1, total_utts)