def input(network, n_tfiles, n_vfiles):
    """Load train/dev data onto *network* and mean-normalize the inputs.

    Reads n_tfiles training files and n_vfiles validation files via
    rim.read_Data, computes mean/std over the training inputs only, and
    normalizes both sets with those training-set statistics.
    NOTE(review): shadows the builtin input() -- name kept for caller
    compatibility.
    """
    # Xtrain_in, Ytrain_in: list of ndarray
    train_data = rim.read_Data("ASVspoof2017_V2_train_fbank", "train_info.txt", n_tfiles)
    network.Xtrain_in, network.Ytrain_in, network.train_size = train_data

    # Normalization statistics come from the training set only; the
    # validation set below is normalized with these same values.
    network.mean = np.mean(network.Xtrain_in)
    network.std = np.std(network.Xtrain_in)
    network.Xtrain_in = mean_normalization(network.Xtrain_in, network.mean, network.std)

    # Read validation data.
    dev_data = rim.read_Data("ASVspoof2017_V2_train_dev", "dev_info.txt", n_vfiles)
    network.Xvalid_in, network.Yvalid_in, network.dev_size = dev_data
    network.Xvalid_in = mean_normalization(network.Xvalid_in, network.mean, network.std)
def input(self):
    """Read the train and dev sets, normalize the inputs, and store all
    arrays on self, printing each label array and its shape."""
    # (x attr, y attr, size attr, data dir, info file) for each dataset,
    # processed in the same order as before: train first, then dev.
    datasets = (
        ("Xtrain_in", "Ytrain_in", "train_size",
         "ASVspoof2017_V2_train_fbank", "train_info.txt"),
        ("Xvalid_in", "Yvalid_in", "dev_size",
         "ASVspoof2017_V2_train_dev", "dev_info.txt"),
    )
    for x_attr, y_attr, size_attr, data_dir, info_file in datasets:
        features, labels, size = rim.read_Data(data_dir, info_file)
        setattr(self, x_attr, features)
        setattr(self, y_attr, labels)
        setattr(self, size_attr, size)
        # Normalize the stored input data in place on self.
        setattr(self, x_attr, self.normalize(getattr(self, x_attr)))
        print(labels)
        print("shape" + str(labels.shape))
import os
import tensorflow as tf
from model import CNN
from lib.model_io import get_model_id
from lib.model_io import restore_variables
import read_img as rim

# Evaluation driver: restore a trained CNN and score the evaluation set.
model_id = get_model_id()

# Create the network
network = CNN(model_id)

# read DATA
Xeval, Yeval, network.eval_size = rim.read_Data("ASVspoof2017_V2_train_eval", "eval_info.txt")
Xeval = network.normalize(Xeval)  # Normalize eval data
# print(network.eval_size/network.batch_size)

# define placeholders - predict
network.define_predict_operations()

# Recover the parameters of the model
# NOTE(review): TF1-style session; restore_variables() presumably loads the
# saved checkpoint weights into it -- confirm against lib.model_io.
sess = tf.Session()
restore_variables(sess)

indx = 0
network.batch_size = 64

# Iterate through eval files and calculate the classification scores
# --read data and evaluate for batch_size 64 for all images
for i in range(network.eval_size):  # how many images
# NOTE(review): the loop body is not visible in this chunk -- the script
# continues past this view; do not treat this line as the end of the file.
# test reading .cmp files
# Manual smoke test for the read_img module: load it from a source path and
# exercise its readers. Most checks are left commented out on purpose.
import imp  # NOTE(review): `imp` is deprecated since Python 3.4; importlib.util is the modern replacement

# Load read_img from an absolute path FIRST: imp.load_source registers the
# module under "read_img" in sys.modules, so the plain import below resolves
# to this exact file. Hardcoded developer path -- adjust to your checkout.
imp.load_source("read_img", "/home/tassos/Desktop/Deep4Deep/src/read_img.py")

import numpy as np
import matplotlib.pyplot as plt
import read_img as rim

# rim.read_cmp_file("testRead/T_1000001.cmp")

# check read dir_name
# cmpl = rim.read_cmp_dir("ASVspoof2017_V2_train_fbank")
# print(cmpl[0], cmpl[1])

# check read labels -- ok; check only path of protocol_V2 dir
# cl_types = rim.read_label("train_info.txt")
# print(cl_types)

# check read data
# BUG FIX: the second unpacking target was spelled with a Greek capital
# Upsilon ('Υdata', U+03A5) -- a *different* identifier from ASCII 'Ydata',
# so later references to Ydata would raise NameError.
Xdata, Ydata, nframes = rim.read_Data("ASVspoof2017_V2_train_fbank", "train_info.txt")