Example #1
def min_len():

    Path = ['nnet_data_logpower_noise_chans_train_normalized/dt05_caf_real','nnet_data_logpower_noise_chans_train_normalized/dt05_bus_real',
            'nnet_data_logpower_noise_chans_train_normalized/dt05_ped_real', 'nnet_data_logpower_noise_chans_train_normalized/dt05_str_real']
    # ,'nnet_data_logpower_1/dt05_caf_real','nnet_data_logpower_1/dt05_ped_real','nnet_data_logpower_1/dt05_str_real'
    #,'nnet_data_logpower_noise/dt05_str_real','nnet_data_logpower_noise/dt05_ped_real'
    # 'chans_noadapt_ds_real/dt05_bus_real','chans_noadapt_ds_real/dt05_caf_real','chans_noadapt_ds_real/dt05_ped_real','chans_noadapt_ds_real/dt05_str_real'

    print("min_len")

    Filenames = []

    for p in Path:
        for root, dirs, files in os.walk(p):
            for filename in files:

                # Skip auxiliary .phase / .predicted files; only the .io.mat pairs count.
                if 'phase' in filename.split('.') or 'predicted' in filename.split('.'):
                    continue

                # Join with the directory actually being walked, not the top-level path.
                Filenames.append(os.path.join(root, filename))


    min_length = float('inf')
    for fp in Filenames:
        real_wiener_all, wiener_all = readmat(fp, read='inout', keys=('Input', 'Output'))
        if len(wiener_all) < min_length:
            min_length = len(wiener_all)
    return min_length
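readmat is a project helper that is not shown in these examples. A minimal sketch of what it is assumed to do, based on how the snippets call it (the variable names inside the .mat files and the return order are assumptions):

# Hypothetical sketch of the readmat helper used throughout these examples.
# Assumes each .io.mat file stores an 'Input' matrix (noisy 5-channel
# log-power features, frames x 1285) and an 'Output' matrix (clean
# log-power targets, frames x 257).
import scipy.io


def readmat(path, read='inout', keys=('Input', 'Output')):
    mat = scipy.io.loadmat(path)
    if read == 'in':
        return mat['Input']
    # read == 'inout': callers unpack the result as (targets, inputs)
    return mat[keys[1]], mat[keys[0]]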
Example #2
def make_ds(Filenames, context, zero_delimiter=False):
    # Accumulators: filled from the first file, then concatenated file by file.
    X, Y = None, None

    for fp in Filenames:

        real_wiener_all, wiener_all = readmat(fp, read='inout', keys=('Input', 'Output'))

        dslength = len(wiener_all)

        X_ = np.zeros((dslength, 1, 257 * 5))
        Y_ = np.zeros((dslength, 1, 257))


        for i in range(np.size(X_, 0)):
            x = wiener_all[i].reshape((1, 257 * 5))
            # x = np.array([max(x[0][i],-35) for i in range (1285)])
            X_[i][0] = x


        for i in range(np.size(Y_, 0)):
            y = real_wiener_all[i].reshape((1, 257))
            # y = np.array([max(y[0][i], -35) for i in range(257)])
            Y_[i][0] = y

        if context:
            # Re-slice the per-frame samples into windows of `context` consecutive frames.
            X_, Y_ = create_dataset(X_, Y_, context)

            X_ = np.squeeze(X_)
            Y_ = np.squeeze(Y_)

            if zero_delimiter:
                # Prepend one all-zero window so files remain separated after concatenation.
                newX_ = np.zeros((len(X_) + 1, context, 257 * 5))
                newY_ = np.zeros((len(Y_) + 1, context, 257))

                newX_[1:len(X_) + 1, :, :] = X_
                newY_[1:len(Y_) + 1, :, :] = Y_
                X_ = newX_
                Y_ = newY_

        if X is None:
            X = X_
            Y = Y_
        else:
            X = np.concatenate((X, X_))
            Y = np.concatenate((Y, Y_))
            print(X.shape)



    return X,Y
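create_dataset is called in several of these examples but is not included in them. A plausible sketch, assuming it slides a window of `context` consecutive frames over the per-frame arrays (the exact window shape and stride are assumptions):

# Hypothetical sketch of create_dataset: turn per-frame samples shaped
# (frames, 1, features) into overlapping windows shaped
# (frames - context + 1, context, 1, features).
import numpy as np


def create_dataset(X, Y, context):
    n = len(X) - context + 1
    Xw = np.zeros((n, context) + X.shape[1:])
    Yw = np.zeros((n, context) + Y.shape[1:])
    for i in range(n):
        Xw[i] = X[i:i + context]
        Yw[i] = Y[i:i + context]
    return Xw, Yw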
Example #3
def main():
    real_wiener_all, wiener_all = readmat(
        'nnet_data_logpower_noise/dt05_bus_real/F01_050C010K_BUS.io.mat',
        keys=('Input', 'Output'),
        read='inout')
    X = np.zeros((len(wiener_all), 1, 257 * 5))
    for i in range(np.size(X, 0)):
        x = wiener_all[i].reshape((1, 257 * 5))

        X[i][0] = x

    Y = np.zeros((len(wiener_all), 1, 257))
    for i in range(np.size(Y, 0)):
        y = real_wiener_all[i].reshape((1, 257))

        Y[i][0] = y
    X1, Y1 = create_dataset(X, Y, 400)
    print(X1.shape, Y1.shape)
    print(X.shape, Y.shape)
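To run this example as a standalone script, the usual entry-point guard applies:

if __name__ == '__main__':
    main()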
Example #4
from keras.models import Sequential
from keras.layers import Dense
from keras.models import model_from_yaml
from keras import losses
from read_data import readmat
from numpy import array, log, exp
import numpy as np
import os
import matplotlib.pyplot as plt
from theano import compile

dslength = 390

compile.mode.Mode(linker='py', optimizer=None)
real_wiener_all, wiener_all = array(readmat('network_input.mat'))
#wiener_all_cv, real_wiener_all_cv = array(readmat('network_cv.mat'))

X = np.zeros((dslength,257))
Y = np.zeros((dslength,257))

wiener_all = array(wiener_all)
real_wiener_all = array(real_wiener_all)

# Copy the first dslength frames element by element into the training arrays.
for i in range(dslength):
    for j in range(257):
        X1 = wiener_all[i][j]
        #X1 = [X1[k] + 0.01 for k in range(len(X1))]
        X[i][j] = X1

        Y1 = real_wiener_all[i][j]
        #Y1 = [Y1[k] + 0.01 for k in range(len(Y1))]
        Y[i][j] = Y1
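The element-by-element copy above can also be written as a single slice, assuming wiener_all and real_wiener_all are (frames, 257) arrays as the loop implies:

# Equivalent vectorized copy (assumes both matrices hold at least dslength frames).
X = np.asarray(wiener_all[:dslength], dtype=float)
Y = np.asarray(real_wiener_all[:dslength], dtype=float)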
Example #5
with open(yaml_file_name, 'r') as yaml_file:
    loaded_model_yaml = yaml_file.read()
loaded_model = model_from_yaml(loaded_model_yaml)
# load weights into new model
loaded_model.load_weights(h5_file_name)

print("Loaded model from disk")
loaded_model.compile(loss=losses.mean_squared_logarithmic_error,
                     optimizer='rmsprop',
                     metrics=['mse', 'mae', 'logcosh'])

for filename in Filenames:

    wiener_all = readmat(filename, keys=('Input',), read='in')

    X = np.zeros((len(wiener_all), 1, 257 * 5))
    for i in range(np.size(X, 0)):
        x = wiener_all[i].reshape((1, 257 * 5))

        X[i][0] = x
    """
    Y = np.zeros((len(wiener_all),1,257))
    for i in range(np.size(Y, 0)):
        y = real_wiener_all[i].reshape((1, 257))

        Y[i][0] = y
    """
    # X, Y = create_dataset(X, Y, 2)
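The YAML/HDF5 pair being loaded above would have been produced by the standard Keras save calls; a minimal sketch of that counterpart, assuming `model` is the trained Sequential network and reusing the same file names:

# Save the architecture as YAML and the weights as HDF5 so the snippets
# above can reload them with model_from_yaml / load_weights.
with open(yaml_file_name, 'w') as f:
    f.write(model.to_yaml())
model.save_weights(h5_file_name)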
Example #6
def main():
    best = [
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_051C0104_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_050C010N_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_053C0111_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_22HC010R_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_052C010J_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_421C020Q_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_22GC010D_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_420C020P_PED.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_ped_real/M03_422C020F_PED.io.mat'
    ]

    worst = [
        'nnet_data_logpower_noise_chans_train/dt05_bus_real/F01_422C0213_BUS.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_bus_real/F01_420C020K_BUS.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_bus_real/F01_051C0104_BUS.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_str_real/F01_053C0115_STR.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_str_real/F04_423C0204_STR.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_bus_real/F01_051C0111_BUS.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_str_real/F04_423C020I_STR.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_str_real/F01_421C020A_STR.io.mat',
        'nnet_data_logpower_noise_chans_train/dt05_str_real/F01_22GC0113_STR.io.mat'
    ]

    yaml_file_name = 'lstm_5ch_2_layers_real_chans.yaml'
    h5_file_name = 'lstm_5ch_2_layers_real_chans.h5'

    with open(yaml_file_name, 'r') as yaml_file:
        loaded_model_yaml = yaml_file.read()
    loaded_model = model_from_yaml(loaded_model_yaml)
    # load weights into new model
    loaded_model.load_weights(h5_file_name)

    print("Loaded model from disk")
    loaded_model.compile(loss=losses.mean_squared_logarithmic_error,
                         optimizer='rmsprop',
                         metrics=['mse', 'mae', 'logcosh'])

    for filename in worst:
        try:
            real_wiener_all, wiener_all = readmat(filename,
                                                  keys=('Input', 'Output'),
                                                  read='inout')
            print(len(real_wiener_all))
        except Exception:
            print(filename, 'error')
            continue

        X = np.zeros((len(wiener_all), 1, 257 * 5))
        for i in range(np.size(X, 0)):
            x = wiener_all[i].reshape((1, 257 * 5))

            X[i][0] = x

        Y = np.zeros((len(wiener_all), 1, 257))
        for i in range(np.size(Y, 0)):
            y = real_wiener_all[i].reshape((1, 257))

            Y[i][0] = y

        Y1 = loaded_model.predict(X, verbose=1)

        # Track the frame with the largest per-frame loss in this file.
        worst_frame = -1
        worst_loss = 0
        for i in range(len(Y1)):

            loss = mean_squared_logarithmic_error(Y[i][0][0:60],
                                                  Y1[i][0][0:60])
            if worst_loss < loss:
                worst_loss = loss
                worst_frame = i

            #Sum_loss +=mean_squared_logarithmic_error(Y[i][0][0:60],Y1[i][0][0:60])
            #Sum_loss  +=losses.mean_absolute_error(Y[i][0][0:60],Y1[i][0][0:60]).eval()
            # Column vectors for plotting.
            Ys = np.expand_dims(Y[i][0], 1)
            Y1s = np.expand_dims(Y1[i][0], 1)
            I = X[i][0]

            pre = plt.plot(Y1s[0:257], label='predicted')

            inp = plt.plot(Ys[0:257], label='Output')
            #plt.legend()
            #plt.show()
        print(filename, worst_frame, worst_loss)
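The per-frame mean_squared_logarithmic_error called in the loop above is not defined anywhere in these snippets; a plain NumPy stand-in following the Keras formula could look like this (the clipping of negative values is an assumption to keep the logarithm finite):

# NumPy stand-in for Keras' mean_squared_logarithmic_error, applied per frame.
def mean_squared_logarithmic_error(y_true, y_pred):
    y_true = np.clip(y_true, 0, None)  # assumption: clamp negatives before log1p
    y_pred = np.clip(y_pred, 0, None)
    return np.mean((np.log1p(y_pred) - np.log1p(y_true)) ** 2)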
Example #7
    for i in range(np.size(Y, 0)):
        y = real_wiener_all[i].reshape((1, 257))

        Y[i][0] = y

    Y1 = loaded_model.predict(X,verbose = 1)

    losses_stat[filename] = np.mean(losses.mean_squared_logarithmic_error(Y,Y1).eval())
"""

noise_type_losses = {'bus': [], 'caf': [], 'str': [], 'ped': []}
for noise_type in namelist.keys():
    for filename in namelist[noise_type]:
        try:
            real_wiener_all, wiener_all = readmat(filename,
                                                  keys=('Input', 'Output'),
                                                  read='inout')

        except Exception:
            print(filename, 'error')
            continue

        X = np.zeros((len(wiener_all), 1, 257 * 5))
        for i in range(np.size(X, 0)):
            x = wiener_all[i].reshape((1, 257 * 5))

            X[i][0] = x

        Y = np.zeros((len(wiener_all), 1, 257))
        for i in range(np.size(Y, 0)):
            y = real_wiener_all[i].reshape((1, 257))

            Y[i][0] = y
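        # Hypothetical continuation (the excerpt is cut off here): predict on this
        # file and record its mean loss under the current noise type, mirroring the
        # losses_stat bookkeeping shown earlier in this snippet.
        Y1 = loaded_model.predict(X, verbose=1)
        noise_type_losses[noise_type].append(
            np.mean(losses.mean_squared_logarithmic_error(Y, Y1).eval()))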