# utterance with the largest frame count (max with this key returns the whole [name, count] pair)
frame_max = max(IDs_utter, key=lambda x: x[1])
train_data_length = len(pickList) * VAL_SET_RATIO

DATA_LAYER = [mem_pgram.shape[1]]
LABEL_LAYER = DATA_LAYER

LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER
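# e.g. with a 48-dimensional posteriorgram and two hidden layers of 128 units
# (the hidden sizes are an illustrative assumption; HIDDEN_LAYERS is defined
# earlier in the original script), LAYERS == [48] + [128, 128] + [48]
#                                         == [48, 128, 128, 48]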

########################
#     Create lstm       #
########################

print "Creating lstm..."
nn = LSTM_net(LAYERS,
              batch_size=BATCH_SIZE,
              momentum_type=MOMENTUM_TYPE,
              act_type=ACT_FUNC,
              cost_type=COST_FUNC)

########################
#      Train lstm       #
########################
val_label_vec = None
StateToVec = get_PhoneStateVec()
PhoneState = load_liststateto48()
PhoneIdx = load_dict_IdxPh48()

prev_err = float('inf')
prev_2 = float('inf')
prev_3 = float('inf')
cal_dev = 3
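# A minimal, self-contained sketch (an assumption, not the original training
# loop) of how prev_err / prev_2 / prev_3 and cal_dev could drive early
# stopping: training halts once the validation error has failed to improve for
# cal_dev consecutive checks. should_stop and the toy error values below are
# hypothetical and only illustrate the idea.
def should_stop(err_history, patience=3):
    # stop when the last `patience` errors are all no better than the best
    # error seen before them
    if len(err_history) <= patience:
        return False
    best_before = min(err_history[:-patience])
    return all(e >= best_before for e in err_history[-patience:])

toy_errors = [2.10, 1.70, 1.50, 1.51, 1.52, 1.55]
print [should_stop(toy_errors[:i + 1], patience=cal_dev) for i in range(len(toy_errors))]
# -> [False, False, False, False, False, True]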
Example #2
import pickle
import numpy as np

BATCH_SIZE = 37
PKL_ID = './ID_test.pkl'
PGRAM_ROOT = 'dnn_result/posteriorgram/'
DNN_MODEL = 'Angus_2'
MEM_PGRAM = PGRAM_ROOT + DNN_MODEL + '_test.pgram'
MEM_PGRAM_shape = (180406, 48)

########################
#  load lstm open file  #
########################
print "Loading lstm..."
layers, W, Wi, Wf, Wo, b, bi, bf, bo = pickle.load(open(MODEL_ROOT + MODEL, 'rb'))
nn = LSTM_net(layers, W, Wi, Wf, Wo, b, bi, bf, bo,
              batch_size=BATCH_SIZE,
              momentum_type=MOMENTUM_TYPE,
              act_type=ACT_FUNC,
              cost_type=COST_FUNC)
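# For reference, a hedged sketch (not shown in the original project) of how a
# model file with this nine-element layout could be produced; the '.copy'
# output path is hypothetical and used only for illustration.
with open(MODEL_ROOT + MODEL + '.copy', 'wb') as f:
    pickle.dump((layers, W, Wi, Wf, Wo, b, bi, bf, bo), f)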

#IDs,TEST_DATA,VAL_DATA = readfile_for_test( TEST_ROOT+TEST,1 )

print "Reading data..."
mem_pgram = np.memmap(MEM_PGRAM,dtype='float32',mode='r',shape=MEM_PGRAM_shape)
IDs = readID(PKL_ID)
idx = 0
IDs_utter = []
while idx < len(IDs):
    # each IDs_utter entry is [utter_name, utter_max]
    IDs_utter.append(["_".join(IDs[idx][0].split('_')[0:2]), IDs[idx][1]])
    idx += IDs[idx][1]
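# Toy illustration (an assumption about the ID layout, inferred from the loop
# above): IDs holds one record per frame, each carrying its utterance's frame
# count, so advancing idx by IDs[idx][1] yields one [utterance_name, frame_count]
# pair per utterance. The names and counts below are made up.
toy_IDs = [['fadg0_si1279_1', 3], ['fadg0_si1279_2', 3], ['fadg0_si1279_3', 3],
           ['faem0_si2022_1', 2], ['faem0_si2022_2', 2]]
toy_utter, i = [], 0
while i < len(toy_IDs):
    toy_utter.append(["_".join(toy_IDs[i][0].split('_')[0:2]), toy_IDs[i][1]])
    i += toy_IDs[i][1]
print toy_utter   # [['fadg0_si1279', 3], ['faem0_si2022', 2]]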
Example #3
MEM_PGRAM = PGRAM_ROOT + DNN_MODEL + '_test.pgram'
MEM_PGRAM_shape = (180406, 48)

########################
#  load lstm open file  #
########################
print "Loading lstm..."
layers, W, Wi, Wf, Wo, b, bi, bf, bo = pickle.load(
    open(MODEL_ROOT + MODEL, 'rb'))
nn = LSTM_net(layers,
              W,
              Wi,
              Wf,
              Wo,
              b,
              bi,
              bf,
              bo,
              batch_size=BATCH_SIZE,
              momentum_type=MOMENTUM_TYPE,
              act_type=ACT_FUNC,
              cost_type=COST_FUNC)

#IDs,TEST_DATA,VAL_DATA = readfile_for_test( TEST_ROOT+TEST,1 )

print "Reading data..."
mem_pgram = np.memmap(MEM_PGRAM,
                      dtype='float32',
                      mode='r',
                      shape=MEM_PGRAM_shape)
IDs = readID(PKL_ID)
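# Sanity check (an assumption, not in the original script): if IDs holds one
# record per frame, its length should match the number of rows in the memmapped
# posteriorgram, i.e. MEM_PGRAM_shape[0].
print len(IDs) == mem_pgram.shape[0]   # expected: True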
Example #4
frame_max = max(IDs_utter, key=lambda x: x[1])
train_data_length = len(pickList) * VAL_SET_RATIO

DATA_LAYER = [mem_pgram.shape[1]]
LABEL_LAYER = DATA_LAYER

LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

########################
#     Create lstm       #
########################

print "Creating lstm..."
nn = LSTM_net(LAYERS,
              batch_size=BATCH_SIZE,
              momentum_type=MOMENTUM_TYPE,
              act_type=ACT_FUNC,
              cost_type=COST_FUNC)

########################
#      Train lstm       #
########################
val_label_vec = None
StateToVec = get_PhoneStateVec()
PhoneState = load_liststateto48()
PhoneIdx   = load_dict_IdxPh48()

prev_err = float('inf')
prev_2 = float('inf')
prev_3 = float('inf')
cal_dev = 3
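# A hedged, toy sketch (an assumption, not in the original script) of how
# train_data_length could split a shuffled utterance list into training and
# validation subsets. Note that, as written above, train_data_length is
# len(pickList) * VAL_SET_RATIO, so that constant is effectively the fraction
# kept for training; the list and ratio below are made-up stand-ins.
toy_pickList = list(range(10))              # stand-in for the shuffled utterance indices
toy_train_len = int(len(toy_pickList) * 0.8)
train_utts = toy_pickList[:toy_train_len]   # first 8 utterances for training
val_utts = toy_pickList[toy_train_len:]     # remaining 2 for validation
print len(train_utts), len(val_utts)        # -> 8 2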