Ejemplo n.º 1
0
# --- Data / label configuration ---
MEM_DATA = './data.fbank.memmap'      # training fbank features (numpy memmap file)
MEM_TEST = './test.fbank.memmap'      # test fbank features (numpy memmap file)
PKL_ID = './ID.pkl'                   # pickled frame IDs for the training set
PKL_ID_TEST = './ID_test.pkl'         # pickled frame IDs for the test set
MEM_DATA_shape = (621, 180406)        # memmap shape; presumably (feature_dim, n_frames) -- TODO confirm
STATE_LENGTH = 1943                   # number of state labels -- presumably HMM states; verify
PHONE_LENGTH = 48                     # number of phone classes (posteriorgram width below)
BATCH_SIZE = 2
########################
#  load DNN open file  #
########################


# Restore a trained DNN from its pickled (layers, weights, biases) triple.
# A context manager closes the model file after loading -- the original
# pickle.load(open(...)) leaked the file handle.
with open(MODEL_ROOT + MODEL, 'rb') as model_file:
    layers, Ws, bs = pickle.load(model_file)
nn = DNN(layers, Ws, bs,
         act=ACT_FUNC,
         cost=COST_FUNC)

# read Data #
# NOTE(review): this maps the *test* feature file but reuses MEM_DATA_shape,
# which is named after the training data -- confirm the test memmap really
# has the same shape.
mem_data = np.memmap(MEM_TEST, dtype='float32', mode='r', shape=MEM_DATA_shape)
IDs = readID(PKL_ID_TEST)
print("Data parsed")  # parenthesized: identical output on Py2, also valid Py3

########################
#  Save posteriorgram  #
########################

mem_shape = (len(IDs),PHONE_LENGTH)
posteriorgram = np.memmap(PGRAM_ROOT+PGRAM,
                          dtype='float32',
                          mode='w+',
Ejemplo n.º 2
0
# Checkpoint name encodes the training hyper-parameters; "_at_150" is
# presumably the epoch the snapshot was taken at -- verify against the trainer.
MODEL = "DATA_fbank_LABEL_phonemeState_HIDDEN_LAYERS_2048-2048-2048_L_RATE_0.01_MOMENTUM_0.9_DROPOUT_0.1_EPOCH_200_at_150"

TEST_ROOT = './Data/fbank/'  # directory holding the test features
TEST = 'test.ark'            # Kaldi-style ark file name -- presumably; confirm format

#TEST = './train_ant.ark'

PREDICTION_ROOT ='./result/prediction/'   # where the output CSV is written
PREDICTION = MODEL + '.csv'               # one prediction file per model checkpoint

########################
#  load DNN open file  #
########################

# Restore the trained DNN from its pickled (layers, weights, biases) triple.
# A context manager closes the model file after loading -- the original
# pickle.load(open(...)) leaked the file handle.
with open(MODEL_ROOT + MODEL, 'rb') as model_file:
    layers, Ws, bs = pickle.load(model_file)
nn = DNN(layers, Ws, bs)

# Read test utterances; the second value is presumably a validation split
# selected by the `1` flag -- confirm against readfile_'s definition.
TEST_DATA, VAL_DATA = readfile_(TEST_ROOT + TEST, 1)
# Output CSV ('wb' is correct for the Py2 csv module).
# NOTE(review): PRED_FILE is not closed in this fragment -- confirm the
# prediction loop closes/flushes it.
PRED_FILE = open(PREDICTION_ROOT + PREDICTION, 'wb')

# Get Dictionaries
Phone48 = load_liststateto48()       # presumably state index -> 48-phone label
PhoneMap48to39 = load_dict_48to39()  # presumably 48-phone -> 39-phone mapping

# For CSV
HEADER = ["Id", "Prediction"]

########################
#       Predict        #
########################
Ejemplo n.º 3
0
# Output layer width = dimensionality of one label vector.
LABEL_LAYER = [len(BATCHED_VECTORS[0][0])]

# Full layer-size list (input, hidden..., output).
# NOTE(review): LAYERS is computed but the network below is built from the
# checkpoint's own layer list `ll` -- confirm LAYERS is used past this fragment.
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

print("Data parsed!!!")  # parenthesized: identical output on Py2, also valid Py3

########################
#  Create Neural Net   #
########################
# Warm-start from a previously trained checkpoint (snapshot at epoch 100 of 500).
model_path = "./result/model/DATA_fbank_LABEL_phoneme48_HIDDEN_LAYERS_1024-1024_L_RATE_0.01_MOMENTUM_0.9_DROPOUT_0.1_EPOCH_500_at_100"
# A context manager closes the checkpoint file after unpickling -- the
# original pickle.load(open(...)) leaked the file handle.
with open(model_path, 'rb') as model_file:
    ll, ww, bb = pickle.load(model_file)
nn = DNN(ll,
         ww,
         bb,
         m_norm=MAX_NORM,
         act=ACT_FUNC,
         cost=COST_FUNC,
         momentum_type=MOMENTUM_TYPE)

########################
# pre-Train Neural Net #
########################
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
Ejemplo n.º 4
0
# Input layer width = dimensionality of one training example;
# output layer width = dimensionality of one label vector.
DATA_LAYER  = [ len( BATCHED_TRAINING_SET[0][0] ) ]
LABEL_LAYER = [ len( BATCHED_VECTORS[0][0] ) ]
# Release the batched data -- only the layer sizes are needed from here on.
del BATCHED_TRAINING_SET
del BATCHED_VECTORS

# pdb.set_trace()
# Full layer-size list (input, hidden..., output).
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

print "Data parsed!!!"

########################
#  Create Neural Net   #
########################

# Fresh, untrained network built with DNN's default hyper-parameters.
nn = DNN(LAYERS)

########################
# pre-Train Neural Net #
########################
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
            begin = i * PRETRAIN_BATCH_SIZE
            if (i+1)*PRETRAIN_BATCH_SIZE > data.shape[1]:
                end = data.shape[1]
Ejemplo n.º 5
0
# Input layer width = first memmap dimension -- presumably the feature
# dimension (features x frames layout); TODO confirm.
DATA_LAYER = [mem_data.shape[0]]
LABEL_LAYER = [LABEL_VARIETY]  # output layer width = number of label classes

# pdb.set_trace()
# Full layer-size list (input, hidden..., output).
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

print "Data parsed!!!"

########################
#  Create Neural Net   #
########################

# Fresh, untrained network using the configured max-norm, activation,
# cost, and momentum settings.
nn = DNN(LAYERS,
         m_norm=MAX_NORM,
         act=ACT_FUNC,
         cost=COST_FUNC,
         momentum_type=MOMENTUM_TYPE)

########################
# pre-Train Neural Net #
########################
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
            begin = i * PRETRAIN_BATCH_SIZE