Example #1
# Remove unused variables
print "Removing Unused Variables..."
del TRAINING_SET,TRAINING_ID,TRAINING_FEATURES,LABEL_INDEXES,LABEL_FEATURES,\
    BATCHED_TRAINING_FEATURES,BATCHED_LABEL_FEATURES

#pdb.set_trace()
########################
#  Create Neural Net   #
########################
print "Constructing Neural Network..."
DATA_LAYER  = [ BATCHED_INPUT[0].shape[0] ]
LABEL_LAYER = [ BATCHED_OUTPUT[0].shape[0] ]
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

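# Fresh network built from the layer sizes alone; weights are
# presumably initialized at random by the DNN constructor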
nn = DNN(LAYERS)

#pdb.set_trace()
########################
# pre-Train Neural Net #
########################
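# NOTE: the pre-training code below is disabled; the triple-quoted
# string turns the whole block into a no-op string literal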
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
            begin = i * PRETRAIN_BATCH_SIZE
            if (i+1)*PRETRAIN_BATCH_SIZE > data.shape[1]:
Example #2
LABEL_LAYER = [ len( BATCHED_VECTORS[0][0] ) ]

# pdb.set_trace()
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

print "Data parsed!!!"

########################
#  Create Neural Net   #
########################
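# Resume from a saved snapshot (epoch 100 of a 500-epoch run, per the
# file name) rather than initializing from scratch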
model_path = "./result/model/DATA_fbank_LABEL_phoneme48_HIDDEN_LAYERS_1024-1024_L_RATE_0.01_MOMENTUM_0.9_DROPOUT_0.1_EPOCH_500_at_100" 
ll,ww,bb = pickle.load(open(model_path,'rb'))
nn = DNN(ll,
         ww,
         bb,
         m_norm=MAX_NORM,
         act=ACT_FUNC,
         cost=COST_FUNC,
         momentum_type=MOMENTUM_TYPE)

########################
# pre-Train Neural Net #
########################
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
Example #3
MODEL = "DATA_fbank_LABEL_phoneme48_HIDDEN_LAYERS_1024-1024-1024-1024_L_RATE_0.001_MOMENTUM_0.9_DROPOUT_0_EPOCH_100"

TEST_ROOT = './Data'
TEST = '/fbank/test.ark'

#TEST = './train_ant.ark'

PREDICTION_ROOT ='./result/prediction/'
PREDICTION = MODEL + '.csv'

########################
#  load DNN open file  #
########################

ACT_FUNC = "leakyReLU"
COST_FUNC = "CE"
layers,Ws,bs = pickle.load(open(MODEL_ROOT+MODEL,'rb'))
nn = DNN(layers,Ws,bs,act=ACT_FUNC,cost=COST_FUNC)
# Rescale the loaded parameters (factor 0.9) before prediction
nn.rescale_params(0.9)
TEST_DATA,VAL_DATA = readfile( TEST_ROOT+TEST,1 )
PRED_FILE = open( PREDICTION_ROOT + PREDICTION ,'wb')

# Get Dictionaries
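# Phone48 lists the 48 phone symbols; PhoneMap48to39 collapses them
# onto the smaller 39-phone set used for scoring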
Phone48 = load_list39to48()
PhoneMap48to39 = load_dict_48to39()

# For CSV
HEADER = ["Id","Prediction"]

########################
Example #4
pickList,val_data,val_label,val_IDs = parse_val_set(mem_data,mem_label,pickList,IDs,VAL_SET_RATIO)
# TRAINING_LABEL is a dict()

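# Input width = feature dimension, output width = number of label classes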
DATA_LAYER  = [ mem_data.shape[0] ]
LABEL_LAYER = [ LABEL_VARIETY ]

# pdb.set_trace()
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER
print "Data parsed!!!"

########################
#  Create Neural Net   #
########################

nn = DNN(LAYERS)

########################
# pre-Train Neural Net #
########################
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
            begin = i * PRETRAIN_BATCH_SIZE
            if (i+1)*PRETRAIN_BATCH_SIZE > data.shape[1]:
                end = data.shape[1]
Example #5
MEM_DATA = './data.fbank.memmap'
MEM_TEST = './test.fbank.memmap'
PKL_ID = './ID.pkl'
PKL_ID_TEST = './ID_test.pkl'
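# memmap layout: (feature dimension, number of frames)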
MEM_DATA_shape = (621, 180406)
STATE_LENGTH = 1943
PHONE_LENGTH = 48
BATCH_SIZE = 2
########################
#  load DNN open file  #
########################


layers,Ws,bs = pickle.load(open(MODEL_ROOT+MODEL,'rb')) 
nn = DNN(layers,Ws,bs,
         act=ACT_FUNC,
         cost=COST_FUNC)

# read Data #
mem_data = np.memmap(MEM_TEST,dtype='float32',mode='r',shape=MEM_DATA_shape)
IDs = readID(PKL_ID_TEST)
print "Data parsed"

########################
#  Save posteriorgram  #
########################

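# One PHONE_LENGTH-wide posterior row per frame ID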
mem_shape = (len(IDs),PHONE_LENGTH)
posteriorgram = np.memmap(PGRAM_ROOT+PGRAM,
                          dtype='float32',
                          mode='w+',
Example #6
MODEL = "DATA_fbank_LABEL_phonemeState_HIDDEN_LAYERS_2048-2048-2048_L_RATE_0.01_MOMENTUM_0.9_DROPOUT_0.1_EPOCH_200_at_150"

TEST_ROOT = './Data/fbank/'
TEST = 'test.ark'

#TEST = './train_ant.ark'

PREDICTION_ROOT ='./result/prediction/'
PREDICTION = MODEL + '.csv'

########################
#  load DNN open file  #
########################

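# Unpickle the layer sizes, weight matrices, and bias vectors saved at
# training time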
layers,Ws,bs = pickle.load(open(MODEL_ROOT+MODEL,'rb'))
nn = DNN(layers,Ws,bs)

TEST_DATA,VAL_DATA = readfile_( TEST_ROOT+TEST,1 )
PRED_FILE = open( PREDICTION_ROOT + PREDICTION ,'wb')

# Get Dictionaries
Phone48 = load_liststateto48()
PhoneMap48to39 = load_dict_48to39()

# For CSV
HEADER = ["Id","Prediction"]

########################
#       Predict        #
########################
Example #7
PGRAM_ROOT ='./result/posteriorgram/'
PGRAM = MODEL + '.pgram'

MEM_DATA = 'data.fbank.memmap'
PKL_ID = 'ID.pkl'
MEM_DATA_shape = (621,1124823)
STATE_LENGTH = 1943
BATCH_SIZE = 1847
########################
#  load DNN open file  #
########################


layers,Ws,bs = pickle.load(open(MODEL_ROOT+MODEL,'rb')) 
nn = DNN(layers,Ws,bs,
         act=ACT_FUNC,
         cost=COST_FUNC)

# read Data #
mem_data = np.memmap(MEM_DATA,dtype='float32',mode='r',shape=MEM_DATA_shape)
IDs = readID(PKL_ID)
print "Data parsed"

########################
#  Save posteriorgram  #
########################

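# One STATE_LENGTH-dim posterior column per frame ID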
mem_shape = (STATE_LENGTH,len(IDs))
posteriorgram = np.memmap(PGRAM,dtype='float32',mode='w+',shape=mem_shape)

########################
Example #8
for data in LABELED_VALIDATION_SET:
    validationNlabel.append(data + [LABEL_DICT[data[0]]])


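# Layer widths come from one batched training example and one batched label vector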
DATA_LAYER  = [ len( BATCHED_TRAINING_SET[0][0] ) ]
LABEL_LAYER = [ len( BATCHED_VECTORS[0][0] ) ]

# pdb.set_trace()
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

########################
#  Create Neural Net   #
########################
print "Initializing Neural Network..."

nn = DNN(LAYERS)

########################
# pre-Train Neural Net #
########################
'''
prop_input = _DATA_BATCH_
for l in xrange(1,len(nn.layers)):
    for epoch in PRE_TRAIN_EPOCH:
        nn.pretrainer[l].train()

    # get the hidden vector of the current dA and
    # pass it into the next dA as data.
    prop_input = nn.pretrainer[l].get_hidden(prop_input)

'''
Example #9
DATA_LAYER  = [ len( BATCHED_TRAINING_SET[0][0] ) ]
LABEL_LAYER = [ len( BATCHED_VECTORS[0][0] ) ]
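# Only the dimensions were needed, so free the large batch containers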
del BATCHED_TRAINING_SET
del BATCHED_VECTORS

# pdb.set_trace()
LAYERS = DATA_LAYER + HIDDEN_LAYERS + LABEL_LAYER

print "Data parsed!!!"

########################
#  Create Neural Net   #
########################

nn = DNN(LAYERS)

########################
# pre-Train Neural Net #
########################
'''
print "Start pre-training. pretrain {0} epoches".format(PRETRAIN_EPOCH)
prop_input = data
for l,da in enumerate(DAs):
    for epoch in xrange(PRETRAIN_EPOCH):
        batch_cost = 0
        tStart = time.time()
        for i in xrange( (data.shape[1]-1)/PRETRAIN_BATCH_SIZE + 1):
            begin = i * PRETRAIN_BATCH_SIZE
            if (i+1)*PRETRAIN_BATCH_SIZE > data.shape[1]:
                end = data.shape[1]