Code Example #1
File: run_DNN_SAT.py  Project: chagge/kaldiproj
        valid_error = []
        while (not valid_sets.is_finish()):
            valid_sets.load_next_partition(valid_xy)
            for batch_index in xrange(valid_sets.cur_frame_num / batch_size):  # loop over mini-batches
                valid_error.append(valid_fn(index=batch_index))
        valid_sets.initialize_read()
        log('> epoch %d, lrate %f, validation error %f' % (lrate.epoch, lrate.get_rate(), numpy.mean(valid_error)))

        lrate.get_next_rate(current_error = 100 * numpy.mean(valid_error))

    # output both iVecNN and DNN
    _nnet2file(dnn.ivec_layers, set_layer_num = len(ivec_nnet_layers)-1, filename=wdir + '/ivec.finetune.tmp', withfinal=True)
    _nnet2file(dnn.sigmoid_layers, filename=wdir + '/nnet.finetune.tmp')

    # determine whether it's BNF based on layer sizes
    set_layer_num = -1
    withfinal = True
    bnf_layer_index = 1
    while bnf_layer_index < len(hidden_layers_sizes):
        if hidden_layers_sizes[bnf_layer_index] < hidden_layers_sizes[bnf_layer_index - 1]:  
            break
        bnf_layer_index = bnf_layer_index + 1

    if bnf_layer_index < len(hidden_layers_sizes):  # is bottleneck
        set_layer_num = bnf_layer_index+1
        withfinal = False

    # finally convert the nnet to kaldi or janus format
    if do_maxout:
      _nnet2kaldi_maxout(nnet_spec, pool_size = pool_size, set_layer_num = set_layer_num, filein = wdir + '/nnet.finetune.tmp', fileout = output_file, withfinal=withfinal)
    else:
      _nnet2kaldi(nnet_spec, set_layer_num = set_layer_num, filein = wdir + '/nnet.finetune.tmp', fileout = output_file, withfinal=withfinal)
    _nnet2kaldi(ivec_nnet_spec, set_layer_num = len(ivec_nnet_layers)-1, filein = wdir + '/ivec.finetune.tmp', fileout = ivec_output_file, withfinal=False)
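
The bottleneck check shared by run_DNN_SAT.py and run_DNN.py scans the hidden-layer sizes left to right and stops at the first layer that is narrower than its predecessor; only in that case does the export truncate the net at the bottleneck and drop the final softmax. A minimal standalone sketch of the same rule (the helper name find_bnf_layer is hypothetical, not part of the project):

def find_bnf_layer(hidden_layers_sizes):
    # Return the index of the first layer that shrinks relative to its
    # predecessor, or -1 if the sizes never decrease (no bottleneck).
    bnf_layer_index = 1
    while bnf_layer_index < len(hidden_layers_sizes):
        if hidden_layers_sizes[bnf_layer_index] < hidden_layers_sizes[bnf_layer_index - 1]:
            return bnf_layer_index
        bnf_layer_index += 1
    return -1

# [1024, 1024, 42, 1024] shrinks at index 2, so the scripts above would set
# set_layer_num = 3 and withfinal = False; a non-decreasing stack like
# [1024, 1024, 1024] keeps the defaults (full net, final layer included).
print(find_bnf_layer([1024, 1024, 42, 1024]))   # -> 2
print(find_bnf_layer([1024, 1024, 1024]))       # -> -1
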
Code Example #2
File: run_DNN.py  Project: chagge/kaldiproj
    # determine whether it's BNF based on layer sizes
    set_layer_num = -1
    withfinal = True
    bnf_layer_index = 1
    while bnf_layer_index < len(hidden_layers_sizes):
        if hidden_layers_sizes[bnf_layer_index] < hidden_layers_sizes[bnf_layer_index - 1]:  
            break
        bnf_layer_index = bnf_layer_index + 1

    if bnf_layer_index < len(hidden_layers_sizes):  # is bottleneck
        set_layer_num = bnf_layer_index+1
        withfinal = False

    # finally convert the nnet to kaldi or janus format
    if output_format == 'kaldi':
        if do_maxout:
          _nnet2kaldi_maxout(nnet_spec, pool_size = pool_size, set_layer_num = set_layer_num, filein = wdir + '/nnet.finetune.tmp', fileout = output_file, withfinal=withfinal)
        else:
          _nnet2kaldi(nnet_spec, set_layer_num = set_layer_num, filein = wdir + '/nnet.finetune.tmp', fileout = output_file, withfinal=withfinal)
    else:   # janus format
        if do_maxout:
          _nnet2janus_maxout(nnet_spec, pool_size = pool_size, set_layer_num = set_layer_num, filein = wdir + '/nnet.finetune.tmp', fileout = output_file, withfinal=withfinal)
        else:
          _nnet2janus(nnet_spec, set_layer_num = set_layer_num, filein = wdir + '/nnet.finetune.tmp', fileout = output_file, withfinal=withfinal)
    
    end_time = time.clock()
    print >> sys.stderr, ('The code for file ' + os.path.split(__file__)[1] +
                          ' ran for %.2fm' % ((end_time - start_time) / 60.))
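
These scripts are Python 2: print >> sys.stderr writes to stderr, and time.clock() (platform-dependent processor/wall time, removed in Python 3.8) supplies the timestamps. For reference, a sketch of the same timing tail in Python 3, where print is a function and time.perf_counter() is the usual replacement:

import os
import sys
import time

start_time = time.perf_counter()
# ... fine-tuning and nnet export would run here ...
end_time = time.perf_counter()
print('The code for file ' + os.path.split(__file__)[1] +
      ' ran for %.2fm' % ((end_time - start_time) / 60.), file=sys.stderr)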

Code Example #3
File: run_CNN_Fast.py  Project: chagge/kaldiproj
    valid_error = []
    while (not valid_sets.is_finish()):
        valid_sets.load_next_partition(valid_xy)
        for batch_index in xrange(valid_sets.cur_frame_num / batch_size):  # loop over mini-batches
            valid_error.append(valid_fn(index=batch_index))
    valid_sets.initialize_read()
    log('> epoch %d, lrate %f, validation error %f' % (lrate.epoch, lrate.get_rate(), numpy.mean(valid_error)))

    lrate.get_next_rate(current_error = 100 * numpy.mean(valid_error))

    # output conv layer config
    for i in xrange(len(conv_layer_configs)):
        conv_layer_configs[i]['activation'] = activation_to_txt(conv_activation)
        with open(wdir + '/conv.config.' + str(i), 'wb') as fp:
            json.dump(conv_layer_configs[i], fp, indent=2, sort_keys = True)
            fp.flush()

    # output the conv part
    _cnn2file(cnn.layers[0:len(conv_layer_configs)], filename=conv_output_file)
    # output the full part
    total_layer_number = len(cnn.layers)
    _nnet2file(cnn.layers[len(conv_layer_configs):total_layer_number], filename=wdir + '/nnet.finetune.tmp')
    _nnet2kaldi(str(cnn.conv_output_dim) + ':' + full_nnet_spec, filein = wdir + '/nnet.finetune.tmp', fileout = full_output_file)

    end_time = time.clock()
    print >> sys.stderr, ('The code for file ' +
                          os.path.split(__file__)[1] +
                          ' ran for %.2fm' % ((end_time - start_time) / 60.))
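
One portability note on the per-layer config dump above: json.dump produces str, so the 'wb' file mode only works on Python 2 and raises a TypeError on Python 3. A minimal Python 3 sketch of the same pattern (example_configs is made-up data for illustration, not taken from the project):

import json

example_configs = [
    {'filter_shape': (256, 1, 9, 9), 'activation': 'sigmoid'},
    {'filter_shape': (256, 256, 3, 4), 'activation': 'sigmoid'},
]
for i, config in enumerate(example_configs):
    # Text mode ('w'), since json.dump writes str, not bytes.
    with open('conv.config.' + str(i), 'w') as fp:
        json.dump(config, fp, indent=2, sort_keys=True)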