Example 1
                          num_hidden_proj=num_hidden_proj)
        data_names = [x[0] for x in data_test.provide_data]
        label_names = ['softmax_label']
        module = mx.mod.Module(sym,
                               context=contexts,
                               data_names=data_names,
                               label_names=label_names)
    # Bind the module for inference (no label shapes, not for training) and load the trained parameters
    module.bind(data_shapes=data_test.provide_data,
                label_shapes=None,
                for_training=False)
    module.set_params(arg_params=arg_params, aux_params=aux_params)

    #kaldiWriter = KaldiWriteOut(None, out_file)
    #kaldiWriter.open_or_fd()
    kaldiWriter = KaldiWriteOut(out_dir + "/feats.scp", out_dir + "/feats.ark")
    kaldiWriter.open()
    for preds, i_batch, batch in module.iter_predict(data_test):
        label = batch.label[0].asnumpy().astype('int32')
        posteriors = preds[0].asnumpy().astype('float32')
        # Convert posteriors to pseudo log-likelihoods (subtract the log state priors) and write one matrix per utterance
        if decoding_method == METHOD_BUCKETING:
            for (ind, utt) in enumerate(batch.utt_id):
                if utt != "GAP_UTT":
                    posteriors = np.log(posteriors[:label[0][0], 1:] +
                                        1e-20) - np.log(
                                            data_train.label_mean).T
                    kaldiWriter.write(utt, posteriors)
        elif decoding_method == METHOD_SIMPLE:
            for (ind, utt) in enumerate(batch.utt_id):
                if utt != "GAP_UTT":
Example 2
                          num_label=label_dim,
                          output_states=True,
                          num_hidden_proj=num_hidden_proj)
        data_names = [x[0] for x in data_test.provide_data]
        label_names = ['softmax_label']
        module = mx.mod.Module(sym,
                               context=contexts,
                               data_names=data_names,
                               label_names=label_names)
    # Bind the module for inference (no label shapes, not for training) and load the trained parameters
    module.bind(data_shapes=data_test.provide_data,
                label_shapes=None,
                for_training=False)
    module.set_params(arg_params=arg_params, aux_params=aux_params)

    kaldiWriter = KaldiWriteOut(None, out_file)
    kaldiWriter.open_or_fd()

    for preds, i_batch, batch in module.iter_predict(data_test):
        #pred_label = np.array(preds[0].asnumpy().argmax(axis=1))
        label = batch.label[0].asnumpy().astype('int32')
        posteriors = preds[0].asnumpy().astype('float32')[0]
        #print np.sum(posteriors[1][:])
        # Convert posteriors to pseudo log-likelihoods (subtract the log state priors) and write one matrix per utterance
        if decoding_method == METHOD_BUCKETING:
            for (ind, utt) in enumerate(batch.utt_id):
                if utt != "GAP_UTT":
                    #print sum(posteriors[0,:])
                    posteriors = np.log(posteriors[:label[0][0], 1:] +
                                        1e-20) - np.log(
                                            data_train.label_mean).T
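
Both examples divide the network's softmax posteriors by the state priors (data_train.label_mean) in log space before writing them out, which is the usual hybrid-DNN step of turning posteriors p(s|x) into scaled likelihoods p(x|s) for Kaldi decoding. Below is a minimal standalone sketch of that transformation only; the array names and the random data standing in for network outputs and priors are illustrative, not taken from the MXNet example.

import numpy as np

# Fake frame-level softmax posteriors: shape (frames, states), rows sum to 1.
posteriors = np.random.dirichlet(np.ones(10), size=5).astype('float32')

# Stand-in for the state priors (label_mean in the example above).
state_priors = posteriors.mean(axis=0)

# Pseudo log-likelihoods: log p(s|x) - log p(s), with a small floor to avoid log(0),
# mirroring the 1e-20 offset used in the snippets.
log_likes = np.log(posteriors + 1e-20) - np.log(state_priors + 1e-20)

print(log_likes.shape)  # (5, 10)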