Example #1
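This excerpt starts partway through the script, so the preamble below is only a minimal sketch of the setup it relies on: the module paths follow the usual PDNN layout, the RNG seed is an arbitrary placeholder, and nnet_cfg, nnet_param, data_spec, layer_index and batch_size are normally parsed from the command line rather than defined as shown here.

    # minimal assumed preamble (not part of the original excerpt): module paths
    # follow the usual PDNN layout and the seed is an arbitrary placeholder
    import math
    import pickle

    import numpy
    from theano.tensor.shared_randomstreams import RandomStreams

    from models.dnn import DNN
    from models.cnn import CNN
    from io_func import smart_open
    from io_func.model_io import _file2nnet, log

    # nnet_cfg, nnet_param, data_spec, layer_index and batch_size are normally
    # taken from the command-line arguments of the full script
    numpy_rng = numpy.random.RandomState(123)
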
    # seed the Theano RNG from the NumPy RNG, load the pickled network
    # configuration, and instantiate the corresponding model
    theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
    cfg = pickle.load(smart_open(nnet_cfg, 'rb'))
    cfg.init_activation()
    model = None
    if cfg.model_type == 'DNN':
        model = DNN(numpy_rng=numpy_rng, theano_rng=theano_rng, cfg=cfg)
    elif cfg.model_type == 'CNN':
        model = CNN(numpy_rng=numpy_rng, theano_rng=theano_rng, cfg=cfg, testing=True)

    # load model parameters
    _file2nnet(model.layers, path=nnet_param)

    # initialize data reading
    cfg.init_data_reading_test(data_spec)

    # report the sizes of the model's layers
    model.dumpLayerSize()

    # get the function for feature extraction
    log('> ... getting the feat-extraction function for layer='+str(layer_index))
    extract_func = model.build_extract_feat_function(layer_index)

    output_mats = []    # store the features for all the data in memory. TODO: output the features in a streaming mode
    log('> ... generating features from the specified layer')
    while not cfg.test_sets.is_finish():  # loop over the data partitions
        cfg.test_sets.load_next_partition(cfg.test_xy)

        if batch_size == -1:
            # feed the whole partition through the extraction function at once
            output = extract_func(cfg.test_x.get_value())
            output_mats.append(output)
        else:
            batch_num = int(math.ceil(1.0 * cfg.test_sets.cur_frame_num / batch_size))
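
The listing breaks off here. Under the assumption that the batched branch mirrors the single-shot branch above, it would apply extract_func to consecutive batch_size-sized slices of the loaded partition, roughly as sketched:

            # assumed continuation of the truncated else-branch: run the
            # extraction function over consecutive batch_size-sized slices
            for batch_index in range(batch_num):
                s_index = batch_index * batch_size
                e_index = min(s_index + batch_size, cfg.test_sets.cur_frame_num)
                output = extract_func(cfg.test_x.get_value()[s_index:e_index])
                output_mats.append(output)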