def GetPredictions(model_file, train_op_file, output_dir, dataset='test'):
    """Extract output-layer representations for `dataset` into `output_dir`.

    Locks a GPU board for the duration of the extraction and releases it
    before returning.
    """
    board = tr.LockGPU()

    model = util.ReadModel(model_file)
    # Point the input layer's test field at the requested data split.
    model.layer[0].data_field.test = '%s_data' % dataset

    op = util.ReadOperation(train_op_file)
    op.verbose = False
    op.get_last_piece = True
    op.randomize = False

    ex.ExtractRepresentations(model_file, op, ['output_layer'], output_dir)
    tr.FreeGPU(board)
示例#2
0
def main():
  """Entry point: sample text from a trained model.

  argv: model_file op_file output_dir data_proto [gpu_mem] [main_mem]
  """
  model_file = sys.argv[1]
  op_file = sys.argv[2]
  output_dir = sys.argv[3]
  data_proto = sys.argv[4]
  # Optional memory limits with defaults.
  gpu_mem = sys.argv[5] if len(sys.argv) > 5 else '2G'
  main_mem = sys.argv[6] if len(sys.argv) > 6 else '30G'

  board = tr.LockGPU()
  SampleText(model_file, op_file, output_dir, data_proto, gpu_mem, main_mem)
  tr.FreeGPU(board)
示例#3
0
def main(job_id, params):
    """Train one deepnet configuration and return its last validation error.

    `job_id` is folded into the model name so concurrent jobs don't collide;
    `params` are applied as hyperparameter overrides before training.
    """
    board = trainer.LockGPU()
    cwd = os.getcwd()
    model, train_op, eval_op = trainer.LoadExperiment(
        os.path.join(cwd, 'deepnet_base.pbtxt'),
        os.path.join(cwd, 'train.pbtxt'),
        os.path.join(cwd, 'eval.pbtxt'))
    model.name = 'deepnet_%d' % job_id
    ModifyHyperparameters(model, params)

    net = trainer.CreateDeepnet(model, train_op, eval_op)
    net.Train()
    trainer.FreeGPU(board)
    return GetLastValidationError(net)
示例#4
0
def main():
    """k-NN driver: find the K nearest targets for each pattern and save
    distances, indices, and neighbour labels under `output_dir`.

    argv: patternfile targetfile label_file output_dir [K] [batchsize]
    Outputs both .npy arrays and .mat files.
    """
    patternfile = sys.argv[1]
    targetfile = sys.argv[2]
    label_file = sys.argv[3]
    output_dir = sys.argv[4]
    # NOTE(review): hard-coded, machine-specific stats path — confirm it is
    # still valid in the deployment environment.
    statiticsFile = '/data1/ningzhang/flickr/flickr_stats.npz'

    batchsize = 128
    K = 5
    if len(sys.argv) > 5:
        # argv entries are strings; the original assigned them unconverted,
        # which breaks any arithmetic/indexing downstream.
        K = int(sys.argv[5])
    if len(sys.argv) > 6:
        batchsize = int(sys.argv[6])
    # Removed: leftover pdb.set_trace() debugger breakpoint and dead
    # gpu_mem/main_mem assignments that were never used.

    board = tr.LockGPU()
    targets = np.load(targetfile)
    patterns = np.load(patternfile)
    labels = np.load(label_file)
    stats = np.load(statiticsFile)
    dist, minDist_indices, neibor_labels = Knn(patterns, targets, batchsize, K,
                                               labels, stats)

    dist_dir = os.path.join(output_dir, 'distance')
    indices_dir = os.path.join(output_dir, 'indices')
    labels_dir = os.path.join(output_dir, 'labels')
    np.save(dist_dir, dist)
    np.save(indices_dir, minDist_indices)
    np.save(labels_dir, neibor_labels)
    sio.savemat(os.path.join(output_dir, 'distance_mat'), {'distance': dist})
    sio.savemat(os.path.join(output_dir, 'indices_mat'),
                {'indices': minDist_indices})
    sio.savemat(os.path.join(output_dir, 'labels_mat'),
                {'labels': neibor_labels})
    tr.FreeGPU(board)
示例#5
0
            elif args.infer_method == 'exact':
                pll, imperr = impute_rbm_exact(model)
            elif args.infer_method == 'gaussian_exact':
                pll, imperr = impute_rbm_gaussian_exact(model)
            else:
                raise ValueError("Unknown infer method")

            pll, imperr = pll.flatten(), imperr.flatten()
            pll_data[data_type].append(pll)
            imperr_data[data_type].append(imperr)

        pll_data[data_type] = np.concatenate(pll_data[data_type])
        imperr_data[data_type] = np.concatenate(imperr_data[data_type])

    #-------------------------------------------------------------------
    # Print and save the results
    for dtype in pll_data:
        pll = pll_data[dtype]
        imperr = imperr_data[dtype]
        print '%s : Pseudo-LogLikelihood %.5f, std %.5f' % (dtype, pll.mean(),
                                                            pll.std())
        print '%s : Imputation Error %.5f, std %.5f' % (dtype, imperr.mean(),
                                                        imperr.std())

    tr.FreeGPU(board)

    import pickle
    with open(args.outf, 'wb') as fout:
        pkldata = {'pll': pll_data, 'imperr': imperr_data}
        pickle.dump(pkldata, fout)
示例#6
0
File: knn.py  Project: gitxchen/DeepEAR
def main():
    """Shard-wise k-NN: merge the K nearest neighbours of each pattern
    across multiple target files matched by a glob pattern.

    argv: pattern_glob target_glob output_dir [label_file] [K] [batchsize]
    Saves distances, global neighbour indices, and (when a label file is
    given) neighbour labels to `output_dir` as both .npy and .mat files.
    """
    patternFilePattern = sys.argv[1]
    targetFilePattern = sys.argv[2]
    output_dir = sys.argv[3]
    have_labels = len(sys.argv) > 4
    if have_labels:
        label_file = sys.argv[4]
    # NOTE(review): hard-coded, machine-specific stats path — confirm.
    statiticsFile = '/data1/ningzhang/flickr/flickr_stats.npz'

    batchsize = 1000
    K = 5
    if len(sys.argv) > 5:
        # argv entries are strings; the original used them unconverted.
        K = int(sys.argv[5])
    if len(sys.argv) > 6:
        batchsize = int(sys.argv[6])
    # Removed: leftover pdb.set_trace() breakpoint and dead gpu_mem/main_mem
    # assignments that were never used.

    board = tr.LockGPU()
    patternFiles = sorted(glob.glob(patternFilePattern))
    targetFiles = sorted(glob.glob(targetFilePattern))

    stats = np.load(statiticsFile)

    # Stack all pattern shards into one (m, d) matrix.
    patternlist = [np.load(f) for f in patternFiles]
    m = sum(shard.shape[0] for shard in patternlist)
    patterns = np.zeros((m, patternlist[0].shape[1]))
    pos = 0
    for shard in patternlist:
        patterns[pos:pos + shard.shape[0], :] = shard
        pos += shard.shape[0]

    pos = 0  # offset of the current target shard in the global index space
    dist_pool = np.zeros((1, 2 * K))

    if have_labels:
        labels = np.load(label_file)
    for targetFile in targetFiles:
        targets = np.load(targetFile)
        if have_labels:
            dist_interm, minDist_indices_interm, neibor_labels_interm = Knn(
                patterns, targets, batchsize, K, labels, stats)
        else:
            dist_interm, minDist_indices_interm = Knn(patterns, targets,
                                                      batchsize,
                                                      K)  #, stats = stats)

        if pos == 0:
            dist = np.copy(dist_interm)
            minDist_indices = np.copy(minDist_indices_interm)
            if have_labels:
                neibor_labels = np.copy(neibor_labels_interm)
        else:
            # Merge the running top-K with this shard's top-K, per pattern.
            K_new = min(K, targets.shape[0])
            if K_new < K:
                dist_pool = np.zeros((1, K + K_new))
            for i in range(m):
                dist_pool[0, 0:K] = dist[i, 0:K]
                dist_pool[0, K:K + K_new] = dist_interm[i, 0:K_new]
                order = dist_pool.argsort().flatten()
                dist[i, :] = dist_pool[0, order[0:K]]
                # Snapshot the old row: it is overwritten in the loop below.
                old_idx = minDist_indices[i, :].copy()
                for j in range(K):
                    src = order[j]
                    # BUG FIX: the original indexed the source rows with j
                    # instead of the argsort position, so the merged indices
                    # did not correspond to the merged distances (and it read
                    # minDist_indices while overwriting it).
                    if src < K:
                        minDist_indices[i, j] = old_idx[src]
                    else:
                        minDist_indices[i, j] = (
                            minDist_indices_interm[i, src - K] + pos)
                    if have_labels:
                        neibor_labels[i, j] = labels[minDist_indices[i, j], :]

        pos += targets.shape[0]

    np.save(os.path.join(output_dir, 'distance'), dist)
    np.save(os.path.join(output_dir, 'indices'), minDist_indices)
    sio.savemat(os.path.join(output_dir, 'distance_mat'), {'distance': dist})
    sio.savemat(os.path.join(output_dir, 'indices_mat'),
                {'indices': minDist_indices})
    if have_labels:
        # BUG FIX: neibor_labels is only defined when a label file was given;
        # saving it unconditionally raised NameError in the unlabeled path.
        np.save(os.path.join(output_dir, 'labels'), neibor_labels)
        sio.savemat(os.path.join(output_dir, 'labels_mat'),
                    {'labels': neibor_labels})
    tr.FreeGPU(board)