Example #1
def EditModels(args):

    # Common changes in all models
    model_files = glob.glob("models/*.pbtxt")
    for model_file in model_files:
        model = util.ReadModel(model_file)
        model.hyperparams.base_epsilon = args.base_epsilon
        model.hyperparams.epsilon_decay = deepnet_pb2.Hyperparams.INVERSE_T \
                if args.epsilon_decay else deepnet_pb2.Hyperparams.NONE
        model.hyperparams.sparsity = args.sparsity
        model.hyperparams.dropout = args.dropout
        model.hyperparams.l2_decay = args.l2_decay
        model.hyperparams.initial_momentum = args.initial_momentum
        model.hyperparams.final_momentum = args.final_momentum
        with open(model_file, 'w') as f:
            text_format.PrintMessage(model, f)

    # Specific changes to rbm1
    model_file = os.path.join('models', 'rbm1.pbtxt')
    model = util.ReadModel(model_file)
    for layer in model.layer:
        if layer.name == 'input_layer':
            layer.dimensions = args.input_width
            layer.numlabels = args.input_numlabels
        if layer.name == 'hidden1':
            layer.dimensions = args.hidden1_width
    with open(model_file, 'w') as f:
        text_format.PrintMessage(model, f)

    # Specific changes to rbm2
    model_file = os.path.join('models', 'rbm2.pbtxt')
    model = util.ReadModel(model_file)
    for layer in model.layer:
        if layer.name == 'hidden1':
            layer.dimensions = args.hidden1_width
        if layer.name == 'hidden2':
            layer.dimensions = args.hidden2_width
    with open(model_file, 'w') as f:
        text_format.PrintMessage(model, f)

    # Specific changes to joint
    model_file = os.path.join('models', 'joint.pbtxt')
    model = util.ReadModel(model_file)
    model.prefix = args.model_dir
    for layer in model.layer:
        if layer.name == 'input_layer':
            layer.dimensions = args.input_width
            layer.numlabels = args.input_numlabels
        if layer.name == 'hidden1':
            layer.dimensions = args.hidden1_width
        if layer.name == 'hidden2':
            layer.dimensions = args.hidden2_width

    with open(model_file, 'w') as f:
        text_format.PrintMessage(model, f)
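
The args object is only read for its attributes, so any argparse namespace with matching names will do. Below is a minimal driver sketch; the flag names simply mirror the attributes used above, and the defaults are illustrative placeholders, not values from the original project.

import argparse

# Hypothetical driver for EditModels; flag names mirror the attributes read
# above, default values are placeholders.
parser = argparse.ArgumentParser()
parser.add_argument('--base_epsilon', type=float, default=0.01)
parser.add_argument('--epsilon_decay', action='store_true')
parser.add_argument('--sparsity', action='store_true')
parser.add_argument('--dropout', action='store_true')
parser.add_argument('--l2_decay', type=float, default=0.0)
parser.add_argument('--initial_momentum', type=float, default=0.5)
parser.add_argument('--final_momentum', type=float, default=0.9)
parser.add_argument('--input_width', type=int, default=784)
parser.add_argument('--input_numlabels', type=int, default=2)
parser.add_argument('--hidden1_width', type=int, default=1024)
parser.add_argument('--hidden2_width', type=int, default=1024)
parser.add_argument('--model_dir', default='/path/to/checkpoints')
EditModels(parser.parse_args())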
Example #2
def EditModelsDBM(args):
    """ DBM specific only """

    if args.model not in ['dbm', 'lcdbm']:
        raise ValueError('Unknown model {}'.format(args.model))

    # Common changes in all models
    model_files = glob.glob("models/*.pbtxt")
    for model_file in model_files:
        model = util.ReadModel(model_file)
        model.hyperparams.base_epsilon = args.base_epsilon
        model.hyperparams.sparsity = args.sparsity
        model.hyperparams.dropout = args.dropout
        model.hyperparams.l2_decay = args.l2_decay
        model.hyperparams.initial_momentum = args.initial_momentum
        model.hyperparams.final_momentum = args.final_momentum
        with open(model_file, 'w') as f:
            text_format.PrintMessage(model, f)

    # Specific changes to rbm2
    model_file = os.path.join('models', 'rbm2.pbtxt')
    model = util.ReadModel(model_file)
    for layer in model.layer:
        if layer.name == 'hidden1' or layer.name == 'bernoulli_hidden1':
            layer.dimensions = args.hidden1_width
        if layer.name == 'hidden2':
            layer.dimensions = args.hidden2_width
    with open(model_file, 'w') as f:
        text_format.PrintMessage(model, f)

    # Specific changes to joint
    model_file = os.path.join('models', 'joint.pbtxt')
    model = util.ReadModel(model_file)
    model.prefix = args.model_dir
    for layer in model.layer:
        if layer.name == 'input_layer':
            layer.dimensions = args.input_width
            layer.numlabels = args.input_numlabels
        if layer.name == 'bernoulli_hidden1' or layer.name == 'hidden1':
            layer.dimensions = args.hidden1_width
        if layer.name == 'hidden2':
            layer.dimensions = args.hidden2_width

    # Sparsity mask
    if args.model in ['lcdbm']:
        edge = next(e for e in model.edge if e.node1 == 'input_layer' and \
                e.node2 == 'bernoulli_hidden1')
        param = next(p for p in edge.param if p.name == 'weight')
        sparsity_mask_file = param.sparsity_mask
        param.sparsity_mask = os.path.join(args.data_dir, sparsity_mask_file)

    with open(model_file, 'w') as f:
        text_format.PrintMessage(model, f)
Example #3
def main():
  path = sys.argv[1]
  output_file = sys.argv[2]
  #import pdb
  #pdb.set_trace()
  layers = ['ZCR', 'MFCC', 'Spectral', 'Energy',
            'Chroma', 'PLP']
  MS = ['withMS', 'withoutMS']
  predsMap = {}
  f = open(output_file, 'w')
  f.write('\\begin{tabular}{|c|c|c|c|c|c|c|} \\hline \n')
  f.write('MS &ZCR &MFCC &Spectral &Energy &Chroma &PLP \\\\ \\hline\n')
  for ms in MS:
    for layer in layers:
      mfile = os.path.join(path, ms, '%s_dnn_BEST' % layer)
      if not os.path.exists(mfile):
        if ms not in predsMap:
          predsMap[ms] = []
        predsMap[ms].append(0.0)
        continue
      model = util.ReadModel(mfile)
      preds = model.test_stat_es.correct_preds/model.test_stat_es.count
      if ms not in predsMap:
        predsMap[ms] = []
      predsMap[ms].append(preds)

  for ms in MS:
    f.write(ms)
    for item in predsMap[ms]:
      f.write('&%.4f' % item)
    f.write(' \\\\ \\hline\n')
  f.write('\\end{tabular}\n')
  f.close()
Example #4
def MakeModels(model_file, output_path):
    model = util.ReadModel(model_file)
    for l in model.layer:
        for p in l.param:
            EditPretrainedModels(p, output_path)
    for e in model.edge:
        for p in e.param:
            EditPretrainedModels(p, output_path)
    util.WritePbtxt(model_file, model)
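
A minimal invocation sketch: every parameter of every layer and edge in the given model file is passed through EditPretrainedModels (not shown here) and the model is written back in place. The model path below reuses a file name that appears in a later example; the output path is a placeholder.

# Hypothetical call with placeholder arguments.
MakeModels('models/multimodal_dbn.pbtxt', '/path/to/pretrained_models')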
Example #5
def Convert(model_file, output_file):
    model = util.ReadModel(model_file)
    params = {}
    for l in model.layer:
        for p in l.param:
            params['%s_%s' % (l.name, p.name)] = util.ParameterAsNumpy(p)
    for e in model.edge:
        for p in e.param:
            params['%s_%s_%s' %
                   (e.node1, e.node2, p.name)] = util.ParameterAsNumpy(p)

    scipy.io.savemat(output_file, params, oned_as='column')
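
The .mat file written by Convert keys layer parameters as '<layer>_<param>' and edge parameters as '<node1>_<node2>_<param>', so the dump can be sanity-checked with scipy. A small sketch, with a placeholder file name:

import scipy.io

# Load the parameters written by Convert() and print each array's shape.
# 'params.mat' stands in for whatever output_file was used above.
params = scipy.io.loadmat('params.mat')
for name, value in params.items():
    if not name.startswith('__'):  # skip MATLAB metadata entries
        print(name, value.shape)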

Example #6

def GetPredictions(model_file, train_op_file, output_dir, dataset='test'):
    board = tr.LockGPU()
    model = util.ReadModel(model_file)
    model.layer[0].data_field.test = '%s_data' % dataset

    train_op = util.ReadOperation(train_op_file)
    train_op.verbose = False
    train_op.get_last_piece = True
    train_op.randomize = False

    layernames = ['output_layer']
    ex.ExtractRepresentations(model_file, train_op, layernames, output_dir)
    tr.FreeGPU(board)
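
A usage sketch with placeholder paths: the trained model and its training operation proto are reused to extract the 'output_layer' representations for the chosen dataset split into output_dir.

# Hypothetical paths; a GPU board is locked for the duration of the call.
GetPredictions('models/classifier_BEST', 'trainers/train_op.pbtxt',
               '/path/to/predictions', dataset='test')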
Example #7
def main():
    filePath = sys.argv[1]
    outputPath = sys.argv[2]
    classifiers = sorted(glob.glob(os.path.join(filePath, "*BEST")))
    f = open(outputPath, 'w')
    for classifier in classifiers:
        baseName = os.path.basename(classifier)
        if "op" in baseName:
            continue
        model = util.ReadModel(classifier)
        testPreds = model.test_stat_es.correct_preds / model.test_stat_es.count
        trainPreds = model.train_stat_es.correct_preds / model.train_stat_es.count
        validPreds = model.best_valid_stat.correct_preds / model.best_valid_stat.count
        f.write(
            '%s train: %.5f valid: %.5f test: %.5f \n' %
            (baseName, trainPreds, validPreds, testPreds))

    f.close()
Example #8
def change_model(proto, dimensions, layers=None):
  # dimensions: new size written into the dimensions field of each listed layer.
  model = util.ReadModel(proto)
  if layers is None:
    layers = ['image_hidden1', 'image_hidden2', 'image_hidden3',
              'text_hidden1', 'text_hidden2', 'text_hidden3',
              'image_layer', 'text_layer', 'joint_layer',
              'image_tied_hidden', 'text_tied_hidden',
              'image_hidden2_recon', 'text_hidden2_recon',
              'cross_image_hidden2_recon', 'cross_text_hidden2_recon']
  
  for layer in layers:
    try:
      layer_proto = next(lay for lay in model.layer if lay.name == layer)
      layer_proto.dimensions = dimensions
    except StopIteration:
      pass
  
  with open(proto, 'w') as f:
    text_format.PrintMessage(model, f)
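
A usage sketch with placeholder values: each listed layer that exists in the proto gets its dimensions field set to the given size, and the file is rewritten in place.

# Hypothetical call; the .pbtxt path and the new layer size are placeholders.
change_model('models/corr_ae.pbtxt', dimensions=2048)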
Example #9
def EditTrainers(data_dir, model_dir, rep_dir, numsplits):
    tnames = [
        'train_CD_image_layer1.pbtxt', 'train_CD_image_layer2.pbtxt',
        'train_CD_text_layer1.pbtxt', 'train_CD_text_layer2.pbtxt',
        'train_CD_joint_layer.pbtxt'
    ]
    for tname in tnames:
        t_op_file = os.path.join('trainers', 'dbn', tname)
        t_op = util.ReadOperation(t_op_file)
        if 'layer1' in tname:
            t_op.data_proto_prefix = data_dir
        else:
            t_op.data_proto_prefix = rep_dir
        t_op.checkpoint_directory = model_dir
        with open(t_op_file, 'w') as f:
            text_format.PrintMessage(t_op, f)

    t_op_file = os.path.join('trainers', 'classifiers', 'baseclassifier.pbtxt')
    t_op = util.ReadOperation(t_op_file)
    for i in range(1, numsplits + 1):
        t_op_file = os.path.join('trainers', 'classifiers',
                                 'split_%d.pbtxt' % i)
        t_op.data_proto_prefix = rep_dir
        t_op.data_proto = os.path.join('split_%d' % i, 'data.pbtxt')
        t_op.checkpoint_prefix = model_dir
        t_op.checkpoint_directory = os.path.join('classifiers', 'split_%d' % i)
        with open(t_op_file, 'w') as f:
            text_format.PrintMessage(t_op, f)

    # Change prefix in multimodal dbn model
    mnames = ['multimodal_dbn.pbtxt']
    for mname in mnames:
        model_file = os.path.join('models', mname)
        model = util.ReadModel(model_file)
        model.prefix = model_dir
        with open(model_file, 'w') as f:
            text_format.PrintMessage(model, f)
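
A minimal invocation sketch with placeholder directories; numsplits controls how many split_<i> classifier trainer files are written out from baseclassifier.pbtxt.

# Hypothetical paths for the data protos, model checkpoints and extracted
# representations that the trainer protos should point at.
EditTrainers('/path/to/data', '/path/to/checkpoints', '/path/to/reps', numsplits=5)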
Example #10
def main():
    path = sys.argv[1]
    numsplits = int(sys.argv[2])
    output_file = sys.argv[3]

    layers = [
        'RNA1seq_input', 'RNA1seq_hidden1', 'RNA1seq_hidden2', 'joint_hidden',
        'RNA2seq_hidden2', 'RNA2seq_hidden1', 'RNA2seq_input'
    ]
    maps = {}
    precs = {}
    for i in range(1, numsplits + 1):
        for layer in layers:
            mfile = os.path.join(path, 'split_%d' % i,
                                 '%s_classifier_BEST' % layer)
            model = util.ReadModel(mfile)
            MAP = model.test_stat_es.MAP
            prec50 = model.test_stat_es.prec50
            if layer not in maps:
                maps[layer] = []
            if layer not in precs:
                precs[layer] = []
            maps[layer].append(MAP)
            precs[layer].append(prec50)

    f = open(output_file, 'w')
    f.write('\\begin{tabular}{|l|c|c|} \\hline \n')
    f.write('Layer & MAP & Prec@50 \\\\ \\hline\n')
    for layer in layers:
        lmap = np.array(maps[layer])
        lprec = np.array(precs[layer])
        f.write('%s & %.3f $\\pm$ %.3f & %.3f $\\pm$ %.3f \\\\ \n' %
                (layer, lmap.mean(), lmap.std(), lprec.mean(), lprec.std()))
    f.write('\\hline\n')
    f.write('\\end{tabular}\n')
    f.close()
Example #11
def Convert(args,
            dirpath,
            mat_file,
            dump_npy=False,
            out_file='rbm_mrf',
            model_file=None):
    """ Create the necesarry things"""
    matfile = sio.loadmat(mat_file)

    if args.minfill:
        # get the weight matrix
        weight = np.asarray(matfile['minL'], dtype='float32')
        Pmat = matfile['Pmat']
        weight = weight.dot(Pmat)
        weight = weight.T
    elif args.random:
        # get the weight matrix
        weight = np.asarray(matfile['L'].T, dtype='float32')
        weight[np.abs(weight) < 1e-8] = 0.0
        nnz = np.count_nonzero(weight)
        nNodes = weight.shape[0]
        weight = np.random.randn(nNodes**2, 1)
        rangeIdx = np.arange(nNodes**2)
        np.random.shuffle(rangeIdx)
        weight[rangeIdx[nnz:]] = 0.
        weight = weight.reshape(nNodes, nNodes)
        print("nnz: {}".format(nnz))
    elif args.thresh:
        # get both the weight matrices
        if not -0.00001 < args.thresh < 100.000001:
            raise ValueError("Threshold should be between 0 and 100")

        weight = np.asarray(matfile['L'].T, dtype='float32')
        weight_minfill = np.asarray(matfile['minL'], dtype='float32')
        nnz = np.sum(np.abs(weight) > 1e-10)
        nnz_minfill = np.sum(np.abs(weight_minfill) > 1e-10)
        num_to_delete = int((nnz - nnz_minfill) * args.thresh / 100.)
        weight_nnz = np.abs(weight[np.abs(weight) > 1e-10])
        threshold = np.sort(weight_nnz)[num_to_delete]
        weight[np.abs(weight) < threshold] = 0.0
    else:
        weight = np.asarray(matfile['L'].T, dtype='float32')

    nFeats, _ = weight.shape
    diag = np.ones([nFeats, 1]) * matfile['min_eig'] * (1 + matfile['alpha'])
    diag = np.asarray(diag, dtype='float32')

    if dump_npy:
        if args.edge_input_file:
            edge_file = os.path.join(dirpath, args.edge_input_file)
        else:
            edge_file = os.path.join(dirpath, 'edge_input_to_gaussian.npy')
        diag_file = os.path.join(dirpath, 'diag_gaussian.npy')
        np.save(edge_file, weight)
        np.save(diag_file, diag)
    else:
        model = util.ReadModel(model_file)
        proto_weight = next(param for param in model.edge[0].param
                            if param.name == 'weight')
        proto_weight.mat = util.NumpyAsParameter(weight)
        proto_weight.dimensions.extend(weight.shape)

        input_layer = next(l for l in model.layer if l.name == 'input_layer')
        proto_diag = next(param for param in input_layer.param
                          if param.name == 'diag')
        proto_diag.mat = util.NumpyAsParameter(diag)
        proto_diag.dimensions.extend(diag.shape)

        out_file = os.path.join(dirpath, out_file)
        f = gzip.open(out_file, 'wb')
        f.write(model.SerializeToString())
        f.close()
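
The gzip-compressed file written at the end is a serialized protobuf, so it can presumably be parsed back into the same message type. A minimal sketch, assuming the message class is deepnet_pb2.Model (the class name is an assumption; util.ReadModel's return type is not shown here):

import gzip

from deepnet import deepnet_pb2  # message module used elsewhere in these examples


def LoadSerializedModel(path):
    # Read the gzip-compressed model written by Convert() and parse it back
    # into a Model message (assumed message type).
    model = deepnet_pb2.Model()
    with gzip.open(path, 'rb') as f:
        model.ParseFromString(f.read())
    return model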
Example #12
def main():
    model_file = sys.argv[1]
    base_output_dir = sys.argv[2]
    rep_dir = sys.argv[3]
    prefix = sys.argv[4]
    gpu_mem = sys.argv[5]
    main_mem = sys.argv[6]
    model = util.ReadModel(model_file)
    data_pb = deepnet_pb2.Dataset()
    data_pb.name = model.name
    data_pb.gpu_memory = gpu_mem
    data_pb.main_memory = main_mem
    output_dir = os.path.join(base_output_dir, 'validation')
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    output_proto_file = os.path.join(base_output_dir, 'data.pbtxt')

    # IMAGE PATHWAY
    img_input_pbtxt = os.path.join(prefix, 'flickr.pbtxt')
    img_hidden1_pbtxt = os.path.join(rep_dir, 'image_rbm1_LAST', 'data.pbtxt')
    img_hidden2_pbtxt = os.path.join(rep_dir, 'image_rbm2_LAST', 'data.pbtxt')

    # TEXT PATHWAY
    text_input_pbtxt = os.path.join(prefix, 'flickr_nnz.pbtxt')
    text_hidden1_pbtxt = os.path.join(rep_dir, 'text_rbm1_LAST', 'data.pbtxt')
    text_hidden2_pbtxt = os.path.join(rep_dir, 'text_rbm2_LAST', 'data.pbtxt')
    text_pbtxt_z = os.path.join(rep_dir, 'generated_text', 'data.pbtxt')

    joint_pbtxt = os.path.join(rep_dir, 'joint_rbm_LAST', 'data.pbtxt')

    img_input_pb = util.ReadData(img_input_pbtxt)
    data = next(d for d in img_input_pb.data if d.name == 'image_labelled')
    data.file_pattern = os.path.join(img_input_pb.prefix, data.file_pattern)
    data.stats_file = os.path.join(img_input_pb.prefix, data.stats_file)
    data.name = 'image_input'
    data_pb.data.extend([data])

    img_hidden1_pb = util.ReadData(img_hidden1_pbtxt)
    data = next(d for d in img_hidden1_pb.data
                if d.name == 'image_hidden1_validation')
    data.file_pattern = os.path.join(img_hidden1_pb.prefix, data.file_pattern)
    data.name = 'image_hidden1'
    data_pb.data.extend([data])

    img_hidden2_pb = util.ReadData(img_hidden2_pbtxt)
    data = next(d for d in img_hidden2_pb.data
                if d.name == 'image_hidden2_validation')
    data.file_pattern = os.path.join(img_hidden2_pb.prefix, data.file_pattern)
    data.name = 'image_hidden2'
    data_pb.data.extend([data])

    indices_file = os.path.join(prefix, 'text', 'indices_labelled.npz')
    indices = np.load(indices_file)
    nnz_indices = indices['nnz_indices']
    z_indices = indices['z_indices']

    text_pb_z = util.ReadData(text_pbtxt_z)
    text_input_pb = util.ReadData(text_input_pbtxt)
    data_nnz = next(d for d in text_input_pb.data if d.name == 'text_labelled')
    data_z = next(d for d in text_pb_z.data
                  if d.name == 'text_input_layer_validation')
    output_file = os.path.join(output_dir, 'text_input-00001-of-00001.npy')
    data = Merge(data_nnz, data_z, nnz_indices, z_indices, text_pb_z.prefix,
                 text_input_pb.prefix, 'text_input', output_file)
    data_pb.data.extend([data])

    text_hidden1_pb = util.ReadData(text_hidden1_pbtxt)
    data_nnz = next(d for d in text_hidden1_pb.data
                    if d.name == 'text_hidden1_validation')
    data_z = next(d for d in text_pb_z.data
                  if d.name == 'text_hidden1_validation')
    output_file = os.path.join(output_dir, 'text_hidden1-00001-of-00001.npy')
    data = Merge(data_nnz, data_z, nnz_indices, z_indices, text_pb_z.prefix,
                 text_hidden1_pb.prefix, 'text_hidden1', output_file)
    data_pb.data.extend([data])

    text_hidden2_pb = util.ReadData(text_hidden2_pbtxt)
    data_nnz = next(d for d in text_hidden2_pb.data
                    if d.name == 'text_hidden2_validation')
    data_z = next(d for d in text_pb_z.data
                  if d.name == 'text_hidden2_validation')
    output_file = os.path.join(output_dir, 'text_hidden2-00001-of-00001.npy')
    data = Merge(data_nnz, data_z, nnz_indices, z_indices, text_pb_z.prefix,
                 text_hidden2_pb.prefix, 'text_hidden2', output_file)
    data_pb.data.extend([data])

    joint_pb = util.ReadData(joint_pbtxt)
    data_nnz = next(d for d in joint_pb.data
                    if d.name == 'joint_hidden_validation')
    data_z = next(d for d in text_pb_z.data
                  if d.name == 'joint_hidden_validation')
    output_file = os.path.join(output_dir, 'joint_hidden-00001-of-00001.npy')
    data = Merge(data_nnz, data_z, nnz_indices, z_indices, text_pb_z.prefix,
                 joint_pb.prefix, 'joint_hidden', output_file)
    data_pb.data.extend([data])

    with open(output_proto_file, 'w') as f:
        text_format.PrintMessage(data_pb, f)
Example #13
from deepnet import util
from deepnet import visualize
import sys

m = util.ReadModel(sys.argv[1])
w = util.ParameterAsNumpy(m.edge[0].param[0])
pvh = visualize.display_convw2(w, 5, 8, 8, 1)
raw_input('Press enter.')

Example #14
def main():
  model_file = sys.argv[1]
  base_output_dir = sys.argv[2]
  rep_dir = sys.argv[3]
  prefix = sys.argv[4]
  gpu_mem = sys.argv[5]
  main_mem = sys.argv[6]
  model = util.ReadModel(model_file)
  data_pb = deepnet_pb2.Dataset()
  data_pb.name = model.name
  data_pb.gpu_memory = gpu_mem
  data_pb.main_memory = main_mem
  output_dir = os.path.join(base_output_dir, 'validation')
  if not os.path.isdir(output_dir):
    os.makedirs(output_dir)
  output_proto_file = os.path.join(base_output_dir, 'data.pbtxt')

  # IMAGE PATHWAY
  img_input_pbtxt = os.path.join(prefix, 'RNAseq.pbtxt')
  img_hidden1_pbtxt = os.path.join(rep_dir, 'RNA1seq_rbm1_LAST', 'data.pbtxt')
  #img_hidden2_pbtxt = os.path.join(rep_dir, 'RNA1seq_rbm2_LAST', 'data.pbtxt')
 
  # TEXT PATHWAY
  text_input_pbtxt = os.path.join(prefix, 'RNAseq.pbtxt')
  text_hidden1_pbtxt = os.path.join(rep_dir, 'RNA2seq_rbm1_LAST', 'data.pbtxt')
  #text_hidden2_pbtxt = os.path.join(rep_dir, 'RNA2seq_rbm2_LAST', 'data.pbtxt')
  #text_pbtxt_z = os.path.join(rep_dir, 'generated_text', 'data.pbtxt')
  
  joint_pbtxt = os.path.join(rep_dir, 'joint_rbm_LAST', 'data.pbtxt')
  joint2_pbtxt = os.path.join(rep_dir, 'joint_rbm2_LAST', 'data.pbtxt')

  
  img_input_pb = util.ReadData(img_input_pbtxt)
  data = next(d for d in img_input_pb.data if d.name == 'RNA1seq_train')
  data.file_pattern = os.path.join(img_input_pb.prefix, data.file_pattern)
  #data.stats_file = os.path.join(img_input_pb.prefix, data.stats_file)
  data.name = 'RNA1seq_input'
  data_pb.data.extend([data])

  img_hidden1_pb = util.ReadData(img_hidden1_pbtxt)
  data = next(d for d in img_hidden1_pb.data if d.name == 'RNA1seq_hidden1_train')
  data.file_pattern = os.path.join(img_hidden1_pb.prefix, data.file_pattern)
  data.name = 'RNA1seq_hidden1'
  data_pb.data.extend([data])

  #img_hidden2_pb = util.ReadData(img_hidden2_pbtxt)
  #data = next(d for d in img_hidden2_pb.data if d.name == 'RNA1seq_hidden2_train')
  #data.file_pattern = os.path.join(img_hidden2_pb.prefix, data.file_pattern)
  #data.name = 'RNA1seq_hidden2'
  #data_pb.data.extend([data])
  
  #indices_file = os.path.join(prefix, 'text', 'indices_labelled.npz')
  #indices = np.load(indices_file)
  #nnz_indices = indices['nnz_indices']
  #z_indices = indices['z_indices']

  #text_pb_z = util.ReadData(text_pbtxt_z)
  text_input_pb = util.ReadData(text_input_pbtxt)
  data = next(d for d in text_input_pb.data if d.name == 'RNA2seq_train')
  data.file_pattern = os.path.join(text_input_pb.prefix, data.file_pattern)
  data.name = 'RNA2seq_input'
  data_pb.data.extend([data])

  text_hidden1_pb = util.ReadData(text_hidden1_pbtxt)
  data = next(d for d in text_hidden1_pb.data if d.name == 'RNA2seq_hidden1_train')
  data.file_pattern = os.path.join(text_hidden1_pb.prefix, data.file_pattern)
  data.name = 'RNA2seq_hidden1'
  data_pb.data.extend([data])

  #text_hidden2_pb = util.ReadData(text_hidden2_pbtxt)
  #data = next(d for d in text_hidden2_pb.data if d.name == 'RNA2seq_hidden2_train')
  #data.file_pattern = os.path.join(text_hidden2_pb.prefix, data.file_pattern)
  #data.name = 'RNA2seq_hidden2'
  #data_pb.data.extend([data])

  joint_pb = util.ReadData(joint_pbtxt)
  data = next(d for d in joint_pb.data if d.name == 'joint_hidden_train')
  data.file_pattern = os.path.join(joint_pb.prefix, data.file_pattern)
  data.name = 'joint_hidden'
  data_pb.data.extend([data])

  joint2_pb = util.ReadData(joint2_pbtxt)
  data = next(d for d in joint2_pb.data if d.name == 'joint_hidden2_train')
  data.file_pattern = os.path.join(joint2_pb.prefix, data.file_pattern)
  data.name = 'joint_hidden2'
  data_pb.data.extend([data])

  with open(output_proto_file, 'w') as f:
    text_format.PrintMessage(data_pb, f)
Example #15
def EditModels(args):

    # Common changes in all models
    model_files = glob.glob("models/*.pbtxt")
    for model_file in model_files:
        model = util.ReadModel(model_file)
        model.hyperparams.base_epsilon = args.base_epsilon
        model.hyperparams.sparsity = args.sparsity
        model.hyperparams.dropout = args.dropout
        model.hyperparams.l2_decay = args.l2_decay
        model.hyperparams.initial_momentum = args.initial_momentum
        model.hyperparams.final_momentum = args.final_momentum
        if args.model == 'sprbm':
            model.hyperparams.sparsity = True

        if args.model == 'droprbm':
            model.hyperparams.dropout = True

        with open(model_file, 'w') as f:
            text_format.PrintMessage(model, f)

    # Specific changes to rbm
    model_file = os.path.join('models', 'rbm.pbtxt')
    model = util.ReadModel(model_file)
    for layer in model.layer:

        if layer.name == 'input_layer':
            layer.dimensions = args.input_width
            layer.numlabels = args.input_numlabels
        if args.model in ('rbm', 'sprbm', 'droprbm'):
            if layer.name == 'bernoulli_hidden1':
                layer.dimensions = args.hidden1_width
        elif args.model in ('lcrbm', 'ghrbm'):
            if layer.name == 'gaussian_hidden1':
                layer.dimensions = args.input_width * 21
        elif args.model in ('warmrbm', 'warmlcrbm'):
            if layer.name == 'bernoulli_hidden1':
                layer.dimensions = args.input_width * 21
        elif args.model in ('warmslcrbm',):
            if layer.name == 'bernoulli_hidden1':
                layer.dimensions = args.input_width * 21
            if layer.name == 'bernoulli2_hidden1':
                layer.dimensions = args.bernoulli2_hidden1_width
        elif args.model in ('slcrbm', 'srbm'):
            if layer.name == 'bernoulli_hidden1':
                layer.dimensions = args.hidden1_width
            if layer.name == 'gaussian_hidden1':
                layer.dimensions = args.input_width * 21
        else:
            raise ValueError('Unknown model {}'.format(args.model))

    # Add in the data files
    if args.model in ('lcrbm', 'ghrbm', 'slcrbm', 'srbm'):
        edge = next(e for e in model.edge if e.node1 == 'input_layer' and \
                e.node2 == 'gaussian_hidden1')
        param = next(p for p in edge.param if p.name == 'weight')
        pretrained_model_file = param.pretrained_model[0]
        param.pretrained_model[0] = os.path.join(args.data_dir,
                                                 pretrained_model_file)

        layer = next(l for l in model.layer if l.name == 'input_layer')
        param = next(p for p in layer.param if p.name == 'diag')
        pretrained_model_file = param.pretrained_model[0]
        param.pretrained_model[0] = os.path.join(args.data_dir,
                                                 pretrained_model_file)

    if args.model in ('warmrbm', 'warmlcrbm', 'warmslcrbm'):
        edge = next(e for e in model.edge if e.node1 == 'input_layer' and \
                e.node2 == 'bernoulli_hidden1')
        param = next(p for p in edge.param if p.name == 'weight')
        pretrained_model_file = param.pretrained_model[0]
        param.pretrained_model[0] = os.path.join(args.data_dir,
                                                 pretrained_model_file)

        layer = next(l for l in model.layer if l.name == 'input_layer')
        param = next(p for p in layer.param if p.name == 'diag')
        pretrained_model_file = param.pretrained_model[0]
        param.pretrained_model[0] = os.path.join(args.data_dir,
                                                 pretrained_model_file)

    if args.model in ('lcrbm', 'slcrbm'):
        edge = next(e for e in model.edge if e.node1 == 'input_layer' and \
                e.node2 == 'gaussian_hidden1')
        param = next(p for p in edge.param if p.name == 'weight')
        sparsity_mask_file = param.sparsity_mask
        param.sparsity_mask = os.path.join(args.data_dir, sparsity_mask_file)

    if args.model in ['warmlcrbm', 'warmslcrbm']:
        edge = next(e for e in model.edge if e.node1 == 'input_layer' and \
                e.node2 == 'bernoulli_hidden1')
        param = next(p for p in edge.param if p.name == 'weight')
        sparsity_mask_file = param.sparsity_mask
        param.sparsity_mask = os.path.join(args.data_dir, sparsity_mask_file)

    with open(model_file, 'w') as f:
        text_format.PrintMessage(model, f)

Example #16
import copy
import glob
import os
import sys

from deepnet import util

rep_tied_lambda = float(sys.argv[1])
loss_factor = 1 - rep_tied_lambda

pre_dir = sys.argv[2]

prefix = os.path.join(pre_dir, 'ae_models')
for model_file in [
        'corr_ae.pbtxt', 'cross_corr_ae.pbtxt', 'full_corr_ae.pbtxt'
]:

    proto = os.path.join(prefix, model_file)

    model = util.ReadModel(proto)

    try:
        text_tied_hidden_proto = next(lay for lay in model.layer
                                      if lay.name == 'text_tied_hidden')
        text_tied_hidden_proto.rep_tied_lambda = rep_tied_lambda
    except StopIteration:
        pass
    try:
        image_tied_hidden_proto = next(lay for lay in model.layer
                                       if lay.name == 'image_tied_hidden')
        image_tied_hidden_proto.rep_tied_lambda = rep_tied_lambda
    except StopIteration:
        pass

    try:
Example #17
        for record in records:
            writer.writerow(record)


if __name__ == '__main__':
    from argparse import ArgumentParser
    parser = ArgumentParser(
        description='Parses results by walking directories')
    parser.add_argument("--outf", type=str, help="Output file")
    parser.add_argument("--mode", type=str, help="html/csv")
    args = parser.parse_args()
    model_paths = walk_dir('.')
    exp_paths = defaultdict(list)

    get_expid = lambda f: f.split("/")[1]
    get_model = lambda f: util.ReadModel(f)
    get_op = lambda f: util.ReadOperation(f)

    no_match = lambda path: all(bool(s not in path) for s in drop_string_list)
    model_paths['BEST'] = filter(no_match, model_paths['BEST'])

    for path in model_paths['BEST']:
        exp_paths[get_expid(path)].append(path)
    exp_paths = dict(exp_paths)

    rows = []
    for exp in exp_paths:
        models = defaultdict(list)
        for f in exp_paths[exp]:
            model_name = os.path.basename(f).split('_')[0]
            models[model_name].append(f)