Example #1
    feature_len = js['feature']

    reference = js["nodes_reference"]
    base_nodes = map(int, js["nodes_base"].split(','))
    stack_nodes = map(int, js["nodes_stack"].split(','))
    final_nodes = []  #map(int, js["nodes_final"].split(','))

    num_att = 4
    num_output = 3

    renetFile = None
    if 'retrain' in js:
        renetFile = HOME + 'NNs/' + js['retrain'] + '.p'

    if mode == 'train':
        tr = DataSet(tr_data, js['block'], feature_len)
        # tr.set_t_scale(t_scale)
        tr.set_num_output(num_output)
        te = DataSet(te_data, js['block'], feature_len)
        # te.set_t_scale(t_scale)
        te.set_num_output(num_output)
    else:
        if mode == 'te':
            te = DataSet(te_data, js['block'], feature_len)
        else:
            te = DataSet(tr_data, js['block'], feature_len)
        te.set_num_output(num_output)

    sz_in = te.sz
    loop = js['loop']
    print "input shape", sz_in, "LR", lr, 'feature', feature_len
Example #2
    netFile = HOME + 'NNs/' + js['net'] + '/fc'

    batch_size = js['batch_size']
    feature_len = js['feature']
    lr = js['lr']
    stack = js['stack']
    num_output = js["num_output"]
    step = js["step"]
    stage = js["stage"]

    renetFile = None
    if 'retrain' in js:
        renetFile = HOME + 'NNs/' + js['retrain'] + '/fc'

    tr = DataSet(tr_data, batch_size, feature_len + stack)
    te = DataSet(te_data, batch_size, feature_len + stack)

    att = te.sz[1]
    iterations = 10000
    loop = 40
    print "input attribute", att, "LR", lr, 'feature', feature_len

    inputs = {}

    inputs[0] = tf.placeholder(tf.float32, [None, feature_len * att])
    output = tf.placeholder(tf.float32, [None, num_output])
    for a in range(stack):
        inputs[a + 1] = tf.placeholder(tf.float32, [None, att])

    input_dic = {}
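
The placeholder layout above is easier to see with concrete sizes. Below is a self-contained sketch of the same pattern with made-up sizes (feature_len=4, att=3, stack=2, num_output=3): one flattened feature input plus one extra placeholder per stacked step, fed from random NumPy arrays. It assumes TensorFlow 1.x, as the snippets do.

# Self-contained sketch of the stacked-placeholder pattern; sizes are hypothetical.
import numpy as np
import tensorflow as tf   # assumes TensorFlow 1.x

feature_len, att, stack, num_output = 4, 3, 2, 3

inputs = {0: tf.placeholder(tf.float32, [None, feature_len * att])}
for a in range(stack):
    inputs[a + 1] = tf.placeholder(tf.float32, [None, att])
output = tf.placeholder(tf.float32, [None, num_output])

# Feed one random batch to confirm the shapes line up.
batch = 5
feed = {inputs[0]: np.random.rand(batch, feature_len * att)}
for a in range(stack):
    feed[inputs[a + 1]] = np.random.rand(batch, att)

with tf.Session() as sess:
    print(sess.run(tf.shape(inputs[0]), feed_dict=feed))   # [ 5 12]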
Example #3
    js = Utils.load_json_file(config_file, False)

    te_data = []
    for key in js:
        if key.startswith(data_type):
            te_data.append(HOME + js[key])

    netFile = HOME + 'NNs/' + js['netTest'] + '/fc'

    batch_size = js['batch_size']
    feature_len = js['feature']
    stack = js['stack']
    num_output = js["num_output"]

    te = DataSet(te_data, batch_size, feature_len + stack)

    att = te.sz[1]

    print "input shape", att, 'feature', feature_len

    inputs = {}

    inputs[0] = tf.placeholder(tf.float32, [None, feature_len * att])
    output = tf.placeholder(tf.float32, [None, num_output])
    for a in range(stack):
        inputs[a + 1] = tf.placeholder(tf.float32, [None, att])

    input_dic = {}
    for a in range(stack + 1):
        input_dic['input{}'.format(a)] = inputs[a]
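
For reference, the keys read in this example suggest a config file along the following lines. Every value below is made up, and 'te' is only an assumed value for the data_type prefix; only the key names are taken from the snippet.

# Illustrative config matching the keys read above; all values are hypothetical.
import json

example_config = {
    "te0": "data/test_run_0.p",   # matched because the key starts with data_type
    "te1": "data/test_run_1.p",
    "netTest": "my_net",          # -> netFile = HOME + 'NNs/my_net/fc'
    "batch_size": 100,
    "feature": 20,
    "stack": 2,
    "num_output": 3
}

with open("config.json", "w") as f:
    json.dump(example_config, f, indent=2)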
Example #4
    netFile = HOME + 'NNs/' + js['net'] + '/p1'
    batch_size = int(js['batch_size'])
    feature_len = int(js['feature'])
    lr = float(js['lr'])
    step = js['step']

    num_output = 3
    nodes1 = map(int, js["nodes1"].split(','))
    nodes2 = map(int, js["nodes2"].split(','))
    nodes3 = map(int, js["nodes3"].split(','))

    renetFile = None
    if 'retrain' in js:
        renetFile = HOME + 'NNs/' + js['retrain'] + '/p1'

    tr = DataSet(tr_data, batch_size, feature_len + 1)
    te = DataSet(te_data, batch_size, feature_len + 1)

    att = te.sz[1]
    iterations = 100000
    loop = 2
    if step == 0:
        loop = 200
    print "input shape", att, "LR", lr, 'feature', feature_len

    net = P1Net(nodes1, nodes2, nodes3, att, num_output, feature_len, lr)
    net.step = step

    with tf.Session() as sess:
        sess.run(net.init)
        if renetFile:
Example #5
if __name__ == '__main__':

    config_file = "rnn_config.json"

    #if len(sys.argv)>1:
    #    config_file = sys.argv[1]

    test = None
    if len(sys.argv) > 1:
        test = sys.argv[1]

    cfg = Config(config_file)

    if test is None:
        tr = DataSet(cfg.tr_data, cfg)
        te = DataSet(cfg.te_data, cfg, sub_sample=0.15)
        tr0 = DataSet([cfg.tr_data[0]], cfg, sub_sample=0.15)
        cfg.att = te.sz[1]
    else:
        if test == 'te':
            te = DataSet([cfg.te_data[0]], cfg)
        else:
            te = DataSet([cfg.tr_data[0]], cfg)
        cfg.att = te.sz[1]

    iterations = 10000
    loop = cfg.loop
    print "input attribute", cfg.att, "LR", cfg.lr, \
        'feature', cfg.feature_len, 'add', cfg.add_len
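
The Config class itself is not part of these excerpts. Judging by the attributes used above (tr_data, te_data, att, lr, feature_len, add_len, loop), a minimal stand-in could look like the sketch below, which simply copies JSON keys onto attributes; this is an assumption for illustration, not the project's actual class.

# Hypothetical stand-in for the Config class used above. The real class may
# do more (path handling, defaults, derived fields).
import json

class Config(object):
    def __init__(self, config_file):
        with open(config_file) as f:
            js = json.load(f)
        for key, value in js.items():
            setattr(self, key, value)   # e.g. cfg.lr, cfg.feature_len, cfg.loop
        self.att = None                 # filled in later from DataSet.sz, as above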
Example #6
    batch_size = int(js['batch_size'])
    feature_len = int(js['feature'])
    lr = float(js['lr'])
    step = js['step']
    nAddition = 10

    num_output = 3
    nodes1 = map(int, js["nodes1"].split(','))
    nodes2 = map(int, js["nodes2"].split(','))
    nodes3 = map(int, js["nodes3"].split(','))

    renetFile = None
    if 'retrain' in js:
        renetFile = HOME + 'NNs/' + js['retrain'] + '/p1'

    tr = DataSet(tr_data, batch_size, feature_len, nadd=nAddition)
    te = DataSet(te_data, batch_size, feature_len, nadd=nAddition)

    att = te.sz[1]
    iterations = 100000
    loop = 100
    # if step==0:
    #    loop=200
    print "input shape", att, "LR", lr, 'feature', feature_len

    net = P1Net(nodes1, nodes2, nodes3, att, num_output, feature_len, lr,
                nAddition)
    net.step = step

    with tf.Session() as sess:
        sess.run(net.init)
Example #7
    return pathname + '_' + basename + '_avg.p'


if __name__ == '__main__':

    config_file = "rnn_config_4.json"

    test = None
    if len(sys.argv) > 1:
        test = sys.argv[1]

    cfg = Config(config_file)

    avg_file = avg_file_name(cfg.netFile)
    if test is None:
        tr = DataSet(cfg.tr_data, cfg)
        get_avg_file(tr, avg_file)
        te = DataSet(cfg.te_data, cfg, sub_sample=0.15)
        tr0 = DataSet([cfg.tr_data[0]], cfg, sub_sample=0.1)
        cfg.att = te.sz[1]
        tr.avg_correction(avg_file)
        tr0.avg_correction(avg_file)

    else:
        if test == 'te':
            te = DataSet([cfg.te_data[0]], cfg)
        else:
            te = DataSet([cfg.tr_data[0]], cfg)
        cfg.att = te.sz[1]

    te.avg_correction(avg_file)
Example #8
    parser.add_argument('-t', '--test', help='test', required=False)
    args = parser.parse_args()

    config_file = "config.json" if args.config is None else args.config
    mode = 'train' if args.test is None else args.test

    iterations = 10000
    js = Utils.load_json_file(config_file)

    dtype = torch.float
    device = torch.device("cpu")
    # device = torch.device("cuda:0") # Uncomment this to run on GPU

    cfg = Config(config_file)

    tr = DataSet(cfg.tr_data, cfg.memory_size, cfg.feature_len)
    te = DataSet(cfg.te_data, cfg.memory_size, cfg.feature_len)
    tr.set_net_type(cfg.net_type)
    te.set_net_type(cfg.net_type)
    tr.set_t_scale(cfg.t_scale)
    te.set_t_scale(cfg.t_scale)
    tr.set_num_output(cfg.num_output)
    te.set_num_output(cfg.num_output)
    att = te.sz[1]

    D_in = cfg.feature_len * att
    D_out = cfg.num_output

    tr_pre_data = tr.prepare(multi=1)
    for b in tr_pre_data:
        d = torch.from_numpy(b[0]).type(torch.FloatTensor)
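
The excerpt stops after converting the first element of each prepared batch to a float tensor. The following self-contained sketch shows where that conversion is heading, pushing a batch through a model sized D_in -> D_out. The sizes, the stand-in batch, and the network itself are all made up for illustration; the original model is not part of the excerpt.

# Hypothetical illustration only: a small fully connected regressor with the
# same D_in / D_out sizing as above, fed one random batch.
import numpy as np
import torch

D_in, D_out, batch = 20 * 4, 3, 8        # e.g. feature_len * att, num_output
device = torch.device("cpu")

model = torch.nn.Sequential(
    torch.nn.Linear(D_in, 64),
    torch.nn.ReLU(),
    torch.nn.Linear(64, D_out),
).to(device)

b0 = np.random.rand(batch, D_in).astype(np.float32)    # stand-in for b[0]
d = torch.from_numpy(b0).type(torch.FloatTensor).to(device)
pred = model(d)
print(pred.shape)                                      # torch.Size([8, 3])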
Example #9
    batch_size = js['batch_size']
    feature_len = js['feature']
    lr = js['lr']
    #stack = js['stack']
    num_output = js["num_output"]
    step = js["step"]
    #stage = js["stage"]
    t_scale = js['t_scale']
    #net_type = js['net_type']

    renetFile = None
    if 'retrain' in js:
        renetFile = HOME + 'NNs/' + js['retrain'] + '/fc'

    tr = DataSet(tr_data, batch_size, feature_len)
    tr0 = DataSet([tr_data[0]], batch_size, feature_len)
    te = DataSet(te_data, batch_size, feature_len)
    tr0.set_t_scale(t_scale)
    tr.set_t_scale(t_scale)
    te.set_t_scale(t_scale)
    tr0.set_num_output(num_output)
    tr.set_num_output(num_output)
    te.set_num_output(num_output)

    att = te.sz[1]
    iterations = 10000
    loop = js["loop"]
    print "input attribute", att, "LR", lr, 'feature', feature_len

    inputs = {}
Example #10
        if key.startswith('te'):
            te_data.append(HOME + js['te'])  # note: appends js['te'] for every matching key; Example #3 uses js[key] here

    netFile = HOME + 'NNs/' + js['net'] + '/fc'

    batch_size = js['batch_size']
    feature_len = js['feature']
    base = js['base']
    step = js["step"]
    loop = 1
    num_output = js['num_output']
    t_scale = js['t_scale']

    renetFile = HOME + 'NNs/' + js['netTest'] + '/fc'

    te = DataSet(te_data, batch_size, feature_len * base)
    te.set_t_scale(t_scale)
    te.set_num_output(num_output)

    att = te.sz[1]
    print "input attribute", att, 'feature', feature_len

    inputs = {}
    input_dic = {}

    output = tf.placeholder(tf.float32, [None, num_output])
    for a in range(base):
        inputs[a] = tf.placeholder(tf.float32, [None, att * feature_len])
        input_dic['input{}'.format(a)] = inputs[a]

    net = PyraNet(input_dic)