Example #1
0
def run_BD(i):
    """Train an LSTM behavioural model for one BD configuration.

    Selects the diagnostic group named in ``configs[i]``, places every
    subject in both the 'train' and 'test' splits (train on all data),
    and optimises the network, optionally warm-starting from the
    configured model path.
    """
    tf.reset_default_graph()
    cfg = configs[i]
    data = DataReader.read_BD()
    ncells = cfg['cells']
    learning_rate = cfg['lr']
    group = cfg['g']
    iters = cfg['iters']
    model_path = cfg['model_path']
    output_path = (Paths.local_path + 'BD/rnn-opt-rand-init/' + 'run_' +
                   str(cfg['s']) + '/' + str(ncells) + 'cells/' +
                   group + '/')
    with LogFile(output_path, 'run.log'):
        DLogger.logger().debug("group: " + str(group))
        gdata = data.loc[data.diag == group]
        subject_ids = gdata['id'].unique().tolist()
        # Every subject appears in both splits: all data is used for training.
        split = pd.concat((pd.DataFrame({'id': subject_ids, 'train': 'train'}),
                           pd.DataFrame({'id': subject_ids, 'train': 'test'})))
        train, test = DataProcess.train_test_between_subject(
            gdata, split, list(range(1, 13)))
        train = DataProcess.merge_data(train)
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))

        worker = LSTMBeh(2, 0, n_cells=ncells)
        OptBEH.optimise(worker, output_path, train, None,
                        learning_rate=learning_rate,
                        global_iters=iters,
                        load_model_path=model_path)
Example #2
0
def run_BD_Q(i):
    """Maximum-likelihood fit of a 10-dimensional GQL model for one CV fold.

    The fold's subject split comes from ``cv_lists_group``; the split table
    is written next to the results for reproducibility.
    """
    tf.reset_default_graph()

    cfg = configs[i]
    learning_rate = cfg['lr']
    group = cfg['g']
    cv_index = cfg['cv_index']

    indx_data = cv_lists_group[group][cv_index]
    gdata = data.loc[data.diag == group]
    train, test = DataProcess.train_test_between_subject(
        gdata, indx_data, list(range(1, 13)))

    output_path = (Paths.local_path + 'BD/gql10d-ml-cv/' + group + '/' +
                   'fold' + str(cv_index) + '/')
    with LogFile(output_path, 'run.log'):
        # Record which subjects fell into which split for this fold.
        indx_data.to_csv(output_path + 'train_test.csv')

        model = GQL.get_instance(2, 10, {})
        train = DataProcess.merge_data(train)

        OptML.optimise(model, output_path, train, test,
                       learning_rate=learning_rate,
                       global_iters=1000)
Example #3
0
def run_BD_RNN(i):
    """Cross-validated LSTM training for one (cells, group, fold) config.

    Warm-starts from the shared random-init checkpoint matching the cell
    count and trains for a fixed 3000 iterations.
    """
    tf.reset_default_graph()
    cfg = configs[i]
    ncells = cfg['cells']
    learning_rate = cfg['lr']
    group = cfg['g']
    cv_index = cfg['cv_index']

    output_path = (Paths.local_path + 'BD/rnn-cv/' + str(ncells) +
                   'cells/' + group + '/' + 'fold' + str(cv_index) + '/')
    with LogFile(output_path, 'run.log'):
        indx_data = cv_lists_group[group][cv_index]
        gdata = data.loc[data.diag == group]
        train, test = DataProcess.train_test_between_subject(
            gdata, indx_data, list(range(1, 13)))

        # Record the fold's subject split alongside the results.
        indx_data.to_csv(output_path + 'train_test.csv')
        train_merged = DataProcess.merge_data(train)
        DLogger.logger().debug(
            "total points: " + str(get_total_pionts(train_merged)))
        del train  # release the unmerged copy before optimisation

        model = LSTMBeh(2, 0, n_cells=ncells)
        OptBEH.optimise(model, output_path, train_merged, None,
                        learning_rate=learning_rate,
                        global_iters=3000,
                        load_model_path='../inits/rnn-init/' + str(ncells) +
                        'cells/model-final/')
Example #4
0
def run_BD(i):
    """Maximum-likelihood fit of a 2-dimensional GQL model for one group.

    All subjects are used for both training and "testing" (no held-out
    data): this is an optimisation run, not a cross-validation fold.
    """
    tf.reset_default_graph()

    cfg = configs[i]
    data = DataReader.read_BD()
    learning_rate = cfg['lr']
    group = cfg['g']
    output_path = Paths.local_path + 'BD/gql-ml-opt/' + group + '/'
    with LogFile(output_path, 'run.log'):
        DLogger.logger().debug("group: " + str(group))
        gdata = data.loc[data.diag == group]
        subject_ids = gdata['id'].unique().tolist()
        # Every subject appears in both splits.
        split = pd.concat((pd.DataFrame({'id': subject_ids, 'train': 'train'}),
                           pd.DataFrame({'id': subject_ids, 'train': 'test'})))
        train, test = DataProcess.train_test_between_subject(
            gdata, split, list(range(1, 13)))
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))

        model = GQL.get_instance(2, 2, {})
        train = DataProcess.merge_data(train)
        OptML.optimise(model, output_path, train, test,
                       global_iters=1000,
                       learning_rate=learning_rate)
Example #5
0
def run_BD_GQL(i):
    """Randomly-initialised GQL fit for one cross-validation fold.

    Trains on the fold's training subjects only (no test set passed to the
    optimiser) for the configured number of iterations.
    """
    tf.reset_default_graph()
    cfg = configs[i]
    learning_rate = cfg['lr']
    group = cfg['g']
    cv_index = cfg['cv_index']
    iters = cfg['iters']

    output_path = (Paths.local_path + 'BD/gql-ml-rand-opt/' + group + '/' +
                   'fold' + str(cv_index) + '/')
    with LogFile(output_path, 'run.log'):
        indx_data = cv_lists_group[group][cv_index]
        gdata = data.loc[data.diag == group]
        train, test = DataProcess.train_test_between_subject(
            gdata, indx_data, list(range(1, 13)))

        # Record the fold's subject split alongside the results.
        indx_data.to_csv(output_path + 'train_test.csv')
        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))

        model = GQL.get_instance(2, 2, {})
        train = DataProcess.merge_data(train)

        OptML.optimise(model, output_path, train, None,
                       learning_rate=learning_rate,
                       global_iters=iters)
Example #6
0
    def test_and_save(label, output_path, saver, sess, test, worker):
        """Evaluate ``worker`` on the test set and persist results/params/model.

        Parameters
        ----------
        label : value used to name the checkpoint directory ``model-<label>/``.
        output_path : directory for CSV outputs and the model; ``None`` skips
            all file writing.
        saver : ``tf.train.Saver`` used to checkpoint ``sess``; may be ``None``.
        sess : active TensorFlow session holding the trained variables.
        test : dict mapping subject id -> list of block dicts (each with at
            least 'action', 'reward', 'block', 'id'); ``None``/empty skips
            the testing phase.
        worker : model exposing ``simulate``, ``get_params``,
            ``get_param_names`` and ``get_trans_func``.

        Returns
        -------
        dict
            Per-subject, per-block policy matrices computed during testing
            (empty when no testing was done).
        """
        policies_list = {}
        if test is not None and len(test) > 0:
            DLogger.logger().debug("started testing...")
            output = [['sid', 'pre accu', 'post accu']]
            # FIX: dict.iteritems() is Python-2 only; items() works on 2 and 3.
            for k, tr in test.items():

                merged_data = DataProcess.merge_data({'tmp': tr})['merged'][0]
                ell, policies, _ = worker.simulate(sess, merged_data['reward'],
                                                   merged_data['action'])

                policies_list[k] = {}
                post_accu = []
                for i in range(len(tr)):
                    v = tr[i]
                    # Trim the padded policy matrix to this block's trial count.
                    pol = policies[i, :v['action'].shape[1]]
                    post_accu.append(Assessor.evaluate_fit(pol, v['action']))
                    policies_list[k][v['block']] = pol

                # One row per subject; 'v' is the last block of the inner loop —
                # assumes 'id' is identical across all blocks of a subject.
                output.append([v['id'], None, post_accu])
            DLogger.logger().debug("finished testing.")

            if output_path is not None:
                Export.write_to_csv(output, output_path, 'accuracy.csv')
                Export.policies(policies_list, output_path, 'policies.csv')

        if output_path is not None:
            # Raw (untransformed) parameter values.
            vparams = sess.run(worker.get_params())
            params = [worker.get_param_names(), vparams]
            Export.write_to_csv(params, output_path, 'params.csv')

            # Parameters mapped through their transform functions
            # (e.g. onto their constrained/interpretable ranges).
            trans_params = [
                f(v)
                for f, v in zip(worker.get_trans_func(), worker.get_params())
            ]

            output = sess.run(trans_params)
            params = [worker.get_param_names(), output]
            Export.write_to_csv(params, output_path, 'params-trans.csv')

        if output_path is not None and saver is not None:
            model_path = output_path + 'model-' + str(label) + '/'
            ensure_dir(model_path)
            saver.save(sess, model_path + 'model.cptk')
            # Also dump the graph so the model can be inspected in TensorBoard.
            train_writer = tf.summary.FileWriter(model_path)
            train_writer.add_graph(sess.graph)
            train_writer.close()

        return policies_list
Example #7
0
def run_BD(i):
    """Evaluate a trained LSTM checkpoint on all subjects of one BD group.

    Restores the model named by ``configs[i]`` and simulates it over every
    subject's data, then returns a one-row DataFrame with the negative
    log-probability.

    NOTE(review): only ``ell`` from the LAST simulated block survives the
    loops, so the reported/returned 'total nlp' is the final block's value,
    not a sum over subjects — confirm this is intended.
    """
    data = DataReader.read_BD()
    ncells = configs[i]['cells']
    group = configs[i]['g']
    input_path = Paths.rest_path + 'archive/beh/rnn-opt-rand-init/' + 'run_' + \
                 str(configs[i]['s']) + '/' + str(ncells) + 'cells/' + group + '/model-final/'
    output_path = Paths.local_path + 'BD/rnn-opt-rand-init-evals/' + 'run_' + \
                  str(configs[i]['s']) + '/' + str(ncells) + 'cells/' + group + '/'

    gdata = data.loc[data.diag == group]
    ids = gdata['id'].unique().tolist()
    # Every subject is placed in both splits; only 'test' is used below.
    dftr = pd.DataFrame({'id': ids, 'train': 'train'})
    tdftr = pd.DataFrame({'id': ids, 'train': 'test'})
    train, test = DataProcess.train_test_between_subject(
        gdata, pd.concat((dftr, tdftr)),
        [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])

    test = DataProcess.merge_data(test)
    # Rebuild the graph, then restore weights from the checkpoint.
    tf.reset_default_graph()
    worker = LSTMBeh(2, 0, ncells)
    saver = tf.train.Saver(max_to_keep=None)

    with tf.Session() as sess:
        DLogger.logger().debug("loading model from: " + str(input_path))
        ckpt = tf.train.get_checkpoint_state(input_path)
        tf.train.import_meta_graph(input_path + 'model.cptk.meta')
        saver.restore(sess, ckpt.model_checkpoint_path)

        # FIX: dict.iteritems() is Python-2 only; items() works on 2 and 3.
        for k, tr in test.items():
            for v in tr:
                _, _, _, ell = worker.simulate(sess, v['reward'], v['action'],
                                               v['state'])

    DLogger.logger().debug("input path: " + input_path)
    DLogger.logger().debug("output path: " + output_path)
    DLogger.logger().debug("total nlp: {} ".format(str(ell)))

    return pd.DataFrame({
        'total nlp': [ell],
        'group': group,
        'cell': ncells,
        'fold': None,
        'model_iter': 'model-final',
        's': configs[i]['s']
    })
Example #8
0
def run_BD_RNN(i):
    """Create and save a randomly-initialised LSTM for a given cell count.

    The optimiser is invoked with ``global_iters=0``, so no training takes
    place — the net effect is to build the graph and checkpoint its random
    initialisation under ``rnn-init``.
    """

    tf.reset_default_graph()
    cfg = configs[i]
    ncells = cfg['cells']
    lr = cfg['lr']
    output_path = Paths.local_path + 'BD/rnn-init/' + str(ncells) + 'cells/'
    with LogFile(output_path, 'run.log'):

        subject_ids = data['id'].unique().tolist()
        split = pd.DataFrame({'id': subject_ids, 'train': 'train'})
        train, test = DataProcess.train_test_between_subject(
            data, split, list(range(1, 13)))
        train = DataProcess.merge_data(train)

        DLogger.logger().debug("total points: " + str(get_total_pionts(train)))

        model = LSTMBeh(2, 0, n_cells=ncells)
        # Zero iterations: initialise and persist only.
        lrh.OptBEH.optimise(model, output_path, train, None,
                            learning_rate=lr,
                            global_iters=0)