Example #1
__author__ = "Heejung Jung, Xiaochun Han, Deepanshi Shokeen"
__version__ = "1.0.1"
__email__ = "*****@*****.**"
__status__ = "Production"

import sys

import numpy as np
import mvpa2.suite as mv    # assumed alias: the snippet calls mv.zscore below
import generate_dataset     # local helper module that builds the PyMVPA dataset

# 0. parameters ____________________________________________________________
# main_dir = '/Users/h/Documents/projects_local/cluster_projects'
main_dir = '/dartfs-hpc/scratch/psyc164/groupXHD'
sub_name = sys.argv[1]
hemisphere = sys.argv[2]
task_list = ['beh', 'tax']
radii = 10.0

# 1. create pymvpa dataset  ____________________________________________________________
ds_q2 = generate_dataset.create_dataset(sub_name, main_dir, task_list,
                                        hemisphere)
ds_q2.sa['chunks'] = ds_q2.sa['beh']
ds_q2.sa['targets'] = ds_q2.sa['tax']
#del ds_q2.sa['intents']
del ds_q2.sa['stats']
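# z-score each feature within each run (chunk) so that runs are on a comparable scale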
mv.zscore(ds_q2, chunks_attr='chunks')

# medial-wall handling: vertices that are zero across all 200 samples carry no
# data and are treated as medial wall; their counts are checked against the
# expected per-hemisphere values
n_medial = {'lh': 3486, 'rh': 3491}
medial_wall = np.where(np.sum(ds_q2.samples == 0, axis=0) == 200)[0].tolist()
cortical_vertices = np.where(
    np.sum(ds_q2.samples == 0, axis=0) < 200)[0].tolist()
assert len(medial_wall) == n_medial[hemisphere]
n_vertices = ds_q2.fa.node_indices.shape[0]
assert len(medial_wall) + len(cortical_vertices) == n_vertices

# 2. cross validation __________________________________________________________________
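# NOTE: the original snippet is truncated after this header. The lines below
# are only an illustrative sketch of a PyMVPA cross-validation step for this
# dataset; the classifier and partitioner are assumptions, not the authors'
# analysis, and the unused `radii` above suggests the omitted code builds a
# surface searchlight rather than a whole-hemisphere classification.
clf = mv.LinearCSVMC()
cv = mv.CrossValidation(clf,
                        mv.NFoldPartitioner(attr='chunks'),
                        errorfx=lambda p, t: np.mean(p == t))
cv_results = cv(ds_q2[:, cortical_vertices])
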
Example #2
# %%

import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import pdb
import generate_dataset as gd
import data_plot as dp

x_train, y_train, x_test, y_test = gd.create_dataset()

# %%
input_dim = 28 * 28
output_dim = 28 * 28
units = 128


def build_LSTM_model():
    # wrap an LSTMCell in a generic RNN layer; inputs are variable-length
    # sequences of input_dim-dimensional vectors
    lstm_layer = keras.layers.RNN(keras.layers.LSTMCell(units),
                                  input_shape=(None, input_dim))
    model = keras.models.Sequential([
        lstm_layer,
        keras.layers.Dense(output_dim),
    ])
    return model
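
# Illustrative usage sketch (not part of the original example): compile and fit
# the LSTM model on the data loaded above. The loss, optimizer, batch size, and
# epoch count are assumptions for a 28*28-dimensional regression target.
lstm_model = build_LSTM_model()
lstm_model.compile(loss="mse", optimizer="adam")
lstm_model.fit(x_train, y_train,
               validation_data=(x_test, y_test),
               batch_size=64, epochs=5)
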


def build_RNN_model():