Beispiel #1
0
def launch_virtual_subject_processes(nodes, mu_0, virtual_subj_ids, behavioral_param_file, trials, stim_conditions,
                                     start_nodes=True):
    """
    Launch WTA simulations for a set of virtual subjects under each stimulation condition.

    nodes = nodes to run simulation on
    mu_0 = baseline input rate; also sets the contrast scaling factors p_a/p_b
    virtual_subj_ids = ids of the virtual subjects to simulate
    behavioral_param_file = HDF5 file containing subject fitted behavioral parameters
    trials = number of trials per contrast level
    stim_conditions = dict mapping condition name -> (p_dcs, i_dcs) stimulation values
    start_nodes = whether or not to start nodes
    """

    # Setup launcher
    launcher=Launcher(nodes)

    wta_params=default_params()
    wta_params.mu_0=mu_0
    # Input scaling is proportional to the baseline rate
    wta_params.p_a=wta_params.mu_0/100.0
    wta_params.p_b=wta_params.p_a

    # Get subject alpha and beta values; close the file once the arrays are copied out
    f = h5py.File(behavioral_param_file)
    control_group=f['control']
    alpha_vals=np.array(control_group['alpha'])
    beta_vals=np.array(control_group['beta'])
    f.close()

    # For each virtual subject
    for virtual_subj_id in virtual_subj_ids:

        # Sample beta from the empirical distribution - don't use subjects with high alpha
        beta_hist,beta_bins=np.histogram(beta_vals[np.where(alpha_vals<.99)[0]], density=True)
        bin_width=beta_bins[1]-beta_bins[0]
        # density * bin_width gives the probability mass of each histogram bin
        beta_bin=np.random.choice(beta_bins[:-1], p=beta_hist*bin_width)
        beta=beta_bin+np.random.rand()*bin_width
        # Map sampled beta to a background input frequency (fitted linear relationship)
        wta_params.background_freq=(beta-161.08)/-.17

        contrast_range=[0.0, .032, .064, .096, .128, .256, .512]
        for contrast in contrast_range:
            inputs=np.array([wta_params.mu_0+wta_params.p_a*contrast*100.0,
                             wta_params.mu_0-wta_params.p_b*contrast*100.0])
            for t in range(trials):
                # Randomize which population receives the stronger input
                np.random.shuffle(inputs)
                # .items() replaces Python-2-only .iteritems()
                for stim_condition,stim_values in stim_conditions.items():
                    sim_params=simulation_params()
                    sim_params.p_dcs=stim_values[0]
                    sim_params.i_dcs=stim_values[1]
                    cmds,log_file_template,out_file=get_wta_cmds(wta_params, inputs, sim_params, contrast, t,
                        record_lfp=True, record_voxel=True, record_neuron_state=False, record_firing_rate=True,
                        record_spikes=True, save_summary_only=False,
                        e_desc='virtual_subject.%d.%s' % (virtual_subj_id,stim_condition))
                    launcher.add_batch_job(cmds, log_file_template=log_file_template, output_file=out_file)

    launcher.post_jobs()

    if start_nodes:
        launcher.set_application_script(os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh'))
        launcher.start_nodes()
Beispiel #2
0
def post_wta_jobs(nodes,
                  p_b_e_range,
                  p_x_e_range,
                  p_e_e_range,
                  p_e_i_range,
                  p_i_i_range,
                  p_i_e_range,
                  num_trials,
                  muscimol_amount=0 * nS,
                  injection_site=0,
                  start_nodes=True):
    """
    Post one WTA job per connection-probability combination, contrast level and trial.

    nodes = nodes to run simulations on
    p_*_range = candidate values for each of the six connection probabilities
    num_trials = trials per (parameter combination, contrast) pair
    muscimol_amount = amount of muscimol to inject
    injection_site = population index receiving the injection
    start_nodes = whether or not to start the nodes
    """
    # Shared simulation settings for every job
    sim_params = simulation_params()
    sim_params.muscimol_amount = muscimol_amount
    sim_params.injection_site = injection_site

    total_input = 40.0
    launcher = Launcher(nodes)
    if start_nodes:
        script_path = os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh')
        launcher.set_application_script(script_path)
        launcher.start_nodes()

    contrasts = [0.0, 0.0625, 0.125, 0.25, 0.5, 1.0]

    # Full grid search over the six connection probabilities
    for p_b_e in p_b_e_range:
        for p_x_e in p_x_e_range:
            for p_e_e in p_e_e_range:
                for p_e_i in p_e_i_range:
                    for p_i_i in p_i_i_range:
                        for p_i_e in p_i_e_range:
                            wta_params = default_params()
                            wta_params.p_b_e = p_b_e
                            wta_params.p_x_e = p_x_e
                            wta_params.p_e_e = p_e_e
                            wta_params.p_e_i = p_e_i
                            wta_params.p_i_i = p_i_i
                            wta_params.p_i_e = p_i_e
                            for contrast in contrasts:
                                # Split the fixed input sum between the two
                                # populations according to the contrast level
                                stronger = total_input * (contrast + 1.0) / 2.0
                                inputs = np.array([stronger,
                                                   total_input - stronger])
                                for trial in range(num_trials):
                                    # Randomize which population gets the
                                    # stronger input on this trial
                                    np.random.shuffle(inputs)
                                    cmds, log_file_template, out_file = get_wta_cmds(
                                        wta_params,
                                        inputs,
                                        sim_params,
                                        contrast,
                                        trial,
                                        record_lfp=True,
                                        record_voxel=True,
                                        record_neuron_state=False,
                                        record_firing_rate=True,
                                        record_spikes=True)
                                    launcher.add_job(
                                        cmds,
                                        log_file_template=log_file_template,
                                        output_file=out_file)
Beispiel #3
0
def launch_background_freq_processes(nodes, background_freq_range, trials, start_nodes=True):
    """
    Queue one re-RW simulation job per (background frequency, trial) pair.

    nodes = nodes to run simulations on
    background_freq_range = background frequencies to sweep over
    trials = number of trials per frequency
    start_nodes = whether or not to start the nodes
    """
    # Single fixed subject session used for every job
    subject_mat_file='/tmp/pySBI/data/rerw/subjects/value1_s1_t2.mat'

    launcher=Launcher(nodes)

    for freq in background_freq_range:
        for trial_idx in range(trials):
            job_desc='background_freq.%.3f.trial.%d' % (freq,trial_idx)
            # No stimulation (0 pA, 0 s); fixed alpha=0.4, beta=5.0
            cmds, log_file_template, out_file=get_rerw_commands(mat_file, 0*pA, 0*pA, 0*second, 0.4, 5.0,
                freq, e_desc=job_desc) if False else get_rerw_commands(subject_mat_file, 0*pA, 0*pA, 0*second, 0.4,
                5.0, freq, e_desc=job_desc)
            launcher.add_job(cmds, log_file_template=log_file_template, output_file=out_file)

    if start_nodes:
        launcher.set_application_script(os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh'))
        launcher.start_nodes()
Beispiel #4
0
def post_wta_jobs(nodes, p_b_e_range, p_x_e_range, p_e_e_range, p_e_i_range, p_i_i_range, p_i_e_range, num_trials,
                   muscimol_amount=0*nS, injection_site=0, start_nodes=True):
    """
    Post WTA jobs over a grid of connection probabilities.

    nodes = nodes to run simulations on
    p_*_range = values to sweep for each connection probability
    num_trials = trials per parameter combination and contrast
    muscimol_amount = amount of muscimol to inject
    injection_site = population index receiving the injection
    start_nodes = whether or not to start the nodes
    """
    # One shared simulation-parameter object for all jobs
    sim_params=simulation_params()
    sim_params.muscimol_amount=muscimol_amount
    sim_params.injection_site=injection_site

    input_sum=40.0
    launcher=Launcher(nodes)
    if start_nodes:
        launcher.set_application_script(os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh'))
        launcher.start_nodes()

    contrast_range=[0.0, 0.0625, 0.125, 0.25, 0.5, 1.0]

    # Exhaustive sweep over all six probability ranges
    for p_b_e in p_b_e_range:
        for p_x_e in p_x_e_range:
            for p_e_e in p_e_e_range:
                for p_e_i in p_e_i_range:
                    for p_i_i in p_i_i_range:
                        for p_i_e in p_i_e_range:
                            wta_params=default_params()
                            wta_params.p_b_e=p_b_e
                            wta_params.p_x_e=p_x_e
                            wta_params.p_e_e=p_e_e
                            wta_params.p_e_i=p_e_i
                            wta_params.p_i_i=p_i_i
                            wta_params.p_i_e=p_i_e
                            for contrast in contrast_range:
                                # Divide input_sum between populations by contrast
                                first_input=input_sum*(contrast+1.0)/2.0
                                inputs=np.array([first_input, input_sum-first_input])
                                for trial_num in range(num_trials):
                                    # Randomly assign which population is favored
                                    np.random.shuffle(inputs)
                                    cmds,log_file_template,out_file=get_wta_cmds(wta_params, inputs, sim_params,
                                        contrast, trial_num, record_lfp=True, record_voxel=True,
                                        record_neuron_state=False, record_firing_rate=True, record_spikes=True)
                                    launcher.add_job(cmds, log_file_template=log_file_template, output_file=out_file)
Beispiel #5
0
def launch_baseline_virtual_subject_processes(nodes, data_dir, num_real_subjects, virtual_subj_ids, start_nodes=True):
    """
    Launch baseline re-RW simulations for virtual subjects, sampling alpha and beta uniformly.

    nodes = nodes to run simulation on
    data_dir = directory containing subject data
    num_real_subjects = number of real subjects whose session files may be reused
    virtual_subj_ids = ids of the virtual subjects to simulate
    start_nodes = whether or not to start nodes
    """

    # Uniform sampling ranges for the behavioral parameters
    alpha_range=(0.0, 1.0)
    beta_range=(1.69,6.63)

    # Setup launcher
    launcher=Launcher(nodes)

    # For each virtual subject
    for virtual_subj_id in virtual_subj_ids:

        # Choose an actual subject whose stim and control session files both exist
        stim_file_name=None
        control_file_name=None
        while True:
            i=np.random.choice(range(num_real_subjects))
            subj_id=i+1
            subj_stim_session_number=stim_order[i,LAT]
            stim_file_name='value%d_s%d_t2.mat' % (subj_id,subj_stim_session_number)
            subj_control_session_number=stim_order[i,NOSTIM1]
            control_file_name='value%d_s%d_t2.mat' % (subj_id,subj_control_session_number)
            if os.path.exists(os.path.join(data_dir,stim_file_name)) and\
               os.path.exists(os.path.join(data_dir,control_file_name)):
                break

        # Sample alpha uniformly from its range
        alpha=alpha_range[0]+np.random.rand()*(alpha_range[1]-alpha_range[0])

        # Sample beta uniformly from its range
        beta=beta_range[0]+np.random.rand()*(beta_range[1]-beta_range[0])

        # One job per stimulation condition: (session file, p_dcs, i_dcs, label).
        # Replaces five copy-pasted submission stanzas; e_desc strings are unchanged.
        conditions=[
            (control_file_name, 0*pA, 0*pA, 'control'),
            (stim_file_name, 4*pA, -2*pA, 'anode'),
            (stim_file_name, -4*pA, 2*pA, 'cathode'),
            (stim_file_name, 2*pA, -4*pA, 'anode_control_1'),
            (stim_file_name, -2*pA, 4*pA, 'cathode_control_1'),
        ]
        for file_name, p_dcs, i_dcs, condition in conditions:
            cmds, log_file_template, out_file=get_rerw_commands(file_name, p_dcs, i_dcs, 0*second, alpha, beta, None,
                e_desc='baseline.virtual_subject.%d.%s' % (virtual_subj_id, condition))
            launcher.add_job(cmds, log_file_template=log_file_template, output_file=out_file)

    if start_nodes:
        launcher.set_application_script(os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh'))
        launcher.start_nodes()
Beispiel #6
0
def launch_virtual_subject_processes(nodes, data_dir, num_real_subjects, virtual_subj_ids, behavioral_param_file,
                                     start_nodes=True):
    """
    nodes = nodes to run simulation on
    data_dir = directory containing subject data
    num_real_subjects = number of real subjects whose session files may be reused
    virtual_subj_ids = ids of the virtual subjects to simulate
    behavioral_param_file = file containing subject fitted behavioral parameters
    start_nodes = whether or not to start nodes
    """

    # Setup launcher
    launcher=Launcher(nodes)

    # Get subject alpha and beta values; close the file once the arrays are copied out
    f = h5py.File(behavioral_param_file)
    control_group=f['control']
    alpha_vals=np.array(control_group['alpha'])
    beta_vals=np.array(control_group['beta'])
    f.close()

    # For each virtual subject
    for virtual_subj_id in virtual_subj_ids:

        # Choose an actual subject whose stim and control session files both exist
        stim_file_name=None
        control_file_name=None
        while True:
            i=np.random.choice(range(num_real_subjects))
            subj_id=i+1
            subj_stim_session_number=stim_order[i,LAT]
            stim_file_name='value%d_s%d_t2.mat' % (subj_id,subj_stim_session_number)
            subj_control_session_number=stim_order[i,NOSTIM1]
            control_file_name='value%d_s%d_t2.mat' % (subj_id,subj_control_session_number)
            if os.path.exists(os.path.join(data_dir,stim_file_name)) and \
               os.path.exists(os.path.join(data_dir,control_file_name)):
                break

        # Sample alpha from subject distribution - don't use subjects with high alpha
        alpha_hist,alpha_bins=np.histogram(alpha_vals[np.where(alpha_vals<.99)[0]], density=True)
        bin_width=alpha_bins[1]-alpha_bins[0]
        # density * bin_width gives the probability mass of each histogram bin
        alpha_bin=np.random.choice(alpha_bins[:-1], p=alpha_hist*bin_width)
        alpha=alpha_bin+np.random.rand()*bin_width

        # Sample beta from subject distribution - don't use subjects with high alpha
        beta_hist,beta_bins=np.histogram(beta_vals[np.where(alpha_vals<.99)[0]], density=True)
        bin_width=beta_bins[1]-beta_bins[0]
        beta_bin=np.random.choice(beta_bins[:-1], p=beta_hist*bin_width)
        beta=beta_bin+np.random.rand()*bin_width

        # One job per stimulation condition: (session file, p_dcs, i_dcs, label).
        # Replaces five copy-pasted submission stanzas; e_desc strings are unchanged.
        conditions=[
            (control_file_name, 0*pA, 0*pA, 'control'),
            (stim_file_name, 4*pA, -2*pA, 'anode'),
            (stim_file_name, -4*pA, 2*pA, 'cathode'),
            (stim_file_name, 2*pA, -4*pA, 'anode_control_1'),
            (stim_file_name, -2*pA, 4*pA, 'cathode_control_1'),
        ]
        for file_name, p_dcs, i_dcs, condition in conditions:
            cmds, log_file_template, out_file=get_rerw_commands(file_name, p_dcs, i_dcs, 0*second, alpha, beta, None,
                e_desc='virtual_subject.%d.%s' % (virtual_subj_id, condition))
            launcher.add_job(cmds, log_file_template=log_file_template, output_file=out_file)

    if start_nodes:
        launcher.set_application_script(os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh'))
        launcher.start_nodes()
Beispiel #7
0
def launch_control_virtual_subject_processes(nodes, mu_0, virtual_subj_ids, behavioral_param_file, trials,
                                             stim_gains=(8,6,4,2,1,0.5,0.25), start_nodes=True):
    """
    Launch stimulation intensity simulations with DCS applied only to pyramidal population

    nodes = nodes to run simulation on
    mu_0 = baseline input rate; also sets the contrast scaling factors p_a/p_b
    virtual_subj_ids = ids of the virtual subjects to simulate
    behavioral_param_file = HDF5 file containing subject fitted behavioral parameters
    trials = number of trials per contrast level
    stim_gains = stimulation magnitudes (pA) applied as +gain (anode) and -gain (cathode);
        a tuple default avoids the mutable-default-argument pitfall
    start_nodes = whether or not to start nodes
    """

    # Setup launcher
    launcher=Launcher(nodes)

    # Get subject alpha and beta values; close the file once the arrays are copied out
    f = h5py.File(behavioral_param_file)
    control_group=f['control']
    alpha_vals=np.array(control_group['alpha'])
    beta_vals=np.array(control_group['beta'])
    f.close()

    # For each virtual subject
    for virtual_subj_id in virtual_subj_ids:

        wta_params=default_params()
        wta_params.mu_0=mu_0
        # Input scaling is proportional to the baseline rate
        wta_params.p_a=mu_0/100.0
        wta_params.p_b=wta_params.p_a

        # Sample beta from subject distribution - don't use subjects with high alpha
        beta_hist,beta_bins=np.histogram(beta_vals[np.where(alpha_vals<.99)[0]], density=True)
        bin_width=beta_bins[1]-beta_bins[0]
        # density * bin_width gives the probability mass of each histogram bin
        beta_bin=np.random.choice(beta_bins[:-1], p=beta_hist*bin_width)
        beta=beta_bin+np.random.rand()*bin_width
        # Map sampled beta to a background input frequency (fitted linear relationship)
        wta_params.background_freq=(beta-161.08)/-.17

        contrast_range=[0.0, .032, .064, .128, .256, .512]
        for contrast in contrast_range:
            inputs=np.zeros(2)
            inputs[0]=wta_params.mu_0+wta_params.p_a*contrast*100.0
            inputs[1]=wta_params.mu_0-wta_params.p_b*contrast*100.0
            for t in range(trials):
                # Randomize which population receives the stronger input
                np.random.shuffle(inputs)

                # NOTE(review): e_desc does not encode stim_gain, so jobs for different
                # gains share the same descriptor within a subject - confirm intended
                for idx, stim_gain in enumerate(stim_gains):
                    # Anode: positive current to pyramidal population
                    sim_params=simulation_params()
                    sim_params.p_dcs=stim_gain*pA
                    cmds,log_file_template,out_file=get_wta_cmds(wta_params, inputs, sim_params, contrast, t,
                        record_lfp=True, record_voxel=True, record_neuron_state=False, record_firing_rate=True,
                        record_spikes=True, save_summary_only=False,
                        e_desc='virtual_subject.%d.anode' % virtual_subj_id)
                    launcher.add_batch_job(cmds, log_file_template=log_file_template, output_file=out_file)

                    # Cathode: negative current to pyramidal population
                    sim_params=simulation_params()
                    sim_params.p_dcs=-stim_gain*pA
                    cmds,log_file_template,out_file=get_wta_cmds(wta_params, inputs, sim_params, contrast, t,
                        record_lfp=True, record_voxel=True, record_neuron_state=False, record_firing_rate=True,
                        record_spikes=True, save_summary_only=False,
                        e_desc='virtual_subject.%d.cathode' % virtual_subj_id)
                    launcher.add_batch_job(cmds, log_file_template=log_file_template, output_file=out_file)

                    # Unstimulated control run - submitted once per trial, not per gain
                    if idx==0:
                        sim_params=simulation_params()
                        cmds,log_file_template,out_file=get_wta_cmds(wta_params, inputs, sim_params, contrast, t,
                            record_lfp=True, record_voxel=True, record_neuron_state=False, record_firing_rate=True,
                            record_spikes=True, save_summary_only=False,
                            e_desc='virtual_subject.%d.control' % virtual_subj_id)
                        launcher.add_batch_job(cmds, log_file_template=log_file_template, output_file=out_file)

    launcher.post_jobs()

    if start_nodes:
        launcher.set_application_script(os.path.join(SRC_DIR, 'sh/ezrcluster-application-script.sh'))
        launcher.start_nodes()