Example #1
import matplotlib.pyplot as plt
from neurodesign import experiment, optimisation

# NOTE: the head of this experiment() call is truncated in the source;
# TR, n_trials, n_stimuli, P, C and rho below are placeholder values.
EXP = experiment(
    TR=2, n_trials=20, n_stimuli=3,  # placeholders, not original values
    P=[0.4, 0.3, 0.3],               # placeholder stimulus probabilities
    C=[[1, -1, 0], [0, 1, -1]],      # placeholder contrasts of interest
    rho=0.3,                         # placeholder temporal autocorrelation
    resolution=0.1,
    stim_duration=1,
    ITImodel='exponential',
    ITImin=1, ITImean=2, ITImax=5,
    confoundorder=3,  # must be at least 1; it cannot be 0
    hardprob=True,
)

# `cycles` is not defined in the truncated source; a placeholder value is used.
cycles = 10

# optimise the design with the genetic algorithm, weighting detection power
# and trial frequencies (weights are attached to [Fe, Fd, Ff, Fc])
POP_GA = optimisation(experiment=EXP,
                      weights=[0, 0.5, 0.5, 0],
                      preruncycles=2,
                      cycles=cycles,
                      seed=1,
                      outdes=5,
                      I=10,
                      folder='/tmp/',
                      optimisation='GA',
                      R=[0.5, 0.5, 0.0])

POP_GA.optimise()

# print the best design's score and its number of trials
print("Score: %s" % POP_GA.optima[-1])
print("N trials: %d" % len(POP_GA.bestdesign.onsets))

# Let's look at the resulting experimental designs.

# this plots the columns of the X matrix convolved with the HRF
plt.figure(figsize=(10, 7))
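# The plotting call itself is missing from the truncated source. A minimal
# sketch, assuming the best design exposes its HRF-convolved design matrix
# as `Xconv` (an assumption, not confirmed by this snippet):
plt.plot(POP_GA.bestdesign.Xconv)
plt.title("Design matrix columns convolved with the HRF")
plt.show()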
Example #2
from collections import Counter

import neurodesign
from neurodesign import generate

# generate a pseudorandom stimulus order for 4 conditions
order = neurodesign.generate.order(nstim=4,
                                   ntrials=100,
                                   probabilities=[0.25, 0.25, 0.25, 0.25],
                                   ordertype='random',
                                   seed=1234)
print(order[:10])
print(Counter(order))  # how often each stimulus type occurs

iti, lam = neurodesign.generate.iti(ntrials=40,
                                    model='exponential',
                                    min=2,
                                    mean=3,
                                    max=8,
                                    resolution=0.1,
                                    seed=2134)

print(iti[:10])
print("mean ITI: %s \n\
      min ITI: %s \n\
      max ITI: %s" %
      (round(sum(iti) / len(iti), 2), round(min(iti), 2), round(max(iti), 2)))
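
# `EXP` is not defined in this snippet; the original example presumably
# created it earlier. A placeholder experiment (all values below are
# assumptions) is sketched here so the optimisation call can run.
EXP = neurodesign.experiment(TR=2,
                             n_trials=100,
                             P=[0.25, 0.25, 0.25, 0.25],
                             C=[[1, -1, 0, 0], [0, 0, 1, -1]],
                             n_stimuli=4,
                             rho=0.3,
                             resolution=0.1,
                             stim_duration=1,
                             ITImodel='exponential',
                             ITImin=2,
                             ITImean=3,
                             ITImax=8)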

POP = neurodesign.optimisation(experiment=EXP,
                               weights=[0, 0.5, 0.25, 0.25],
                               preruncycles=10,
                               cycles=100,
                               folder="./",
                               seed=100)
POP.optimise()
Example #3
from neurodesign import experiment, optimisation

# NOTE: the head of this experiment() call is truncated in the source;
# TR, n_trials, n_stimuli, P, C and rho below are placeholder values.
EXP = experiment(
    TR=2, n_trials=20, n_stimuli=3,  # placeholders, not original values
    P=[0.4, 0.3, 0.3],               # placeholder stimulus probabilities
    C=[[1, -1, 0], [0, 1, -1]],      # placeholder contrasts of interest
    rho=0.3,                         # placeholder temporal autocorrelation
    stim_duration=1,
    t_pre=0,
    t_post=2,
    restnum=0,
    restdur=0,
    ITImodel="exponential",
    ITImin=1,
    ITImean=2,
    ITImax=4
    )

POP = optimisation(
    experiment=EXP,
    weights=[0, 0.5, 0.25, 0.25],
    preruncycles=10,
    cycles=10,
    seed=1,
    outdes=5,
    folder='/Users/Joke/'
    )

#########################
# run natural selection #
#########################

POP.optimise()
POP.download()
POP.evaluate()

################
# step by step #
################
Example #4
from neurodesign import experiment, optimisation

# NOTE: the head of this experiment() call is truncated in the source;
# TR and n_trials below are placeholders, not the original values.
EXP = experiment(TR=2,         # placeholder repetition time (s)
                 n_trials=40,  # placeholder number of trials
                 P=[0.25, 0.25, 0.25],
                 C=[[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, -1]],
                 n_stimuli=3,
                 rho=0.3,
                 resolution=0.1,
                 stim_duration=1,
                 ITImodel="exponential",
                 ITImin=0.3,
                 ITImean=1,
                 ITImax=4)

# In[3]:

# `cycles` is not defined in the truncated source; a placeholder value is used.
cycles = 10

# pre-run: estimate the maximum efficiencies (FeMax, FdMax), which are used
# to normalise the scores during the real optimisation
POP_Max = optimisation(experiment=EXP,
                       weights=[0, 0.5, 0.25, 0.25],
                       preruncycles=cycles,
                       cycles=2,
                       optimisation='GA')

POP_Max.optimise()

# In[4]:

EXP.FeMax = POP_Max.exp.FeMax
EXP.FdMax = POP_Max.exp.FdMax

# Below we define two populations of designs.  We will optimise one using the genetic algorithm, and the other using randomly drawn designs.
#
# We optimise for statistical power (weights = [0,1,0,0]).  We run 100 cycles.

# In[5]:
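
# The code for the two populations is missing from the truncated source.
# A sketch under the assumptions above: 'GA' runs the genetic algorithm,
# 'RN' is assumed to be the label for randomly drawn designs, and the
# population names and preruncycles value are illustrative.
POP_GA = optimisation(experiment=EXP,
                      weights=[0, 1, 0, 0],  # statistical (detection) power only
                      preruncycles=2,        # placeholder
                      cycles=100,
                      optimisation='GA')

POP_RN = optimisation(experiment=EXP,
                      weights=[0, 1, 0, 0],
                      preruncycles=2,        # placeholder
                      cycles=100,
                      optimisation='RN')

POP_GA.optimise()
POP_RN.optimise()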
Example #5
import os
from neurodesign import experiment, optimisation

# NOTE: the head of this experiment() call is truncated in the source;
# TR, n_trials, n_stimuli, P and C below are placeholder values.
EXP = experiment(TR=2,                        # placeholder repetition time (s)
                 n_trials=20,                 # placeholder number of trials
                 n_stimuli=3,                 # placeholder number of conditions
                 P=[0.4, 0.3, 0.3],           # placeholder stimulus probabilities
                 C=[[1, -1, 0], [0, 1, -1]],  # placeholder contrasts of interest
                 rho=0.3,
                 resolution=0.1,
                 stim_duration=1,
                 t_pre=0,
                 t_post=2,
                 restnum=0,
                 restdur=0,
                 ITImodel="exponential",
                 ITImin=1,
                 ITImean=2,
                 ITImax=4)

POP = optimisation(experiment=EXP,
                   weights=[0, 0.5, 0.25, 0.25],
                   preruncycles=10,
                   cycles=10,
                   seed=1,
                   outdes=5,
                   folder=os.getcwd())

#########################
# run natural selection #
#########################

POP.optimise()
POP.download()
POP.evaluate()

################
# step by step #
################
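
# The step-by-step code is cut off in the source. A sketch of a manual
# generation loop, assuming the optimisation object exposes add_new_designs()
# and to_next_generation() (method names assumed from the neurodesign API,
# not shown in this snippet):
POP.add_new_designs()
POP.to_next_generation(seed=1)
POP.to_next_generation(seed=1001)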