Example #1
0
# Geometric (log-scale) variance for a lognormal observation model, from the
# standard moment relation: sigma^2 = log(1 + var/mean^2).
geo_var = cs.log(1 + gfp_var / gfp_mean**2)
# Alternative parameterizations kept for reference:
#geo_mean = 2/3 * cs.log(gfp_mean) - 1/2 * cs.log(cs.exp(p[4]) + gfp_mean)
#2 * cs.log(gfp_mean) - 1/2 * cs.log(p[4]*gfp_mean + gfp_mean**2)
#geo_var = cs.log(cs.exp(p[4]) + gfp_mean) - cs.log(gfp_mean)

#link the deterministic model to the sampling statistics (here lognormal
#geometric mean and variance); NOTE(review): geo_mean is defined earlier in
#the file — only a commented-out candidate definition is visible here.
lognorm_stats = cs.vertcat(geo_mean, geo_var)

#create a casadi function mapping input and parameters to sampling statistics (mean and var)
y = cs.Function('GFP', [x, p], [lognorm_stats])

# enter the function as a tuple with label indicating lognormal error, into observation list
observ_list = [(y, 'Lognormal')]

#instantiate nloed model class
nloed_model = Model(observ_list, xnames, pnames)

# Nominal parameter values on the natural scale; log-transformed below to
# match the model's log parameterization.
nat_params = [
    5.53127845e+02, 9.52661655e+03, 2.41382438e+00, 3.62505725e+02, 5500
]
nominal_params = np.log(nat_params)

####################################################################################################
# EVALUATE INITIAL DESIGN
####################################################################################################

# Initial (intuitive) design: five light levels spanning the input range,
# all observed through the 'GFP' variable with heavy replication.
init_design = pd.DataFrame({
    'Light': [0.1, 63, 254, 1022, 4095],
    'Variable': ['GFP'] * 5,
    'Replicates': [15000] * 5,
})
Example #2
0
# Fitted parameter values on the natural scale.
B_val = 0.03525605965
K_m_val = 0.28169444728
K_g_val = 68.61725640178
Sigma_m_val = 0.01180105629
Sigma_g_val = 0.01381497437

# Log-transform the fitted values to match the model's log parameterization
# (K_dr_val, K_T_val and B_o_val are defined earlier in the file).
fit_params = np.log([K_dr_val, K_T_val, B_o_val, B_val, K_m_val, K_g_val, Sigma_m_val, Sigma_g_val])

# enter the function as a tuple with label indicating normal error, into observation list
observ_list = [(gfp_model,'Normal')]
#create names for inputs
input_names = ['Light','Time']
#create names for parameters
parameter_names = ['K_dr', 'K_T', 'B_o', 'B', 'K_m', 'K_g', 'Sigma_m', 'Sigma_g']
#instantiate nloed model class
model_object = Model(observ_list,input_names,parameter_names)
#set up the design algorithm to use continuous (continuous) optimization with two unique inputs points
continuous_inputs={'Inputs':['Light','Time'],
                   'Bounds':[(.01,4095),(0,6*60)],
                   'Structure':[['L1','T1'],
                                ['L2','T2'],
                                ['L3','T3'],
                                ['L4','T4'],
                                ['L5','T5'],
                                ['L6','T6'],
                                ['L7','T7'],
                                ['L8','T8'],
                                ['L9','T9'],
                                ['L10','T10']],
                    'Initial':[[0.1,350],
Example #3
0
  mrna_stats = cs.vertcat(sample_list[i][0], 0.001)
  prot_stats = cs.vertcat(sample_list[i][1], 0.001)
  #create casadi function for mrna and prot stats
  mrna_func = cs.Function(mrna_name,[x,p],[mrna_stats])
  prot_func = cs.Function(prot_name,[x,p],[prot_stats])
  #append the casadi function and distribution type to obs struct
  ode_response.append((mrna_func,'Normal'))
  ode_response.append((prot_func,'Normal'))
  #store response names for plotting
  response_names.append(mrna_name)
  response_names.append(prot_name)

# Input and parameter name lists for the ODE-based model.
xnames = ['mrna_ic','prot_ic','cntrl_1','cntrl_2','cntrl_3']
pnames = ['alpha','delta','beta','gamma']

# ode_response (assembled in the loop above) pairs each CasADi observation
# function with its 'Normal' distribution label.
ode_model = Model(ode_response,xnames,pnames)

####################################################################################################
# GENERATE DESIGN
####################################################################################################

# Nominal log-scale parameter values at which the design is optimized.
true_pars = np.log([0.5,1.1,2.1,0.3])

# Alternative: fully discrete design optimization (kept for reference).
# discrete_inputs = {'Inputs':['mrna_ic','prot_ic','cntrl_1','cntrl_2','cntrl_3'],
#                  'Bounds':[(0,1),(0,1),(0,1),(0,1),(0,1)]}
# opt_design = Design(ode_model,true_pars,'D',discrete_inputs)
# Continuous (relaxed) design optimization over all five model inputs, with
# three candidate support points named row-by-row in 'Structure'.
# FIX: 'Bounds' must supply exactly one (lower, upper) pair per entry of
# 'Inputs' — the original listed six pairs for five inputs.
continuous_inputs = {'Inputs':['mrna_ic','prot_ic','cntrl_1','cntrl_2','cntrl_3'],
                     'Bounds':[(0,1),(0,1),(0,1),(0,1),(0,1)],
                     'Structure':[['mrna_ic1','prot_ic1','c1_lvl1','c2_lvl1','c3_lvl1'],
                                  ['mrna_ic2','prot_ic2','c1_lvl2','c2_lvl2','c3_lvl2'],
                                  ['mrna_ic3','prot_ic3','c1_lvl3','c2_lvl3','c3_lvl3']]}
Example #4
0
#define the deterministic model: a Hill-type saturation curve; parameters
#enter through exp(), so p holds log-scale values (keeps natural-scale
#parameters positive)
hull_func = cs.exp(p[0]) + cs.exp(p[1])*x[0]**cs.exp(p[2])/(cs.exp(p[3])**cs.exp(p[2])+x[0]**cs.exp(p[2]))

#link the deterministic model to the sampling statistics (here normal mean and variance)
mean, var = hull_func, 0.1
normal_stats = cs.vertcat(mean, var)

#create a casadi function mapping input and parameters to sampling statistics (mean and var)
y = cs.Function('y',[x,p],[normal_stats])

# enter the function as a tuple with label indicating normal error, into observation list
observ_list = [(y,'Normal')]

#instantiate nloed model class
nloed_model = Model(observ_list,xnames,pnames)

####################################################################################################
# GENERATE OPTIMAL DESIGN
####################################################################################################

#set up the design algorithm to use continuous (relaxed) optimization with four unique input points
continuous_inputs={'Inputs':['Light'],'Bounds':[(.1,5)],'Structure':[['x1'],['x2'],['x3'],['x4']]}

#nominal natural-scale parameter values; log-transformed below to match the
#model's log parameterization
true_param = [1,5,2,1]
# generate the optimal approximate (relaxed) design
relaxed_design = Design(nloed_model,np.log(true_param),'D',continuous_inputs=continuous_inputs)

sample_size = 30
#generate a rounded exact design 
exact_design = relaxed_design.round(sample_size)
Example #5
0
#create a casadi function for y1 stats (normal mean and variance pair)
eta_y1 = cs.vertcat(mean_y1, var_y1)
func_y1 = cs.Function('y1',[x,theta],[eta_y1])
#create a casadi function for y2 stats (Poisson rate only)
eta_y2 = rate_y2
func_y2 = cs.Function('y2',[x,theta],[eta_y2])

###block 2
#create observation list pairing each response with its distribution label
observ_list = [(func_y1,'Normal'),(func_y2,'Poisson')]
#create input name list
input_names = ['x1','x2']
#create parameter name list
parameter_names = ['Theta0','Theta1','Theta2','Theta3']
#create NLOED Model
model_object = Model(observ_list, input_names, parameter_names)

###block 3
# Manually-specified design: four (x1, x2) support points, each observed
# under both response variables with per-point replicate counts.
design = pd.DataFrame({
    'x1': [0, -1, 2, 3] * 2,
    'x2': [1, 1, -1, 0] * 2,
    'Variable': ['y1'] * 4 + ['y2'] * 4,
    'Replicates': [3, 1, 2, 2] * 2,
})
# Pad the printed design with blank lines for readability.
print('')
print('')
print(design)
print('')
print('')

###block 4
design = pd.DataFrame({ 'x1':[0,-1,2,3]*2,
                        'x2':[1,1,-1,0]*2,
Example #6
0
    #append the casadi function and distribution type to obs struct
    observation_structure.extend([(mrna_func, 'Normal'),
                                  (prot_func, 'Normal')])
    #store observation names, useful for plotting
    observation_names.extend([mrna_name, prot_name])
    #store observation type
    observation_type.extend(['RNA', 'Prot'])
    #store observation time
    observation_times.extend([times[i]] * 2)

# Input names (inducer levels; see the model definition earlier in the file).
input_names = ['Init_Inducer', 'Inducer_1', 'Inducer_2', 'Inducer_3']
# Parameters carry a log_ prefix: they are handled on the log scale.
parameter_names = [
    'log_Alpha', 'log_K', 'log_Delta', 'log_Beta', 'log_L', 'log_Gamma'
]

# observation_structure is assembled in the loop above (one Normal
# observation per time point and species).
model_object = Model(observation_structure, input_names, parameter_names)

#***hidden_start****
#generate initial dataset
# create data frame of inputs that need predictions
# NOTE(review): column spelled 'Replicats' here but 'Replicates' elsewhere
# in this file — confirm which spelling Model.sample expects.
init_design = pd.DataFrame({
    'Init_Inducer': [0.] * len(observation_names),
    'Inducer_1': [1.] * len(observation_names),
    'Inducer_2': [0.] * len(observation_names),
    'Inducer_3': [3.] * len(observation_names),
    'Variable': observation_names,
    'Replicats': [1] * len(observation_names)
})
# "True" natural-scale parameter values, log-transformed to match the
# log-scale parameterization above.
true_param = np.log([2, 1.5, 1, 3, 0.75, 0.5])
# Simulate an initial dataset from the model at the true parameters.
init_data = model_object.sample(init_design, true_param)
print('')
Example #7
0
    prot_name = 'prot_' + 't' + "{0:0=2d}".format(times[i])
    prot_stats = cs.vertcat(sample_list[i][1], 0.001)
    prot_func = cs.Function(prot_name, [x, p], [prot_stats])
    ode_response.append((prot_func, 'Normal'))
    response_names.append(prot_name)
    replicates.append(5)

# Expand the design to one row per (input point, response variable) pair,
# then label each row with its variable name and replicate count.
design = design.reindex(design.index.repeat(len(response_names)))
design['Variable'] = response_names
# NOTE(review): column spelled 'Replicats' here but 'Replicates' in other
# examples in this file — confirm which spelling the NLOED API expects.
design['Replicats'] = replicates
design = design.sort_values(by='Variable').reset_index()

# Input and parameter name lists for the ODE-based model.
xnames = ['mrna_ic', 'prot_ic', 'cntrl_1', 'cntrl_2', 'cntrl_3']
pnames = ['alpha', 'delta', 'beta', 'gamma']

# Options dict disables scalar symbolics — presumably required by the
# ODE-based observation functions; confirm against NLOED's Model docs.
ode_model = Model(ode_response, xnames, pnames, {'ScalarSymbolics': False})

# One prediction request per response variable, all at the same fixed
# initial conditions and control levels.
predict_inputs = pd.DataFrame({
    'mrna_ic': [1] * len(response_names),
    'prot_ic': [1] * len(response_names),
    'cntrl_1': [0.1] * len(response_names),
    'cntrl_2': [1.0] * len(response_names),
    'cntrl_3': [0.1] * len(response_names),
    'Variable': response_names
})

# Nominal log-scale parameter values used for prediction.
true_pars = [np.log(0.5), np.log(1.1), np.log(2.1), np.log(0.3)]
predictions = ode_model.predict(predict_inputs, true_pars)

# Pre-compiled regexes for parsing response names of the form '<type>_tNN'
# (e.g. 'prot_t05'): digit_re captures the time stamp digits, type_re the
# observation-type prefix.
# FIX: use raw strings — '\d' in a plain string is an invalid escape
# sequence (SyntaxWarning on modern Python).
digit_re = re.compile(r'[a-z]+_t(\d+)')
type_re = re.compile(r'([a-z]+)_t\d+')
Example #8
0
            - (y_ss(x,p)-y0)*cs.exp(-cs.exp(p[4])*x[1]) \
            - (y_ss(x,p)-y0)*(cs.exp(p[4])/cs.exp(p[5]))*\
                 (cs.exp(-cs.exp(p[4])*x[1]) - cs.exp(-(cs.exp(p[4])+cs.exp(p[5]))*x[1]))

#link the deterministic model to the sampling statistics (here normal mean
#and a fixed observation variance of 100**2/3)
mean, var = gfp_model, 100**2/3
normal_stats = cs.vertcat(mean, var)

#create a casadi function mapping input and parameters to sampling statistics (mean and var)
y = cs.Function('GFP',[x,p],[normal_stats])

# enter the function as a tuple with label indicating normal error, into observation list
observ_list = [(y,'Normal')]

#instantiate nloed model class
nloed_model = Model(observ_list, xnames, pnames)

####################################################################################################
# Fit
####################################################################################################

# obs = [ 1951,	723,	580,
#         1479,	1350,	544,
#         1516,	680,	543,
#         2057,	628,	766,
#         1360,	741,	699,
#         1670,	640,	570,
#         1164,	1135,	2064,
#         1419,	1340,	2171,
#         1330,	1615,	4598,
#         1520,	4024,	8814,
Example #9
0
####################################################################################################
# SET UP MODEL
####################################################################################################

# Two scalar inputs and four parameters as CasADi symbolic vectors.
xs = cs.SX.sym('xs', 2)
xnames = ['x1', 'x2']
ps = cs.SX.sym('ps', 4)
pnames = ['Intercept', 'Slope1', 'Slope2', 'Interaction']

# Linear predictor: intercept, two main effects and an x1*x2 interaction.
lin_predictor = ps[0] + ps[1] * xs[0] + ps[2] * xs[1] + ps[3] * xs[0] * xs[1]

# Normal observation model: predictor as the mean, variance fixed at 0.1.
mean, var = lin_predictor, 0.1
normal_stats = cs.vertcat(mean, var)
y = cs.Function('y', [xs, ps], [normal_stats])

lin_model = Model([(y, 'Normal')], xnames, pnames)

####################################################################################################
# GENERATE DESIGN
####################################################################################################

# Nominal parameter values at which the design is optimized.
true_param = [1, 1, 1, 1]

# x1 is handled as a discrete design input; x2 as a continuous (relaxed)
# input with two named support levels.
discrete_inputs = {'Inputs': ['x1'], 'Bounds': [(-1, 1)]}
continuous_inputs = {
    'Inputs': ['x2'],
    'Bounds': [(-1, 1)],
    'Structure': [['level1'], ['level2']]
}
## discrete_inputs={'Inputs':['x1','x2'],'Bounds':[(-1,1),(-1,1)]}
## continuous_inputs={'Inputs':['x1','x2'],'Bounds':[(-1,1),(-1,1)],'Structure':[['x1_lvl1','x2_lvl1'],['x1_lvl1','x2_lvl2'],['x1_lvl2','x2_lvl2']]}
Example #10
0
# Fitted values (natural scale) for the B_o and B parameters.
B_o_val = 0.073858518
B_val = 1.8532546

# Form lumped 'prime' parameters by scaling B_o and B with
# K_g / (Sigma_m * Sigma_g); K_g_val and Sigma_*_val are defined earlier
# in the file.
B_o_prime_val = B_o_val * K_g_val / Sigma_m_val / Sigma_g_val
B_prime_val = B_val * K_g_val / Sigma_m_val / Sigma_g_val

# Log-transform to match the model's log parameterization.
fit_params = np.log([K_dr_val, K_T_val, B_o_prime_val, B_prime_val, K_m_val])

# enter the function as a tuple with label indicating normal error, into observation list
observ_list = [(gfp_model, 'Normal')]
#create names for inputs
input_names = ['Light']
#create names for parameters
parameter_names = ['K_dr', 'K_T', 'B_o_prime', 'B_prime', 'K_m']
#instantiate nloed model class
static_model = Model(observ_list, input_names, parameter_names)

# #generate predictions with error bars fdor a random selection of inputs)
# prediction_inputs = pd.DataFrame({'Light':np.linspace(0.1,4095,100),
#                                   'Variable':['GFP']*100})
# #generate predictions and intervals
# predictions = static_model.predict(prediction_inputs,
#                                    fit_params,
#                                    options ={'Sensitivity':True})
# #create plot
# fig, ax = plt.subplots()
# #plot mean model prediction
# ax.plot(predictions['Inputs','Light'], predictions['Prediction','Mean'], '-')
# ax.set_xlabel('Light')
# ax.set_ylabel('GFP')
# plt.show()
Example #11
0
# Normal response: mean/variance pair packed into one output vector
# (mean and var are defined earlier in the file).
normal_stats = cs.vertcat(mean, var)
y_norm_func = cs.Function('y_norm',[xs,ps],[normal_stats])

# Poisson response: rate obtained via an exponential (log) link.
rate = cs.exp(lin_predictor)
poisson_stats = rate
y_pois_func = cs.Function('y_pois',[xs,ps],[poisson_stats])

# Bernoulli response: success probability via a logistic link.
prob = cs.exp(lin_predictor)/(1+cs.exp(lin_predictor))
bern_stats = prob
y_bern_func = cs.Function('y_bern',[xs,ps],[bern_stats])

# Mixed-distribution observation list: one (function, label) pair per
# response type.
mixed_response = [  (y_norm_func,'Normal'),
                    (y_bern_func,'Bernoulli'),
                    (y_pois_func,'Poisson')]

mixed_model = Model(mixed_response,xnames,pnames)


####################################################################################################
# GENERATE DESIGN
####################################################################################################

# Nominal parameter values for design optimization.
# NOTE(review): not log-transformed, unlike several other examples in this
# file — confirm this model is parameterized on the natural scale.
true_param = [0.5,1.1,2.1,0.3]

# x1 as a discrete design input; x2 as a continuous (relaxed) input with
# two named support levels.
discrete_inputs = {'Inputs':['x1'],'Bounds':[(-1,1)]}
continuous_inputs = {'Inputs':['x2'],'Bounds':[(-1,1)],'Structure':[['level1'],['level2']]}
opt_design = Design(mixed_model,true_param,'D',discrete_inputs,continuous_inputs)

# Round the relaxed design weights into an exact design of 10 samples.
sample_size = 10
exact_design = opt_design.round(sample_size)