def to_copasi(self) -> model.Model:
    """
    Build a COPASI file from the SBML generated by tellurium.

    Returns:
        A :py:class:`model.Model`
    """
    return model.loada(self.to_antimony(), self.copasi_file)
def load_model(k1, k_minus1, k2):
    # create a base directory to work from
    working_directory = os.path.abspath('')
    filename = f'Model_k1={k1}_kminus1={k_minus1}_k2={k2}.cps'
    # create a path string to a copasi file on the system
    copasi_filename = os.path.join(working_directory, filename)
    antimony_string = f'''
    model negative_feedback()
        // define compartments
        compartment cell = 1.0

        // define species
        S in cell
        P in cell
        C in cell
        I in cell

        // define initial conditions
        S = 1
        C = 1
        I = 0
        P = 0

        // reaction parameters
        k1 = {k1}
        k_minus1 = {k_minus1}
        k2 = {k2}

        // define reactions
        R1: S + C => I; cell * k1 * S * C
        R_minus1: I => S + C; cell * k_minus1 * I
        R3: I => P + C; cell * k2 * I
    end
    '''
    # load the model
    return model.loada(antimony_string, copasi_filename)
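# A minimal usage sketch for load_model (the rate constants and variable names
# below are arbitrary illustration values, not taken from the original
# analysis): build the model for one parameter set and run a short time
# course with simulate(), as in the other snippets.
mod = load_model(k1=0.1, k_minus1=0.05, k2=0.2)
sim_data = mod.simulate(0, 20, 1)
print(sim_data.head())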
def setUp(self):
    super(ProfileLikelihoodTests, self).setUp()
    ant_str = """
    model new_model
        R1: A -> B; _k1*A;
        R2: B -> A; k2*B;
        R3: C -> D; _k3*C*B;
        R4: D -> C; k4*D;

        A = 100;
        B = 0;
        _k1 = 0.1;
        k2 = 0.01;
        _k3 = 0.01;
        k4 = 1;
    end
    """
    self.copasi_file = os.path.join(os.path.dirname(__file__), 'test_model.cps')
    self.model = model.loada(ant_str, self.copasi_file)

    self.fname = os.path.join(os.path.dirname(__file__), 'timecourse.txt')
    self.data = self.model.simulate(0, 10, 1, report_name=self.fname)

    with tasks.ParameterEstimation.Context(
            self.model, self.fname, context='s', parameters='g') as context:
        context.set('method', 'hooke_jeeves')
        context.set('run_mode', True)
        context.set('prefix', '_')
        context.set('randomize_start_values', True)
        config = context.get_config()

    self.pe = tasks.ParameterEstimation(config)

    data = viz.Parse(self.pe).data['test_model']
    self.rss = data.loc[0, 'RSS']
    self.pe_mod = self.pe.models['test_model'].model
    self.pe_mod.insert_parameters(df=data, index=0, inplace=True)
# context.set('number_of_generations', 300)
context.set('run_mode', True)  ## defaults to False
context.set('pe_number', 3)  ## number of repeat items in scan task
# context.set('copy_number', 2)  ## number of times to copy model
# context.set('problem', 'Problem1')
# context.set('fit', 3)
# context.set('prefix', 'k')
config = context.get_config()

pe = tasks.ParameterEstimation(config)
# myPctMod.open()
data = viz.Parse(pe)
print(data)

myExMod = model.loada(model_string_ex, copasi_ex_filename)


def my_add_cols(inFName, outFName, sState, i1State, i2State):
    myData = pd.read_csv(
        os.path.join(os.path.dirname(working_directory), 'data', inFName))
    myData.insert(len(myData.columns), 'S_indep', sState)
    myData.insert(len(myData.columns), 'I1_indep', i1State)
    myData.insert(len(myData.columns), 'I2_indep', i2State)
    myData.rename(columns={
        "pN": "Np",
        "pG": "Gp",
        "pD": "Dp",
        "pK": "Kp"
    }, inplace=True)
    myData.to_csv(os.path.join(working_directory, outFName))
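# A usage sketch for my_add_cols: the file names and independent-variable
# values below are hypothetical, chosen only to show the calling convention.
my_add_cols('timecourse_raw.csv', 'timecourse_indep.csv',
            sState=1, i1State=0, i2State=0)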
    ADeg_k1 = 0.1;

    // Other declarations:
    var ThisIsAssignment;
    const nuc, cyt, A2B, B2C;

    // Unit definitions:
    unit volume = 1e-3 litre;
    unit substance = 1e-3 mole;

    // Display Names:
    A2B_0 is "A2B";
    B2C_0 is "B2C";
end"""

copasi_file = os.path.join(os.getcwd(), 'copasi_file.cps')
mod = model.loada(ant, copasi_file=copasi_file)

TC1 = tasks.TimeCourse(mod, end=1000, step_size=100, intervals=10,
                       report_name='report1.txt')
TC2 = tasks.TimeCourse(mod, end=1000, step_size=100, intervals=10,
                       report_name='report2.txt')
# the remaining arguments for TC3 are assumed to mirror TC1 and TC2
TC3 = tasks.TimeCourse(mod, end=1000, step_size=100, intervals=10,
                       report_name='report3.txt')
os.environ["PATH"] += os.pathsep + "/Applications/copasi" working_dir = os.path.abspath('') copasi_filename = os.path.join(working_dir, "testNoRun.cps") test_string = """ model test_model() var A R1: ->A; 1 R2: A->; 1 end """ theModel = model.loada(test_string, copasi_filename) theTimeCourse = tasks.TimeCourse(theModel, run=False) shellString = """#!/bin/bash # Example SLURM job script for serial (non-parallel) jobs # # # Tell SLURM if you want to be emailed when your job starts, ends, etc. # Currently mail can only be sent to addresses @ncl.ac.uk # #SBATCH --mail-type=ALL #SBATCH [email protected] #
def antStringToDiagramSBML(antString, filePath, tempPath):
    # reduce each reaction line with antLineReactionReducer (defined elsewhere),
    # rebuild the antimony string, then export the resulting model as SBML
    myAntList = antString.splitlines()
    myAntList = [antLineReactionReducer(line) for line in myAntList]
    newAntStr = "\n".join(myAntList)
    newModel = model.loada(newAntStr, tempPath)
    newModel.to_sbml(sbml_file=filePath)
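# A usage sketch for antStringToDiagramSBML: the output paths below are
# hypothetical, and the antimony string is assumed to have been read from
# file already (e.g. oldAntStr in the following snippet).
antStringToDiagramSBML(oldAntStr,
                       filePath=os.path.join(working_directory, 'diagram.sbml'),
                       tempPath=os.path.join(working_directory, 'diagram_temp.cps'))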
"""
Created on Wed Jan 22 16:04:37 2020

@author: peter
"""
from pycotools3 import model
import re, os

copasiPath = "/Applications/copasi"
if not re.search(copasiPath, os.environ["PATH"]):
    os.environ["PATH"] += os.pathsep + copasiPath

working_directory = os.path.dirname(os.path.abspath(__file__))

with open(os.path.join(working_directory, 'antFile.txt'), "r") as f:
    oldAntStr = f.read()

copasi_filename = os.path.join(working_directory, 'temp.cps')
# note: this rebinds the name 'model' from the pycotools3 module to the loaded Model instance
model = model.loada(oldAntStr, copasi_filename)

kineticNames = model.get_variable_names(which='gl', include_assignments=False)
metaboliteNames = model.get_variable_names(which='m', include_assignments=False)

model.parameters[kineticNames].to_csv(
    path_or_buf=os.path.join(working_directory, "oldKVals.csv"))
    A = 100;
    B = 1;
    C = 1;

    // reaction parameters
    k1 = 0.1;
    k2 = 0.1;
    k3 = 0.1;
    k4 = 0.1;
end
"""

copasi_file = os.path.join(working_directory, 'example_model.cps')

## build model
mod = model.loada(antimony_string, copasi_file)
assert isinstance(mod, model.Model)

## simulate some data, returns a pandas.DataFrame
data = mod.simulate(0, 20, 1)

## write data to file
experiment_filename = os.path.join(working_directory, 'experiment_data.txt')
data.to_csv(experiment_filename)

with tasks.ParameterEstimation.Context(
        mod, experiment_filename, context='pl', parameters='gm') as context:
    context.set('method', 'hooke_jeeves')
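    # A sketch of how this context block is typically closed, following the
    # pattern in the earlier snippets (any further context.set() calls used
    # here originally are omitted):
    config = context.get_config()

pe = tasks.ParameterEstimation(config)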