def _buildEdit(self):
    """Create the link qgrid widget for editing composition links.

    Builds the lists of eligible link sources (model outputs) and targets
    (model inputs) from the local package and any referenced external
    packages, then wraps them in a pandas DataFrame shown through qgrid.

    Side effects: sets ``self._listLinkSource``, ``self._listLinkTarget``,
    ``self._dfLink`` and ``self._dfLinkqgrid``.
    """
    # Both lists start with '' so the grid can show an "unselected" choice.
    self._listLinkSource = ['']
    self._listLinkTarget = ['']
    # Parse every model unit in the directory containing the current file.
    parse = os.path.split(self._datas['Path'])[0]
    parsing = pparse.model_parser(parse)
    for i in parsing:
        # Only units that are part of this composition contribute links.
        if 'unit.{}.xml'.format(i.name) in self._listmodel:
            for j in i.inputs:
                self._listLinkTarget.append('{}.{}'.format(i.name, j.name))
            for k in i.outputs:
                self._listLinkSource.append('{}.{}'.format(i.name, k.name))
    # External models are referenced as "pkgname:kind.modelname".
    for model in self._listmodel:
        if ':' in model:
            pkgname, model_attr = model.split(':')
            model_attr = model_attr.split('.')
            # Exactly one external package path must match pkgname;
            # the unpacking raises if zero or several match.
            path, = [i for i in self._listextpkg if pkgname in os.path.split(i)[1]]
            if model_attr[0] == 'composition':
                # Composite reference: parse its composition xml directly.
                pkg, = model_parser(path + os.path.sep + 'crop2ml' + os.path.sep + 'composition.{}.xml'.format(model_attr[1]))
                for j in pkg.inputs:
                    self._listLinkTarget.append('{}.{}'.format(model_attr[1], j))
                for k in pkg.outputs:
                    self._listLinkSource.append('{}.{}'.format(model_attr[1], k))
            else:
                # Unit reference: parse the whole external package and pick
                # the first unit whose name matches.
                pkg = pparse.model_parser(path)
                for m in pkg:
                    if m.name == model_attr[1]:
                        for y in m.inputs:
                            self._listLinkTarget.append('{}.{}'.format(model_attr[1], y.name))
                        for z in m.outputs:
                            self._listLinkSource.append('{}.{}'.format(model_attr[1], z.name))
                        break
    if self._iscreate:
        # Fresh composition: one empty editable row.
        self._dfLink = pandas.DataFrame(data={
            'Link type': pandas.Categorical([''], categories=['', 'InputLink', 'InternalLink', 'OutputLink'], ordered=True),
            'Source': pandas.Categorical([''], categories=self._listLinkSource),
            'Target': pandas.Categorical([''], categories=self._listLinkTarget)
        })
    else:
        # Editing: pre-fill rows from self._listlink; sources/targets that
        # no longer exist in the rebuilt lists degrade to ''.
        self._dfLink = pandas.DataFrame(data={
            'Link type': pandas.Categorical([i['Link type'] for i in self._listlink], categories=['', 'InputLink', 'InternalLink', 'OutputLink'], ordered=True),
            'Source': pandas.Categorical([i['Source'] if i['Source'] in self._listLinkSource else '' for i in self._listlink], categories=self._listLinkSource),
            'Target': pandas.Categorical([i['Target'] if i['Target'] in self._listLinkTarget else '' for i in self._listlink], categories=self._listLinkTarget)
        })
    self._dfLinkqgrid = qgrid.show_grid(self._dfLink, show_toolbar=True)
def meta_inp(self, pkgname):
    """Collect the input objects referenced by the composite model's input links.

    Parameters:
        pkgname: name of a registered package (resolved via ``self.retrive``).

    Returns:
        list: one input object per distinct link source variable; an entry may
        be None when the variable cannot be found in a model unit (a debug
        ``print`` signals that case).
    """
    pkg, mc = self.retrive(pkgname)
    list_var = []      # source variable names already processed (dedup)
    mod_inputs = []    # collected input objects, parallel to list_var
    for inter in mc.inputlink:
        # NOTE(review): link dicts appear to use "source"/"target" keys with
        # target of the form "<model>.<var>" — confirm against the parser.
        var = inter["source"]
        mod = inter["target"].split(".")[0]
        if var not in list_var:
            if self.check_compo(mc, mod) != True:
                # Plain model unit: look the input up directly.
                inp = self.info_inputs_mu(pkg, mod, var)
                if inp is None:
                    # Debug trace for unresolved variables.
                    print(mod, var)
                list_var.append(var)
                mod_inputs.append(inp)
            else:
                # Sub-composite: resolve through the owning package.
                name = self.pkg_m(mc, mod)
                pos = [j for j, k in enumerate(mc.model) if k.name == mod][0]
                if mc.model[pos].file.split(".")[0] == "unit":
                    # Target is a unit file inside the sub-package: parse it
                    # and extract the matching input by name.
                    mc_path = self.retrive(name)[1].path
                    inps = [m.inputs for m in model_parser(mc_path) if m.name == mod][0]
                    inp = [k for k in inps if k.name == var][0]
                else:
                    inp = self.get_mu_inp(name, var)
                mod_inputs.append(inp)
                list_var.append(var)
    return mod_inputs
def example():
    """Parse every crop2ml model found under ``data`` and return the result."""
    parsed_models = pparse.model_parser(data)
    return parsed_models
def info_inputs_mu(self, ppkg, mu, varname):
    """Return the input named *varname* of model unit *mu* in package *ppkg*.

    Returns None when no matching unit/input pair is found.
    """
    for unit in model_parser(ppkg):
        if unit.name != mu:
            continue
        for candidate in unit.inputs:
            if candidate.name == varname:
                return candidate
def test_pkg2py_nrj():
    """Render the EnergyBalance package's model units to Python code."""
    parsed = pparse.model_parser(pkg_nrj)
    render_python.Model2Package(parsed, dir='.', pkg_name="EnergyBalance").run()
def info_outputs_mu(self, ppkg, mu, varname):
    """Return the output named *varname* of model unit *mu* in package *ppkg*.

    Returns None when no matching unit/output pair is found.
    """
    for unit in model_parser(ppkg):
        if unit.name != mu:
            continue
        for candidate in unit.outputs:
            if candidate.name == varname:
                return candidate
def testXmlwf():
    """Translate the EnergyBalance units to Python, build the workflow, and
    dump its external inputs/outputs to ``input.txt`` / ``output.txt``.

    Fixes over the previous version:
    - the two output files were opened but never closed (resource leak);
      they are now managed with ``with`` blocks;
    - locals no longer shadow the builtins ``input`` and ``dir``;
    - the dead counter ``i`` is removed.
    """
    models = pparse.model_parser(data)
    # translate cropml model units to python functions and openalea
    # in the python_model repository
    render_python.Model2Package(models, dir='.', pkg_name="EnergyBalance").run()
    # repository of python models generated with wralea
    out_dir = cwd / 'python_model'
    rep_composite = data / 'crop2ml'
    # composite file
    compositionFile = rep_composite.glob("composition*.xml")[0]
    xmlwf, = composition.model_parser(compositionFile)
    wf = XmlToWf(xmlwf, out_dir, "EnergyBalance")
    wf.run()
    yet_in = []   # input names already written (dedup)
    yet_out = []  # output names already written (dedup)
    inputfile = out_dir / "input.txt"
    outputfile = out_dir / "output.txt"
    with open(inputfile, "w") as fi_in, open(outputfile, "w") as fi_out:
        writer_input = csv.writer(fi_in, lineterminator='\n', delimiter=";")
        writer_output = csv.writer(fi_out, lineterminator='\n', delimiter=";")
        writer_input.writerow(["name", "description", "input type", "unit"])
        writer_output.writerow(["name", "description", "unit"])
        # A workflow input is external only if it is not produced as an output.
        for inp in wf.inputs:
            for model in models:
                for model_input in model.inputs:
                    if (model_input.name == inp
                            and model_input.name not in yet_in
                            and model_input.name not in wf.outputs):
                        writer_input.writerow([
                            inp,
                            model_input.description.encode("utf-8"),
                            model_input.inputtype,
                            model_input.unit.encode("utf-8")
                        ])
                        yet_in.append(inp)
        for out in wf.outputs:
            for model in models:
                for model_output in model.outputs:
                    if model_output.name == out and model_output.name not in yet_out:
                        writer_output.writerow([
                            out,
                            model_output.description.encode("utf-8"),
                            model_output.unit.encode("utf-8")
                        ])
                        yet_out.append(out)
def transpile_package(package, language):
    """Translate a crop2ml package into *language*.

    Parameters:
        package: path to the crop2ml package directory.
        language: target language extension/name (used for directory and
            file naming and passed to the transpiler).

    Returns:
        int: 0 on completion.

    Side effects: creates ``src``/``test`` trees under the package and
    writes one translated file per model unit plus generated tests.
    """
    # translate from crop2ml package
    sourcef = package
    pkg = Path(sourcef)
    models = model_parser(pkg)  # parse xml files and create python model objects
    output = pkg / 'src'
    dir_test = pkg / 'test'
    m = [model.name for model in models]  # NOTE(review): unused — kept as-is
    # Generate package directories if they do not exist.
    if not output.isdir():
        output.mkdir()
    if not dir_test.isdir():
        dir_test.mkdir()
    m2p = render_cyml.Model2Package(models, dir=output)
    m2p.generate_package()  # generate cyml models in the "pyx" directory
    # target-language models directory inside output
    tg_rep = Path(output / "%s" % (language))
    dir_test_lang = Path(dir_test / "%s" % (language))
    if not tg_rep.isdir():
        tg_rep.mkdir()
    if not dir_test_lang.isdir():  # create if it doesn't exist
        dir_test_lang.mkdir()
    # generate: transpile each cyml source into the target language
    cyml_rep = Path(output / 'pyx')  # cyml model directory in output
    for k, file in enumerate(cyml_rep.files()):
        with open(file, 'r') as fi:
            source = fi.read()
        # file stem, e.g. "netradiation" from "netradiation.pyx"
        name = os.path.split(file)[1].split(".")[0]
        for model in models:  # files may not be in the same order as models
            if name == model.name.lower():
                test = Main(file, language, model)
                test.parse()
                test.to_ast(source)
                code = test.to_source()
                filename = tg_rep / "%s.%s" % (name, language)
                with open(filename, "wb") as tg_file:
                    tg_file.write(code.encode('utf-8'))
    # write the generated tests for every model
    test = WriteTest(models, language, dir_test_lang)
    test.write()
    status = 0
    return status
def __init__(self, name, pkg=None):
    """Load a crop2ml package and its composite model metadata.

    Parameters:
        name: package name; used to resolve the path when *pkg* is None
            and as the key under which the package is registered.
        pkg: optional explicit package path. When None, the path is taken
            from the installed package registry, or — if the name is not a
            known package — asked interactively via ``input()``.

    Side effects: registers ``[path, model]`` in ``self.pkgs[name]`` and
    enriches ``self.model`` with resolved inputs/outputs/externals.
    """
    self.name = name
    if pkg is None:
        if self.isPackage(self.name):
            self.pkg = self.load_pkge(self.name).crop2mlpath
        else:
            # Interactive fallback: prompt the user for the package path.
            self.pkg = input("Give the path of package")
    else:
        self.pkg = pkg
    self.data = Path(self.pkg) / "crop2ml"
    # First composition*.xml in the crop2ml directory; IndexError if absent.
    composite_file = self.data.glob("composition*.xml")[0]
    self.mu = model_parser(self.pkg)
    # Exactly one composite model is expected in the composition file.
    self.model, = composition.model_parser(composite_file)
    self.pkgs[self.name] = [self.pkg, self.model]
    # Resolve the composite's effective interface from its links.
    self.model.inputs = self.meta_inp(self.name)
    self.model.outputs = self.meta_out(self.name)
    self.model.ext = self.meta_ext(self.name)
    self.model.path = Path(self.pkg)
def testXmlwf():
    """Translate the Phenology model units to Python and build the workflow."""
    parsed_units = pparse.model_parser(data)
    # Render cropml model units as python functions / openalea wrappers
    # into the python_model repository.
    packager = render_python.Model2Package(parsed_units, dir='.', pkg_name="Phenology")
    packager.run()
    target_dir = cwd / 'python_model'
    # First composition file found in the package's crop2ml directory.
    composition_file = (data / 'crop2ml').glob("composition*.xml")[0]
    workflow_model, = composition.model_parser(composition_file)
    XmlToWf(workflow_model, target_dir, "Phenology").run()
def _getDoc(self, f):
    """Return a DocGenerator for the model named in ``self._datas``.

    Parameters:
        f: an open file-like object; closed before raising when the model
            cannot be found.

    Raises:
        Exception: when no parsed model matches ``self._datas['Model name']``
            (reported inside the ``self._out`` output context).
    """
    pkg_dir = os.path.split(self._datas['Path'])[0]
    parsed_models = model_parser(pkg_dir)
    # Idiomatic first-match search replaces the former index loop over
    # range(len(...)); behavior is unchanged.
    target = next(
        (m for m in parsed_models if m.name == self._datas['Model name']),
        None)
    if target is None:
        f.close()
        with self._out:
            raise Exception('Critical error : model not found.')
    return docGenerator.DocGenerator(target)
def test_pkg2py_nrj():
    """Render the EnergyBalance package's model units to Python code.

    Fix: the body was a copy-paste of the Phenology test (it parsed
    ``pkg_pheno`` with ``pkg_name="Phenology"``) despite the *nrj* name;
    it now exercises ``pkg_nrj``/"EnergyBalance" like its siblings.
    """
    models = pparse.model_parser(pkg_nrj)
    m2p = render_python.Model2Package(models, dir='.', pkg_name="EnergyBalance")
    m2p.run()
def test_pheno():
    """The Phenology package must parse into exactly 12 model units."""
    parsed = pparse.model_parser(pkg_pheno)
    assert len(parsed) == 12
def test_nrj():
    """The EnergyBalance package must parse into exactly 13 model units."""
    parsed = pparse.model_parser(pkg_nrj)
    assert len(parsed) == 13
def transpile_package(package, language):
    """Translate a crop2ml package (units + composite) into *language*.

    Parameters:
        package: path to the crop2ml package directory.
        language: target language name/extension ("cs", "java", "py", ...).

    Returns:
        int: 0 on completion.

    Writes the cyml composite file, per-unit translated sources, optional
    C#/Java struct and wrapper code, and generated notebooks/tests.
    """
    # translate from crop2ml package
    sourcef = package
    namep = sourcef.split(".")[0]
    pkg = Path(sourcef)
    models = model_parser(pkg)  # parse xml files and create python model objects
    output = Path(os.path.join(pkg, 'src'))
    dir_test = Path(os.path.join(pkg, 'test'))
    # Generate package directories if they do not exist.
    if not output.isdir():
        output.mkdir()
    if not dir_test.isdir():
        dir_test.mkdir()
    m2p = render_cyml.Model2Package(models, dir=output)
    m2p.generate_package()  # generate cyml models in the "pyx" directory
    # target-language models directory inside output
    tg_rep = Path(os.path.join(output, language))
    dir_test_lang = Path(os.path.join(dir_test, language))
    if not tg_rep.isdir():
        tg_rep.mkdir()
    if not dir_test_lang.isdir():  # create if it doesn't exist
        dir_test_lang.mkdir()
    m2p.write_tests()
    # generate
    cyml_rep = Path(os.path.join(output, 'pyx'))  # cyml model directory in output
    # Build the composite model topology and emit its cyml source.
    T = Topology(namep, package)
    T_pyx = T.algo2cyml()
    namep = T.model.name.lower()  # namep is rebound to the composite's name
    fileT = Path(os.path.join(cyml_rep, "%s.pyx" % namep))
    with open(fileT, "wb") as tg_file:
        tg_file.write(T_pyx.encode('utf-8'))
    # NOTE(review): nesting of the next statements was reconstructed from a
    # whitespace-mangled source — verify the cs/java block boundary.
    if language in ("cs", "java"):
        # Resolve e.g. CsharpGenerator.to_struct_cs dynamically.
        getattr(
            getattr(pycropml.transpiler.generators,
                    '%sGenerator' % NAMES[language]),
            'to_struct_%s' % language)([T.model], tg_rep, namep)
    # C# additionally gets a wrapper class (no-op expression otherwise).
    to_wrapper_cs(T.model, tg_rep, namep) if language == "cs" else ""
    # Translated composite component file.
    filename = Path(
        os.path.join(tg_rep, "%s.%s" % (namep.capitalize(), language)))
    with open(filename, "wb") as tg_file:
        tg_file.write(T.compotranslate(language).encode('utf-8'))
    # Transpile each cyml model unit into the target language.
    for k, file in enumerate(cyml_rep.files()):
        with open(file, 'r') as fi:
            source = fi.read()
        # file stem, e.g. "netradiation" from "netradiation.pyx"
        name = os.path.split(file)[1].split(".")[0]
        for model in models:  # files may not be in the same order as models
            if name.lower() == model.name.lower() and prefix(model) != "function":
                test = Main(file, language, model, T.model.name)
                test.parse()
                test.to_ast(source)
                code = test.to_source()
                filename = Path(
                    os.path.join(tg_rep,
                                 "%s.%s" % (name.capitalize(), language)))
                with open(filename, "wb") as tg_file:
                    tg_file.write(code.encode('utf-8'))
                # Emit the companion notebook for this unit.
                Model2Nb(model, code, name, dir_test_lang).generate_nb(
                    language, tg_rep, namep)
                #code2nbk.generate_notebook(code, name, dir_nb_lang)
    # writeTest '''TODO'''
    #test = WriteTest(models,language,dir_test_lang)
    #test.write()
    status = 0
    return status
def transpile_package(package, language):
    """Translate a crop2ml package (units + composite) into *language*.

    Parameters:
        package: path to the crop2ml package directory.
        language: target language key; also accepts "record" to print a
            VPZ record, and drives the ``domain_class``/``wrapper``/``ext``
            lookup tables.

    Returns:
        int: 0 on completion.

    Writes per-unit translated sources, the composite component, optional
    struct/wrapper code, notebooks, and for "py" a simulation + setup file.
    """
    sourcef = package
    # Package directory name, used to name the per-package output folder.
    namep = sourcef.split(os.path.sep)[-1]
    pkg = Path(sourcef)
    models = model_parser(pkg)  # parse xml files and create python model objects
    output = Path(os.path.join(pkg, 'src'))
    dir_test = Path(os.path.join(pkg, 'test'))
    # Generate package directories if they do not exist.
    if not output.isdir():
        output.mkdir()
    if not dir_test.isdir():
        dir_test.mkdir()
    m2p = render_cyml.Model2Package(models, dir=output)
    m2p.generate_package()  # generate cyml models in the "pyx" directory
    # per-language directory, containing a per-package subdirectory
    tg_rep1 = Path(os.path.join(output, language))
    dir_test_lang = Path(os.path.join(dir_test, language))
    if not tg_rep1.isdir():
        tg_rep1.mkdir()
    tg_rep = Path(os.path.join(tg_rep1, namep))
    if not tg_rep.isdir():
        tg_rep.mkdir()
    if not dir_test_lang.isdir():  # create if it doesn't exist
        dir_test_lang.mkdir()
    m2p.write_tests()
    # generate cyml functions
    cyml_rep = Path(os.path.join(output, 'pyx'))  # cyml model directory in output
    # create topology of composite model
    T = Topology(namep, package)
    namep = T.model.name  # namep is rebound to the composite's name
    # Record VPZ
    if language == "record":
        vpz = Crop2ML_Vpz(T)
        print(vpz.create())
    # domain class (e.g. struct generation) for languages that need one
    if language in domain_class:
        # Resolve e.g. CsharpGenerator.to_struct_cs dynamically.
        getattr(
            getattr(pycropml.transpiler.generators,
                    '%sGenerator' % NAMES[language]),
            'to_struct_%s' % language)([T.model], tg_rep, namep)
    # wrapper class for languages that need one
    if language in wrapper:
        getattr(
            getattr(pycropml.transpiler.generators,
                    '%sGenerator' % NAMES[language]),
            'to_wrapper_%s' % language)(T.model, tg_rep, namep)
    # Transform each model unit to the target language/platform.
    for k, file in enumerate(cyml_rep.files()):
        with open(file, 'r') as fi:
            source = fi.read()
        # file stem, e.g. "netradiation" from "netradiation.pyx"
        name = os.path.split(file)[1].split(".")[0]
        for model in models:  # files may not be in the same order as models
            if name.lower() == model.name.lower() and prefix(model) != "function":
                test = Main(file, language, model, T.model.name)
                test.parse()
                test.to_ast(source)
                code = test.to_source()
                filename = Path(
                    os.path.join(tg_rep,
                                 "%s.%s" % (name.capitalize(), ext[language])))
                with open(filename, "wb") as tg_file:
                    tg_file.write(code.encode('utf-8'))
                # Emit the companion notebook for this unit.
                Model2Nb(model, code, name, dir_test_lang).generate_nb(
                    language, tg_rep, namep)
                #code2nbk.generate_notebook(code, name, dir_nb_lang)
    # Create Cyml Composite model
    T_pyx = T.algo2cyml()
    fileT = Path(os.path.join(cyml_rep, "%sComponent.pyx" % namep))
    with open(fileT, "wb") as tg_file:
        tg_file.write(T_pyx.encode('utf-8'))
    filename = Path(
        os.path.join(tg_rep, "%sComponent.%s" % (namep, ext[language])))
    with open(filename, "wb") as tg_file:
        tg_file.write(T.compotranslate(language).encode('utf-8'))
    ## create computing algorithm (Python only): simulation driver + setup
    if language == "py":
        simulation = PythonSimulation(T.model)
        simulation.generate()
        code = ''.join(simulation.result)
        filename = Path(os.path.join(tg_rep, "simulation.py"))
        initfile = Path(os.path.join(tg_rep, "__init__.py"))
        with open(filename, "wb") as tg_file:
            tg_file.write(code.encode("utf-8"))
        # empty __init__.py makes the output directory an importable package
        with open(initfile, "wb") as tg_file:
            tg_file.write("".encode("utf-8"))
        setup = PythonSimulation(T.model)
        setup.generate_setup()
        code = ''.join(setup.result)
        setupfile = Path(os.path.join(tg_rep1, "setup.py"))
        with open(setupfile, "wb") as tg_file:
            tg_file.write(code.encode("utf-8"))
    status = 0
    return status
def test_pkg2py_phen():
    """Generate the cyml package for Phenology, then transpile it to Python."""
    parsed = pparse.model_parser(pkg_pheno)
    package_writer = render_cyml.Model2Package(parsed, dir='.', pkg_name="Phenology")
    package_writer.run()
    transpile_package(pkg_pheno, "py")
def test_pkg2py_nrj():
    """Generate the cyml package for EnergyBalance, then transpile it to Python."""
    parsed = pparse.model_parser(pkg_nrj)
    package_writer = render_cyml.Model2Package(parsed, dir='.', pkg_name="EnergyBalance")
    package_writer.run()
    transpile_package(pkg_nrj, "py")
def example():
    """Parse the first ``Example*.xml`` file found in ``data``."""
    example_file = data.glob('Example*.xml')[0]
    return pparse.model_parser(example_file)