def __init__(símismo, archivo, nombre='mds'):
    """
    Wrap a PySD model read from a Vensim (.mdl), XMILE (.xmile/.xml) or
    already-translated PySD (.py) file.

    Parameters
    ----------
    archivo: str
        Path of the model file; the extension selects the reader.
    nombre: str
        Name to give the model (default 'mds').

    Raises
    ------
    ValueError
        If the file extension is not one PySD can read.
    """
    nmbr, ext = os.path.splitext(archivo)
    if ext == '.mdl':
        símismo.tipo = '.mdl'
        # Only retranslate to .py when the cached translation is older
        # than the .mdl source.
        if os.path.isfile(nmbr + '.py') and (
                os.path.getmtime(nmbr + '.py') > os.path.getmtime(archivo)):
            símismo.modelo = pysd.load(nmbr + '.py')
        else:
            símismo.modelo = pysd.read_vensim(archivo)
    elif ext in ['.xmile', '.xml']:
        símismo.tipo = '.xmile'
        símismo.modelo = pysd.read_xmile(archivo)
    elif ext == '.py':
        # Already-translated PySD model
        símismo.tipo = '.py'
        símismo.modelo = pysd.load(archivo)
    else:
        # FIX: the message now also mentions ".py", which IS accepted above
        # (keeps the message consistent with the sibling implementation).
        raise ValueError(
            _('PySD no sabe leer modelos del formato "{}". Debes darle un modelo ".py", ".mdl" o ".xmile".'
              ).format(ext))

    símismo._conv_nombres = {}
    símismo.tiempo_final = None
    símismo.cont_simul = False
    símismo.paso_act = 0
    símismo.vars_para_cambiar = {}
    símismo._res_recién = None  # pd.DataFrame

    super().__init__(archivo, nombre=nombre)
def test_run_ignore_missing(self):
    """Warnings about missing data values obey the missing_values policy."""
    import pysd
    from warnings import catch_warnings

    model_dir = 'test-models/tests/get_with_missing_values_xlsx/'
    model_mdl = model_dir + 'test_get_with_missing_values_xlsx.mdl'
    model_py = model_dir + 'test_get_with_missing_values_xlsx.py'

    # Translating with missing_values="ignore" must stay silent...
    with catch_warnings(record=True) as caught:
        model = pysd.read_vensim(model_mdl, missing_values="ignore")
    self.assertTrue(not any("missing" in str(w.message) for w in caught))

    # ...and so must running the resulting model.
    with catch_warnings(record=True) as caught:
        model.run()
    self.assertTrue(not any("missing" in str(w.message) for w in caught))

    # Loading the translated file with the default policy does warn.
    with catch_warnings(record=True) as caught:
        model = pysd.load(model_py)
    self.assertTrue(any("missing" in str(w.message) for w in caught))

    # Running it warns as well.
    with catch_warnings(record=True) as caught:
        model.run()
    self.assertTrue(any("missing" in str(w.message) for w in caught))

    # missing_values="raise" escalates the condition to an error.
    with self.assertRaises(ValueError):
        pysd.load(model_py, missing_values="raise")
def test_circular_reference(self):
    """A circular Integ <-> Delay initialization must raise a ValueError
    that names both unresolved objects."""
    import os
    import pysd

    # Minimal translated model in which _integ_integ and _delay_delay each
    # need the other's value to initialize, so neither can go first.
    # NOTE(review): the indentation inside this literal was lost in the
    # source; it was reconstructed to be consistent with the dedent below —
    # confirm against the original file.
    model_main = """
        from pysd import cache, external
        from pysd.py_backend.functions import Integ, Delay

        _subscript_dict = {}
        _namespace = {'integ': 'integ', 'delay': 'delay'}
        __pysd_version__ = "1.1.1"

        __data = {'scope': None, 'time': lambda: 0}

        def _init_outer_references(data):
            for key in data:
                __data[key] = data[key]

        def time():
            return __data["time"]()

        def time_step():
            return 0.5

        def initial_time():
            return 0

        def integ():
            return _integ_integ()

        def delay():
            return _delay_delay()

        _integ_integ = Integ(lambda: 2, lambda: delay(), '_integ_integ')

        _delay_delay = Delay(lambda: 2, lambda: 1, lambda: integ(), 1, time_step, '_delay_delay')
        """
    # Strip the source-level indentation so the written file is valid Python.
    model_main = model_main.replace("\n        ", "\n")

    with open("circular.py", "w") as f:
        f.write(model_main)

    with self.assertRaises(ValueError) as err:
        pysd.load("circular.py")

    os.remove("circular.py")

    # The error must identify both objects and explain the probable cause.
    self.assertIn('_integ_integ', str(err.exception))
    self.assertIn('_delay_delay', str(err.exception))
    self.assertIn('Unresolvable Reference: '
                  + 'Probable circular initialization...\n'
                  + 'Not able to initialize the '
                  + 'following objects:',
                  str(err.exception))
def gen_mod_pysd(archivo):
    """Return a PySD model object for ``archivo`` (.py, .mdl or .xmile)."""
    base, ext = os.path.splitext(archivo)

    # Already-translated PySD models load directly.
    if ext == '.py':
        return pysd.load(archivo)

    # Reuse the cached translation when it is newer than the source model.
    traducido = base + '.py'
    if os.path.isfile(traducido) and arch_más_recién(traducido, archivo):
        return pysd.load(traducido)

    if ext == '.mdl':
        return pysd.read_vensim(archivo)
    return pysd.read_xmile(archivo)
def SD_ReadVensim(filename, AutoSelect=True):
    """
    Load a system-dynamics model with PySD.

    Parameters
    ----------
    filename: str
        Path to a ".py" (already-translated) or ".mdl" (Vensim) model file.
    AutoSelect: bool
        When True, prefer an already-translated ".py" file sitting next to
        the ".mdl" instead of re-translating.

    Returns
    -------
    The loaded PySD model object.
    """
    # BUG FIX: str.split(".") yields the extension WITHOUT the leading dot,
    # so the original comparison against ".py" could never match and .py
    # files were always routed through the Vensim reader.
    if filename.split(".")[-1] == "py":
        SDmodel = pysd.load(filename)
    elif AutoSelect:
        # filename[0:-3] drops "mdl" but keeps the trailing dot.
        if os.path.isfile(filename[0:-3] + "py"):
            SDmodel = pysd.load(filename[0:-3] + "py")
            print("Auto load existed py file.")
        else:
            SDmodel = pysd.read_vensim(filename)
    else:
        SDmodel = pysd.read_vensim(filename)
    return SDmodel
def __init__(símismo, archivo, nombre='mds'):
    """
    Wrap a PySD model read from a Vensim (.mdl), XMILE (.xmile/.xml) or
    already-translated PySD (.py) file.

    Parameters
    ----------
    archivo: str
        Path of the model file; the extension selects the reader.
    nombre: str
        Name to give the model (default 'mds').

    Raises
    ------
    ValueError
        If the file extension is not one PySD can read.
    """
    ext = os.path.splitext(archivo)[1]
    if ext == '.mdl':
        símismo.tipo = '.mdl'
        símismo.mod = pysd.read_vensim(archivo)
    elif ext in ['.xmile', '.xml']:
        símismo.tipo = '.xmile'
        símismo.mod = pysd.read_xmile(archivo)
    elif ext == '.py':
        # Already-translated PySD model
        símismo.tipo = '.py'
        símismo.mod = pysd.load(archivo)
    else:
        # FIX: the message now also mentions ".py", which IS accepted above
        # (keeps the message consistent with the sibling implementation).
        raise ValueError(
            _('PySD no sabe leer modelos del formato "{}". Debes darle un modelo ".py", ".mdl" o ".xmile".'
              ).format(ext))

    símismo.tipo_mod = None
    símismo._conv_nombres = {}
    símismo.tiempo_final = None
    símismo.cont_simul = False
    símismo.paso_act = 0
    símismo.vars_para_cambiar = {}
    símismo._res_recién = None  # pd.DataFrame

    super().__init__(archivo, nombre=nombre)
def run_simulation(self):
    """
    Evaluate the model once per sample row and collect the output variable.

    Returns a numpy array with one value per row of ``self.samples``; stops
    early (leaving the remaining entries at zero) if a run produces NaN.
    """
    output_variable = self.output.get()
    n_constants = len(self.constants_included)
    n_samples = len(self.samples)
    Y = np.zeros([self.samples.shape[0]])

    for i, X in enumerate(self.samples):
        print("\r{} of {}".format(i, n_samples), end='\r', flush=True)

        # Split the sample row into constant parameters and initial values.
        run_params = dict(zip(self.constants_included, X[0:n_constants]))
        init_vals = dict(zip(self.initial_included, X[n_constants:]))

        if not init_vals:
            model_output = self.model.run(
                return_columns=[output_variable],
                params=run_params,
                return_timestamps=[self.model.components.final_time()])
        else:
            # Reload the model from the python file: setting an initial
            # condition on a reused model would keep stocks from resetting
            # to their initial values.
            model = pysd.load(sys.argv[1].split('.')[0] + ".py")
            model_output = model.run(
                return_columns=[output_variable],
                initial_condition=(self.model.components.initial_time(),
                                   init_vals),
                params=run_params,
                return_timestamps=[self.model.components.final_time()])

        Y[i] = model_output.iloc[0][0]

        # If the model overflows somehow, stop immediately.
        if np.isnan(Y[i]):
            print("Calculation is overflowing")
            break

    return Y
def test_circular_reference(self):
    """Loading a model whose stateful objects are circularly initialized
    must fail (currently surfaces as a KeyError)."""
    import os
    import pysd

    # Minimal translated model in which _integ_integ and _delay_delay each
    # need the other's value to initialize.
    # NOTE(review): the indentation inside this literal was lost in the
    # source; it was reconstructed to be consistent with the dedent below —
    # confirm against the original file.
    model_main = """
        from pysd import cache, external
        from pysd.py_backend.functions import Integ, Delay

        _subscript_dict = {}
        _namespace = {'integ': 'integ', 'delay': 'delay'}
        __pysd_version__ = "1.1.1"

        __data = {'scope': None, 'time': lambda: 0}

        def _init_outer_references(data):
            for key in data:
                __data[key] = data[key]

        def time():
            return __data["time"]()

        def initial_time():
            return 0

        def integ():
            return _integ_integ()

        def delay():
            return _delay_delay()

        _integ_integ = Integ(lambda: 2, lambda: delay())

        _delay_delay = Delay(lambda: 2, lambda: 1, lambda: integ(), 1)
        """
    # Strip the source-level indentation so the written file is valid Python.
    model_main = model_main.replace("\n        ", "\n")

    with open("circular.py", "w") as f:
        f.write(model_main)

    with self.assertRaises(KeyError):
        pysd.load("circular.py")

    os.remove("circular.py")
def model(self, data_model, data_files, shared_tmpdir):
    """Return the translated model, translating the .mdl only on first use."""
    translated = shared_tmpdir.joinpath(data_model.with_suffix(".py").name)
    if translated.is_file():
        # A previous test already translated this model; just load it.
        return load(translated, data_files)
    # First use: copy the source into the shared dir and translate there.
    copied = shared_tmpdir.joinpath(data_model.name)
    shutil.copy(data_model, copied)
    return read_vensim(copied, data_files)
def load(model_file, data_files, missing_values, split_views, **kwargs):
    """
    Translate and load model file.

    Parameters
    ----------
    model_file: str
        Vensim, Xmile or PySD model file.
    data_files: list
        If given the list of files where the necessary data to run the
        model is given.
    missing_values : str ("warning", "raise", "ignore", "keep")
        What to do with missing values. If "warning" (default)
        shows a warning message and interpolates the values.
        If "raise" raises an error. If "ignore" interpolates
        the values without showing anything. If "keep" it will keep
        the missing values, this option may cause the integration to
        fail, but it may be used to check the quality of the data.
    split_views: bool (optional)
        If True, the sketch is parsed to detect model elements in each
        model view, and then translate each view in a separate python
        file. Setting this argument to True is recommended for large
        models split in many different views. Default is False.
    **kwargs: (optional)
        Additional keyword arguments.
        subview_sep: (str)
            Character used to separate views and subviews. If provided,
            and split_views=True, each submodule will be placed inside
            the folder of the parent view.

    Returns
    -------
    pysd.model
    """
    if model_file.lower().endswith(".mdl"):
        print("\nTranslating model file...\n")
        return pysd.read_vensim(model_file, initialize=False,
                                data_files=data_files,
                                missing_values=missing_values,
                                split_views=split_views, **kwargs)
    elif model_file.lower().endswith(".xmile"):
        print("\nTranslating model file...\n")
        return pysd.read_xmile(model_file, initialize=False,
                               data_files=data_files,
                               missing_values=missing_values)
    else:
        # Anything else is assumed to be an already-translated .py model.
        return pysd.load(model_file, initialize=False,
                         data_files=data_files,
                         missing_values=missing_values)
def test_load_type_error(self):
    """Loading a model built with an outdated external-object signature
    (old dims argument ``[]``) must raise ImportError."""
    import os
    import pysd

    # external object old definition with dims ([])
    ext = "_ext_data = external.ExtData('input.xlsx', "\
        + "'Sheet1', '5', 'B6', None, {}, [], _root, "\
        + "'_ext_data')"

    lines = ["from pysd import external", "_root = './'", ext]
    with open("type_error.py", "w") as f:
        f.write("\n".join(lines))

    with self.assertRaises(ImportError):
        pysd.load("type_error.py")

    os.remove("type_error.py")
def test_load_different_version_error(self):
    """pysd.load must reject models translated with an old PySD major
    version and accept the current one."""
    import os
    import pysd

    # Minimal translated-model skeleton, written twice below with
    # different __pysd_version__ markers.
    # NOTE(review): the indentation inside this literal was lost in the
    # source; it was reconstructed to be consistent with the dedent below —
    # confirm against the original file.
    model_main = """
        from pysd import cache, external

        __data = {'scope': None, 'time': lambda: 0}

        def _init_outer_references(data):
            for key in data:
                __data[key] = data[key]

        def initial_time():
            return 0
        """
    # Strip the source-level indentation so the written file is valid Python.
    model_main = model_main.replace("\n        ", "\n")

    # old PySD major version
    with open("old_version.py", "w") as f:
        f.write(model_main)
        f.write("__pysd_version__ = \"0.5.0\"")

    with self.assertRaises(ImportError):
        pysd.load("old_version.py")

    # current PySD major version
    with open("current_version.py", "w") as f:
        f.write(model_main)
        f.write("__pysd_version__ = \"1.99.3\"")

    pysd.load("current_version.py")

    os.remove("old_version.py")
    os.remove("current_version.py")
def _generar_mod(símismo, archivo, **ops_mód):
    """Create the underlying PySD model object for ``archivo`` and record
    its type in ``símismo.tipo_mod``."""
    base, ext = os.path.splitext(archivo)

    # Already-translated PySD models load directly.
    if ext == '.py':
        símismo.tipo_mod = '.py'
        return pysd.load(archivo)

    if ext == '.mdl':
        símismo.tipo_mod = '.mdl'
    elif ext in ['.xmile', '.xml']:
        símismo.tipo_mod = '.xmile'
    else:
        raise ValueError(
            _('PySD no sabe leer modelos del formato "{}". Debes darle un modelo ".py", ".mdl" o ".xmile".'
              ).format(ext))

    # Only retranslate when the cached .py is older than the source model.
    traducido = base + '.py'
    if os.path.isfile(traducido) and (
            os.path.getmtime(traducido) > os.path.getmtime(archivo)):
        return pysd.load(traducido)

    if ext == '.mdl':
        return pysd.read_vensim(archivo)
    return pysd.read_xmile(archivo)
def model(self, shared_tmpdir, model_path, subview_sep, _root):
    """
    Translate the model or read a translated version.
    This way each file is only translated once.
    """
    translated = shared_tmpdir.joinpath(model_path.with_suffix(".py").name)
    if translated.is_file():
        # Already translated by an earlier test in this session.
        return pysd.load(translated)
    # First use: copy the mdl into the shared dir, then translate it there.
    copied = shared_tmpdir.joinpath(model_path.name)
    shutil.copy(_root.joinpath(model_path), copied)
    return pysd.read_vensim(copied, split_views=True, subview_sep=subview_sep)
# print(f"{region}-{crop} start running model") try: output_path = Wofost(crop_path, argo_path, soil_path, meteo_path, meteo_name, wave, co2, region, crop_name).init_model() vensim_out_put_map[ f"{crop_name}-{region_name}"] = output_path runned_count += 1 except Exception as ex: erro_count += 1 import traceback print('171', traceback.print_exc()) print(crop_name, region, meteo_name, ex.args) continue # print(f"{region}-{crop} running complete in path {output_path}") print("count of meteo file", count) print("count of success simulation", runned_count) print("count of errors", erro_count) # create_meteo_for_each_crop_of_each_region() # model = pysd.load('amin.py') # # return_columns=keys_in_vensim_output stocks = model.run(return_columns=keys_in_vensim_output) create_new_meteo(stocks) stocks.to_csv("./OutPut/vensim_simualtion_output.csv") running_wensim()
os.makedirs(output_path) except OSError as e: if e.errno != errno.EEXIST: raise new_df = pd.concat([df1,df2]) new_df.to_csv(output_path+"System_model_result.csv", sep=',', encoding='utf-8') return new_df # Run other rounds function def Run_round(model, Input_Data, start, end, Return_columns): System_Go = model.run(initial_condition='current',return_timestamps=range(start,end),params = Input_Data,return_columns=Return_columns) return System_Go #%% # Convert Vensim to Python #SDmodel = pysd.read_vensim("TaoYuanSystem_SDLab_no-loss-rate.mdl") SDmodel = pysd.load("TaoYuanSystem_SDLab_NoLossRate.py") #%% # Read in data Vensim_path = r"C:\Users\Philip\Documents\GitHub\TaoyuanSD\Data for model Test" os.chdir(Vensim_path) Inflow_path = r"Data_inflow_2012.xlsx" Inflow = ExcelToInput(Inflow_path) Allocation_path = r"Data_allocation_2012_Test.xlsx" Allocation = ExcelToInput(Allocation_path) #%% # Forming Input data SDInput = {**Inflow,**Allocation} SDInput["INITIAL TIME"] = 0
def static_test_matrix(mdl_file, matrix=None, excel_file=None, errors='return'):
    """
    Run a matrix of one-parameter static tests against a Vensim model.

    Parameters
    ----------
    mdl_file: str
        Vensim .mdl file to translate and test.
    matrix: pandas.DataFrame (optional)
        Test matrix with a 3-level index (parameter name, _, value to set);
        each column names a variable whose value at t=0 is checked.
    excel_file: str (optional)
        Excel file to read the matrix from when ``matrix`` is not supplied.
    errors: str
        'return' to return the error matrix, 'raise' to raise an
        AssertionError describing every failure.

    Returns
    -------
    pandas.DataFrame or None
        Error matrix sorted by condition/variable, or None if all pass.

    Raises
    ------
    ValueError
        If neither ``matrix`` nor ``excel_file`` is given.
    AssertionError
        If errors='raise' and any check fails.
    """
    # BUG FIX: `if matrix:` raises "truth value of a DataFrame is ambiguous"
    # whenever a DataFrame is actually passed in; test identity instead.
    if matrix is not None:
        pass
    elif excel_file:
        matrix = pd.read_excel(excel_file, index_col=[0, 1, 2])
        # BUG FIX: '-inf' used to be replaced by +np.inf; map it to -np.inf.
        matrix = matrix.replace('inf', np.inf).replace('-inf', -np.inf)
    else:
        raise ValueError(
            'Must supply a test matrix or refer to an external file')

    model = pysd.read_vensim(mdl_file)
    py_mdl_file = model.py_model_file

    error_list = []
    for row_num, (index, row) in enumerate(matrix.iterrows()):
        try:
            # Reload for every row so state never leaks between conditions.
            model = pysd.load(py_mdl_file)
            result = model.run(params={
                index[0]: index[2]
            },
                               return_columns=row.index.values,
                               return_timestamps=0).loc[0]
            for col_num, (key, value) in enumerate(row.items()):
                try:
                    # Skip placeholder cells. BUG FIX: pd.isna() also skips
                    # NaN cells, which the old membership test against
                    # np.nan missed (NaN compares unequal to everything,
                    # including itself).
                    if value in ['-', 'x', 'nan', ''] or pd.isna(value):
                        continue
                    if result[key] != value:
                        error_list.append({
                            'Condition': '%s = %s' % (index[0], index[2]),
                            'Variable': repr(key),
                            'Expected': repr(value),
                            'Observed': repr(result[key]),
                            'Test': '%i.%i' % (row_num, col_num)
                        })
                except Exception as e:
                    error_list.append({
                        'Condition': '%s = %s' % (index[0], index[2]),
                        'Variable': repr(key),
                        'Expected': repr(value),
                        'Observed': e,
                        'Test': '%i.%i' % (row_num, col_num)
                    })
        except Exception as e:
            error_list.append({
                'Condition': '%s = %s' % (index[0], index[2]),
                'Variable': '',
                'Expected': 'Run Error',
                'Observed': e,
                'Test': '%i.run' % row_num
            })

    if len(error_list) == 0:
        return None

    if errors == 'return':
        df = pd.DataFrame(error_list)
        df.set_index('Test', inplace=True)
        return df.sort_values(
            ['Condition', 'Variable'])[['Condition', 'Variable',
                                        'Expected', 'Observed']]
    elif errors == 'raise':
        raise AssertionError([
            "When '%(Condition)s', %(Variable)s is %(Observed)s "
            "instead of %(Expected)s" % e for e in error_list
        ])