def solve_high_level(gams_folder, sim_folder, output_lst=False):
    """Run the UCM_h model through the high-level GAMS Python API.

    Copies the model file, input GDX and CPLEX option file from
    *sim_folder* into a fresh GAMS workspace, runs the job, then copies
    the result files back into *sim_folder*.

    :param gams_folder: path to the GAMS system (installation) directory
    :param sim_folder: simulation folder containing UCM_h.gms, Inputs.gdx
        and cplex.opt; results are copied back here
    :param output_lst: if True, keep the GAMS .lst listing output
        (otherwise listing output is discarded)
    :return: return value of GamsJob.run()
    """
    try:
        from gams import GamsWorkspace
        ws = GamsWorkspace(system_directory=str(gams_folder), debug=3)
        # Stage all required inputs into the workspace directory:
        for fname in ('UCM_h.gms', 'Inputs.gdx', 'cplex.opt'):
            shutil.copy(os.path.join(sim_folder, fname), ws.working_directory)
        t1 = ws.add_job_from_file('UCM_h.gms')
        opt = ws.add_options()
        # Do not create .lst file: os.devnull is 'nul' on Windows and
        # '/dev/null' elsewhere, replacing the manual sys.platform branch.
        if not output_lst:
            opt.output = os.devnull
        time0 = time.time()
        status = t1.run(opt)
    except Exception as e:
        if 'optCreateD' in str(e):
            logging.error('The GAMS solver can only be run once in the same '
                          'console. Please open another console')
        else:
            logging.error('The following error occurred when trying to solve '
                          'the model in gams: ' + str(e))
        sys.exit(1)
    # copy the result file to the simulation environment folder:
    shutil.copy(os.path.join(ws.working_directory, 'Results.gdx'), sim_folder)
    # Optional artefacts are only copied when the run produced them:
    for filename in ['UCM_h.lst', 'UCM_h.log', 'debug.gdx']:
        src = os.path.join(ws.working_directory, filename)
        if os.path.isfile(src):
            shutil.copy(src, sim_folder)
    logging.info('Completed simulation in {0:.2f} seconds'.format(time.time() - time0))
    return status
def ler_resultado():
    """Read the solved assignment variable from ./files/out.gdx and map the
    selected tuples back onto the matching Lotacao records."""
    from gams import GamsWorkspace

    ws = GamsWorkspace(working_directory="./files")
    # add a new GamsDatabase and initialize it from the GDX file just created
    db2 = ws.add_database_from_gdx("out.gdx")
    x = {tuple(rec.keys): rec.level for rec in db2["x"]}

    # Keep only the tuples whose decision variable level equals 1 (selected)
    alocacao = [key for key, level in x.items() if level == 1]

    lotacao = Lotacao.objects.filter(turma__in=turmas)
    resultado = []
    for a in alocacao:
        for l in lotacao:
            # Tuple layout:
            #   a[0] -> disciplina (course name)
            #   a[1] -> semana (week)
            #   a[2] -> turno (shift)
            #   a[3] -> horario (time slot)
            #   a[4] -> dia da semana (day of week)
            if a[0] == l.disciplina.nome:
                resultado.append([l, a[1], a[4], a[2], a[3]])
    return resultado
def ratmarg_SUT(table_in, EORA=False):
    """Obtain marginal values (Ratmarg) for a supply-use table by patching and
    running the 'obtain_marg_value_SUT' GAMS template.

    :param table_in: SUT table object providing name, countries, sectors and
        the data written to GDX by load_db_SUT
    :param EORA: not referenced in this body -- TODO confirm whether callers
        rely on it
    :return: pandas DataFrame of marginal values (countries x industries),
        also cached to input_data/Ratmarg_<name>.csv
    """
    data_path = load_config()['paths']['data']

    # Prepare the table and write its GDX input database for the GAMS run
    table_in.prep_data()
    load_db_SUT(table_in)

    ''' RUN SCRIPT WITH DISRUPTION '''
    setdir = os.path.join(data_path, 'gams_runs')
    ws = GamsWorkspace(setdir)
    ws.get_working_directory()

    # Copy the template .gms file to a table-specific copy before editing it
    gamsfile_in = os.path.join(setdir, "obtain_marg_value_SUT.gms")
    gamsfile = os.path.join(setdir, "obtain_marg_value_SUT_{}.gms".format(table_in.name))
    copyfile(gamsfile_in, gamsfile)

    str_ctry = ','.join(table_in.countries)
    str_fd = 'FinalD'  # ','.join(list(table_in.FD_labels['FD'].unique()))

    with open(gamsfile, 'r') as file:
        # read a list of lines into data
        data = file.readlines()

    # Patch fixed line numbers of the template: GDX input name plus the
    # industry/country/final-demand set definitions.
    # NOTE(review): indices 30/37/39/40/43 assume the template layout never
    # changes -- fragile; re-check whenever the .gms template is edited.
    gdx_file = "%s.gdx" % table_in.name
    data[30] = '$GDXIN ' + gdx_file + '\n'
    str_ind = ','.join(table_in.sectors)
    data[40] = 'ind(col) list of industries /' + str_ind + '/\n'
    data[37] = '/' + str_ctry + '/\n'
    data[39] = '/' + str_ctry + '/\n'
    data[43] = '/' + str_fd + '/\n'

    with open(gamsfile, 'w') as file:
        file.writelines(data)

    # Strip a possible relative prefix so the job path resolves inside the
    # workspace directory, then run the patched model
    gamsfile_run = gamsfile.replace("..\\..\\gams_runs\\", "")
    t1 = ws.add_job_from_file(gamsfile_run)
    t1.run()

    # Collect the Ratmarg symbol into a (country, industry)-indexed frame
    Ratmarg = []
    index_ = []
    for rec in t1.out_db["Ratmarg"]:
        index_.append((rec.keys[0], rec.keys[1]))
        Ratmarg.append(rec.get_value())

    index_ = pd.MultiIndex.from_tuples(index_, names=('CNTRY', 'IND'))
    Ratmarginal = pd.DataFrame(Ratmarg, index=index_).unstack()
    Ratmarginal.columns = Ratmarginal.columns.droplevel()

    # Cache the result so later runs can reuse it
    Ratmarginal.to_csv(os.path.join(data_path, 'input_data', 'Ratmarg_{}.csv'.format(table_in.name)))

    return Ratmarginal
def _get_version(self):
    """Return a 4-tuple describing the solver executable version."""
    if not self.available(exception_flag=False):
        return _extract_version('')
    from gams import GamsWorkspace
    workspace = GamsWorkspace()
    # Keep at most the first four dotted components of the version string
    parts = [int(piece) for piece in workspace._version.split('.')[:4]]
    # Right-pad with zeros so the result always has exactly four entries
    parts.extend([0] * (4 - len(parts)))
    return tuple(parts)
def teste_gams_linux():
    """Create a GAMS workspace using the platform-appropriate GAMS
    installation path (Linux install vs. Windows default)."""
    from gams import GamsWorkspace
    import platform
    from gradehoraria import settings

    working_directory = settings.BASE_DIR
    # Select the system directory according to the host operating system
    gams_path = '/opt/gams28.2' if platform.system() == 'Linux' else 'c:/GAMS'
    ws = GamsWorkspace(system_directory=gams_path,
                       working_directory=working_directory)
def solve(self, *args, **kwds):
    """
    Solve a model via the GAMS Python API.

    Keyword Arguments
    -----------------
    tee=False: bool
        Output GAMS log to stdout.
    logfile=None: str
        Filename to output GAMS log to a file.
    load_solutions=True: bool
        Load solution into model. If False, the results object
        will contain the solution data.
    keepfiles=False: bool
        Keep temporary files. Equivalent of DebugLevel.KeepFiles.
        Summary of temp files can be found in _gams_py_gjo0.pf
    tmpdir=None: str
        Specify directory path for storing temporary files.
        A directory will be created if one of this name doesn't exist.
        By default uses the system default temporary path.
    report_timing=False: bool
        Print timing reports for presolve, solver, postsolve, etc.
    io_options: dict
        Options that get passed to the writer.
        See writer in pyomo.repn.plugins.gams_writer for details.
        Updated with any other keywords passed to solve method.
    """
    # Make sure available() doesn't crash
    self.available()

    from gams import GamsWorkspace, DebugLevel
    from gams.workspace import GamsExceptionExecution

    if len(args) != 1:
        raise ValueError('Exactly one model must be passed '
                         'to solve method of GAMSSolver.')
    model = args[0]

    # self.options are default for each run, overwritten by kwds
    options = dict()
    options.update(self.options)
    options.update(kwds)

    load_solutions = options.pop("load_solutions", True)
    tee = options.pop("tee", False)
    logfile = options.pop("logfile", None)
    keepfiles = options.pop("keepfiles", False)
    tmpdir = options.pop("tmpdir", None)
    report_timing = options.pop("report_timing", False)
    io_options = options.pop("io_options", {})

    # Pass remaining keywords to writer, which will handle
    # any unrecognized arguments
    io_options.update(options)

    initial_time = time.time()

    ####################################################################
    # Presolve
    ####################################################################

    # Create StringIO stream to pass to gams_writer, on which the
    # model file will be written. The writer also passes this StringIO
    # back, but output_file is defined in advance for clarity.
    output_file = StringIO()
    if isinstance(model, IBlock):
        # Kernel blocks have slightly different write method
        smap_id = model.write(filename=output_file,
                              format=ProblemFormat.gams,
                              _called_by_solver=True,
                              **io_options)
        symbolMap = getattr(model, "._symbol_maps")[smap_id]
    else:
        (_, smap_id) = model.write(filename=output_file,
                                   format=ProblemFormat.gams,
                                   io_options=io_options)
        symbolMap = model.solutions.symbol_map[smap_id]

    presolve_completion_time = time.time()
    if report_timing:
        print(" %6.2f seconds required for presolve" %
              (presolve_completion_time - initial_time))

    ####################################################################
    # Apply solver
    ####################################################################

    # IMPORTANT - only delete the whole tmpdir if the solver was the one
    # that made the directory. Otherwise, just delete the files the solver
    # made, if not keepfiles. That way the user can select a directory
    # they already have, like the current directory, without having to
    # worry about the rest of the contents of that directory being deleted.
    newdir = True
    if tmpdir is not None and os.path.exists(tmpdir):
        newdir = False

    ws = GamsWorkspace(
        debug=DebugLevel.KeepFiles if keepfiles else DebugLevel.Off,
        working_directory=tmpdir)

    t1 = ws.add_job_from_string(output_file.getvalue())

    try:
        with OutputStream(tee=tee, logfile=logfile) as output_stream:
            t1.run(output=output_stream)
    except GamsExceptionExecution as e:
        try:
            if e.rc == 3:  # Execution Error
                check_expr_evaluation(model, symbolMap, 'direct')
        finally:
            # Always name working directory or delete files,
            # regardless of any errors.
            if keepfiles:
                print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory)
            elif tmpdir is not None:
                # Garbage collect all references to t1.out_db
                # So that .gdx file can be deleted
                t1 = rec = rec_lo = rec_hi = None
                file_removal_gams_direct(tmpdir, newdir)
            raise
    except:
        # Catch other errors and remove files first
        if keepfiles:
            print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory)
        elif tmpdir is not None:
            # Garbage collect all references to t1.out_db
            # So that .gdx file can be deleted
            t1 = rec = rec_lo = rec_hi = None
            file_removal_gams_direct(tmpdir, newdir)
        raise

    solve_completion_time = time.time()
    if report_timing:
        print(" %6.2f seconds required for solver" %
              (solve_completion_time - presolve_completion_time))

    ####################################################################
    # Postsolve
    ####################################################################

    # import suffixes must be on the top-level model
    if isinstance(model, IBlock):
        model_suffixes = list(comp.storage_key for comp in
                              pyomo.core.kernel.suffix.import_suffix_generator(
                                  model, active=True, descend_into=False))
    else:
        model_suffixes = list(name for (name, comp) in
                              pyomo.core.base.suffix.active_import_suffix_generator(model))
    extract_dual = ('dual' in model_suffixes)
    extract_rc = ('rc' in model_suffixes)

    results = SolverResults()
    results.problem.name = t1.name
    # Both bounds start at the solver's objective estimate; the one on the
    # "proven" side is overwritten with the actual objective value below.
    results.problem.lower_bound = t1.out_db["OBJEST"].find_record().value
    results.problem.upper_bound = t1.out_db["OBJEST"].find_record().value
    results.problem.number_of_variables = \
        t1.out_db["NUMVAR"].find_record().value
    results.problem.number_of_constraints = \
        t1.out_db["NUMEQU"].find_record().value
    results.problem.number_of_nonzeros = \
        t1.out_db["NUMNZ"].find_record().value
    results.problem.number_of_binary_variables = None
    # Includes binary vars:
    results.problem.number_of_integer_variables = \
        t1.out_db["NUMDVAR"].find_record().value
    results.problem.number_of_continuous_variables = \
        t1.out_db["NUMVAR"].find_record().value \
        - t1.out_db["NUMDVAR"].find_record().value
    results.problem.number_of_objectives = 1  # required by GAMS writer
    obj = list(model.component_data_objects(Objective, active=True))
    assert len(obj) == 1, 'Only one objective is allowed.'
    obj = obj[0]
    objctvval = t1.out_db["OBJVAL"].find_record().value
    if obj.is_minimizing():
        results.problem.sense = ProblemSense.minimize
        results.problem.upper_bound = objctvval
    else:
        results.problem.sense = ProblemSense.maximize
        results.problem.lower_bound = objctvval

    results.solver.name = "GAMS " + str(self.version())

    # Init termination condition to None to give preference to this first
    # block of code, only set certain TC's below if it's still None
    results.solver.termination_condition = None
    results.solver.message = None

    # Map the GAMS SOLVESTAT code onto Pyomo solver status / termination
    # condition -- presumably the standard GAMS solver-status codes; confirm
    # against the GAMS documentation if codes are added.
    solvestat = t1.out_db["SOLVESTAT"].find_record().value
    if solvestat == 1:
        results.solver.status = SolverStatus.ok
    elif solvestat == 2:
        results.solver.status = SolverStatus.ok
        results.solver.termination_condition = TerminationCondition.maxIterations
    elif solvestat == 3:
        results.solver.status = SolverStatus.ok
        results.solver.termination_condition = TerminationCondition.maxTimeLimit
    elif solvestat == 5:
        results.solver.status = SolverStatus.ok
        results.solver.termination_condition = TerminationCondition.maxEvaluations
    elif solvestat == 7:
        results.solver.status = SolverStatus.aborted
        results.solver.termination_condition = TerminationCondition.licensingProblems
    elif solvestat == 8:
        results.solver.status = SolverStatus.aborted
        results.solver.termination_condition = TerminationCondition.userInterrupt
    elif solvestat == 10:
        results.solver.status = SolverStatus.error
        results.solver.termination_condition = TerminationCondition.solverFailure
    elif solvestat == 11:
        results.solver.status = SolverStatus.error
        results.solver.termination_condition = TerminationCondition.internalSolverError
    elif solvestat == 4:
        results.solver.status = SolverStatus.warning
        results.solver.message = "Solver quit with a problem (see LST file)"
    elif solvestat in (9, 12, 13):
        results.solver.status = SolverStatus.error
    elif solvestat == 6:
        results.solver.status = SolverStatus.unknown

    results.solver.return_code = 0
    # Not sure if this value is actually user time
    # "the elapsed time it took to execute a solve statement in total"
    results.solver.user_time = t1.out_db["ETSOLVE"].find_record().value
    results.solver.system_time = None
    results.solver.wallclock_time = None
    results.solver.termination_message = None

    soln = Solution()

    # Map the GAMS MODELSTAT code onto solution status; may refine the
    # termination condition set from SOLVESTAT above.
    modelstat = t1.out_db["MODELSTAT"].find_record().value
    if modelstat == 1:
        results.solver.termination_condition = TerminationCondition.optimal
        soln.status = SolutionStatus.optimal
    elif modelstat == 2:
        results.solver.termination_condition = TerminationCondition.locallyOptimal
        soln.status = SolutionStatus.locallyOptimal
    elif modelstat in [3, 18]:
        results.solver.termination_condition = TerminationCondition.unbounded
        soln.status = SolutionStatus.unbounded
    elif modelstat in [4, 5, 6, 10, 19]:
        results.solver.termination_condition = TerminationCondition.infeasible
        soln.status = SolutionStatus.infeasible
    elif modelstat == 7:
        results.solver.termination_condition = TerminationCondition.feasible
        soln.status = SolutionStatus.feasible
    elif modelstat == 8:
        # 'Integer solution model found'
        results.solver.termination_condition = TerminationCondition.optimal
        soln.status = SolutionStatus.optimal
    elif modelstat == 9:
        results.solver.termination_condition = TerminationCondition.intermediateNonInteger
        soln.status = SolutionStatus.other
    elif modelstat == 11:
        # Should be handled above, if modelstat and solvestat both
        # indicate a licensing problem
        if results.solver.termination_condition is None:
            results.solver.termination_condition = TerminationCondition.licensingProblems
        soln.status = SolutionStatus.error
    elif modelstat in [12, 13]:
        if results.solver.termination_condition is None:
            results.solver.termination_condition = TerminationCondition.error
        soln.status = SolutionStatus.error
    elif modelstat == 14:
        if results.solver.termination_condition is None:
            results.solver.termination_condition = TerminationCondition.noSolution
        soln.status = SolutionStatus.unknown
    elif modelstat in [15, 16, 17]:
        # Having to do with CNS models,
        # not sure what to make of status descriptions
        results.solver.termination_condition = TerminationCondition.optimal
        soln.status = SolutionStatus.unsure
    else:
        # This is just a backup catch, all cases are handled above
        soln.status = SolutionStatus.error

    soln.gap = abs(results.problem.upper_bound
                   - results.problem.lower_bound)

    # Pull variable levels (and reduced costs, if requested) out of the
    # GAMS output database, keyed by the writer's symbol map.
    for sym, ref in iteritems(symbolMap.bySymbol):
        obj = ref()
        if isinstance(model, IBlock):
            # Kernel variables have no 'parent_component'
            if obj.ctype is IObjective:
                soln.objective[sym] = {'Value': objctvval}
            if obj.ctype is not IVariable:
                continue
        else:
            if obj.parent_component().type() is Objective:
                soln.objective[sym] = {'Value': objctvval}
            if obj.parent_component().type() is not Var:
                continue
        rec = t1.out_db[sym].find_record()
        # obj.value = rec.level
        soln.variable[sym] = {"Value": rec.level}
        if extract_rc and not math.isnan(rec.marginal):
            # Do not set marginals to nan
            # model.rc[obj] = rec.marginal
            soln.variable[sym]['rc'] = rec.marginal

    if extract_dual:
        for c in model.component_data_objects(Constraint, active=True):
            if c.body.is_fixed() or \
               (not (c.has_lb() or c.has_ub())):
                # the constraint was not sent to GAMS
                continue
            sym = symbolMap.getSymbol(c)
            if c.equality:
                rec = t1.out_db[sym].find_record()
                if not math.isnan(rec.marginal):
                    # model.dual[c] = rec.marginal
                    soln.constraint[sym] = {'dual': rec.marginal}
                else:
                    # Solver didn't provide marginals,
                    # nothing else to do here
                    break
            else:
                # Inequality, assume if 2-sided that only
                # one side's marginal is nonzero
                # Negate marginal for _lo equations
                marg = 0
                if c.lower is not None:
                    rec_lo = t1.out_db[sym + '_lo'].find_record()
                    marg -= rec_lo.marginal
                if c.upper is not None:
                    rec_hi = t1.out_db[sym + '_hi'].find_record()
                    marg += rec_hi.marginal
                if not math.isnan(marg):
                    # model.dual[c] = marg
                    soln.constraint[sym] = {'dual': marg}
                else:
                    # Solver didn't provide marginals,
                    # nothing else to do here
                    break

    results.solution.insert(soln)

    if keepfiles:
        print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory)
    elif tmpdir is not None:
        # Garbage collect all references to t1.out_db
        # So that .gdx file can be deleted
        t1 = rec = rec_lo = rec_hi = None
        file_removal_gams_direct(tmpdir, newdir)

    ####################################################################
    # Finish with results
    ####################################################################

    results._smap_id = smap_id
    results._smap = None
    if isinstance(model, IBlock):
        if len(results.solution) == 1:
            results.solution(0).symbol_map = \
                getattr(model, "._symbol_maps")[results._smap_id]
            results.solution(0).default_variable_value = \
                self._default_variable_value
            if load_solutions:
                model.load_solution(results.solution(0))
        else:
            assert len(results.solution) == 0
        # see the hack in the write method
        # we don't want this to stick around on the model
        # after the solve
        assert len(getattr(model, "._symbol_maps")) == 1
        delattr(model, "._symbol_maps")
        del results._smap_id
        if load_solutions and \
           (len(results.solution) == 0):
            logger.error("No solution is available")
    else:
        if load_solutions:
            model.solutions.load_from(results)
            results._smap_id = None
            results.solution.clear()
        else:
            results._smap = model.solutions.symbol_map[smap_id]
            model.solutions.delete_symbol_map(smap_id)

    postsolve_completion_time = time.time()
    if report_timing:
        print(" %6.2f seconds required for postsolve" %
              (postsolve_completion_time - solve_completion_time))
        print(" %6.2f seconds required total" %
              (postsolve_completion_time - initial_time))

    return results
# Script chunk: open a GDX file and build '.fx = .l' fixing statements for
# variables flagged by 'exo_' dummy symbols. The chunk appears to continue
# beyond this view (timer, GamsVariable and endo_dummies are unused here --
# presumably used later in the script; verify against the full file).
import os
import sys
from timeit import default_timer as timer
from gams import GamsWorkspace
from gams.database import GamsDatabase
from gams import GamsVariable

# Read execution parameters
# argv[1] = GAMS system directory, argv[2] = path to input .gdx file
args = sys.argv
assert len(args) > 2, "GAMS path and input path must be specified."
ws = GamsWorkspace(system_directory=args[1])
input_path: str = os.path.abspath(args[2])
assert input_path[-4:] == ".gdx", f"{input_path} is not a .gdx file."
dir_path: str = os.path.split(input_path)[0]
db: GamsDatabase = ws.add_database_from_gdx(input_path)
output: GamsDatabase = ws.add_database()

# Find the dummy variables by searching for symbols starting with 'exo_' or 'endo_'
exo_dummies = [symbol for symbol in db if symbol.name[:4] == "exo_"]
endo_dummies = [symbol for symbol in db if symbol.name[:5] == "endo_"]

# For every multi-record exogenous dummy, emit GAMS statements that fix the
# corresponding variable at its current level.
endo_exo_strings = []
for dummy in exo_dummies:
    var_name = dummy.name[4:]
    if len(db.get_symbol(var_name)) > 1:
        endo_exo_strings += [
            f"{var_name}.fx{rec.keys} = {var_name}.l{rec.keys};"
            for rec in dummy
        ]
# NOTE(review): fragment -- the appends below reference per-iteration values
# (Carbonh, OPEXh, ...) and likely conclude a loop whose header is outside
# this chunk; confirm indentation against the full file.
Carbon_matrix.append(Carbonh)
OPEX_matrix.append(OPEXh)
x_limit_bot_opex_matrix.append(x_limit_bot_opex_h)
x_limit_top_opex_matrix.append(x_limit_top_opex_h)
x_limit_bot_co2_matrix.append(x_limit_bot_co2_h)
x_limit_top_co2_matrix.append(x_limit_top_co2_h)

############################################
### generate GAMS gdx file ###
############################################
GAMS_model = "Strategic.gms"
ws = GamsWorkspace()
db = ws.add_database()

# Format the set labels as strings ('%d' renders integer labels)
time_set = np.char.mod('%d', year)
store_set = np.char.mod('%d', Store_id_range[:stores])
tech_set = np.array(tech_range)
split_set = np.char.mod('%d', np.arange(split**2))

# One-dimensional GAMS sets; only 'tech' is populated in this chunk --
# t/s/d are presumably filled further down; verify against the full file.
tech = db.add_set("tech", 1, "")
t = db.add_set("t", 1, "")
s = db.add_set("s", 1, "")
d = db.add_set("d", 1, "")
for n in tech_set:
    tech.add_record(n)
def load_db_IO(table_in, EORA=False, RoW=None):
    """Build and export the GAMS GDX input database for an input-output table.

    Creates the sets (regions, rows, columns, industries) and parameters
    (final demand, Z and A matrices, and -- unless EORA -- ROW exports and
    value added) in a new GamsDatabase, then exports it to
    <data>/gams_runs/<table name>.gdx.

    :param table_in: prepared IO table object providing name, countries,
        sectors, FD_labels, FinalD, Z_matrix, A_matrix, ExpROW and ValueA
    :param EORA: if True, use the EORA layout (extra 'ROW' region and
        'Total' industry column, different final-demand labels)
    :param RoW: not used in this body; kept for interface compatibility
    """
    data_path = load_config()['paths']['data']

    '''CREATE GAMS WORKSPACE'''
    ws = GamsWorkspace(os.path.join(data_path, 'gams_runs'))

    ''' CREATE INPUT FILES GAMS GDX '''
    db = ws.add_database()

    # set regions
    reg = db.add_set("reg", 1, "Regions")
    if EORA is True:
        for r in (table_in.countries + ['ROW']):
            reg.add_record(r)
    else:
        for r in table_in.countries:
            reg.add_record(r)

    # set rowcol
    rowcol = db.add_set("rowcol", 1, "All rows and columns")
    if EORA is True:
        industries = list(table_in.sectors) + ['Total']
        final_demand = list(table_in.FD_labels['FD'].unique())
    else:
        industries = list(table_in.sectors)
        final_demand = list(table_in.FD_labels['tfd'].unique())
    Import_lab = ['Import']
    Export_lab = ['Export']
    VA_lab = ['VA']
    rowcol_input = industries + final_demand + VA_lab + Import_lab + Export_lab
    for r in rowcol_input:
        rowcol.add_record(r)

    # set row
    row = db.add_set("row", 1, "All rows")
    row_input = industries + VA_lab + Import_lab
    for r in row_input:
        row.add_record(r)

    # set col
    col = db.add_set("col", 1, "All columns")
    col_input = industries + final_demand
    for r in col_input:
        col.add_record(r)

    # set industries
    industries_ = db.add_set("S", 1, "Industries")
    for r in industries:
        industries_.add_record(r)

    # set FinalD
    # Use db.add_parameter like every other symbol here (and like
    # load_db_SUT); the previous direct GamsParameter(...) construction
    # bypassed the database factory and needed an extra class import.
    fd_ = db.add_parameter("FinDem_ini", 4, "FinDem")
    for k, v in table_in.FinalD.items():
        fd_.add_record(k).value = v

    # set interaction matrix of intermediate demand
    z_m = db.add_parameter("Z_matrix_ini", 4, "Interaction matrix")
    for k, v in table_in.Z_matrix.items():
        z_m.add_record(k).value = v

    # set technical-coefficient (A) matrix
    a_m = db.add_parameter("A_matrix_ini", 4, "A matrix")
    for k, v in table_in.A_matrix.items():
        a_m.add_record(k).value = v

    if EORA is not True:
        # set Export ROW
        exp = db.add_parameter("ExpROW_ini", 3, "Exports to ROW")
        for k, v in table_in.ExpROW.items():
            exp.add_record(k).value = v

        # set ValueA
        # NOTE(review): taken as inside the EORA guard like ExpROW above --
        # confirm against upstream if EORA tables should also carry ValueA.
        val = db.add_parameter("ValueA_ini", 3, "Value Added")
        for k, v in table_in.ValueA.items():
            val.add_record(k).value = v

    # And save to GDX file
    db.export(os.path.join(data_path, "gams_runs", "{}.gdx".format(table_in.name)))
def load_db_SUT(table_in, RoW=None):
    """Build and export the GAMS GDX input database for a supply-use table.

    Creates the region/row/column/industry sets and the use, supply,
    ROW-trade and value-added parameters in a new GamsDatabase, then
    exports it to <data>/gams_runs/<table name>.gdx.

    :param table_in: prepared SUT object providing name, countries, sectors,
        products, Use, Sup, ExpROW, ImpROW and ValueA
    :param RoW: not used in this body; kept for interface compatibility
    """
    data_path = load_config()['paths']['data']

    # Workspace rooted in the gams_runs folder
    ws = GamsWorkspace(os.path.join(data_path, 'gams_runs'))
    db = ws.add_database()

    def _filled_set(name, description, labels):
        # One-dimensional set populated from an iterable of labels
        gams_set = db.add_set(name, 1, description)
        for label in labels:
            gams_set.add_record(label)
        return gams_set

    def _filled_parameter(name, dim, description, mapping):
        # Parameter populated from a {key-tuple: value} mapping
        param = db.add_parameter(name, dim, description)
        for key, value in mapping.items():
            param.add_record(key).value = value
        return param

    # Label groups used to assemble the row/column sets
    industries = list(table_in.sectors)
    products = list(table_in.products)
    final_demand = ['FinalD']
    import_lab = ['Import']
    export_lab = ['Export']
    va_lab = ['VA']

    # Sets (creation order kept: reg, rowcol, row, col, ind)
    _filled_set("reg", "Regions", table_in.countries)
    _filled_set("rowcol", "All rows and columns",
                industries + final_demand + export_lab + products + va_lab)
    _filled_set("row", "All rows", products + va_lab + import_lab)
    _filled_set("col", "All columns", industries + final_demand)
    _filled_set("ind", "Industries", industries)

    # Use and supply tables
    _filled_parameter("REG_USE2013", 4, "Interaction matrix", table_in.Use)
    _filled_parameter("REG_SUP2013", 4, "Interaction matrix", table_in.Sup)
    # Trade with the rest of the world and value added
    _filled_parameter("ExpROW_ini", 3, "Exports to ROW", table_in.ExpROW)
    _filled_parameter("ImpROW_ini", 3, "Imports from ROW", table_in.ImpROW)
    _filled_parameter("ValueA_ini", 3, "Value Added", table_in.ValueA)

    # And save to GDX file
    db.export(os.path.join(data_path, "gams_runs", "{}.gdx".format(table_in.name)))
def solve(self, *args, **kwds):
    """
    Uses GAMS Python API. For installation help visit:
    https://www.gams.com/latest/docs/apis/examples_python/index.html

    tee=False:
        Output GAMS log to stdout.
    load_solutions=True:
        Does not support load_solutions=False.
    keepfiles=False:
        Keep temporary files. Equivalent of DebugLevel.KeepFiles.
        Summary of temp files can be found in _gams_py_gjo0.pf
    tmpdir=None:
        Specify directory path for storing temporary files.
        A directory will be created if one of this name doesn't exist.
    io_options:
        Updated with additional keywords passed to solve()
        warmstart=False:
            Warmstart by initializing model's variables to their values.
        symbolic_solver_labels=False:
            Use full Pyomo component names rather than
            shortened symbols (slower, but useful for debugging).
        labeler=None:
            Custom labeler option. Incompatible with symbolic_solver_labels.
        solver=None:
            If None, GAMS will use default solver for model type.
        mtype=None:
            Model type. If None, will chose from lp, nlp, mip, and minlp.
        add_options=None:
            List of additional lines to write directly into model file
            before the solve statement. For model attributes,
            <model name> is GAMS_MODEL.
        skip_trivial_constraints=False:
            Skip writing constraints whose body section is fixed
        file_determinism=1:
            How much effort do we want to put into ensuring the
            LP file is written deterministically for a Pyomo model:
            0 : None
            1 : sort keys of indexed components (default)
            2 : sort keys AND sort names (over declaration order)
        put_results=None:
            Filename for optionally writing solution values and
            marginals to (put_results).dat, and solver statuses
            to (put_results + 'stat').dat.
    """
    # Make sure available() doesn't crash
    self.available()

    from gams import GamsWorkspace, DebugLevel
    from gams.workspace import GamsExceptionExecution

    if len(args) != 1:
        raise ValueError('Exactly one model must be passed '
                         'to solve method of GAMSSolver.')
    model = args[0]

    load_solutions = kwds.pop("load_solutions", True)
    tee = kwds.pop("tee", False)
    keepfiles = kwds.pop("keepfiles", False)
    tmpdir = kwds.pop("tmpdir", None)
    io_options = kwds.pop("io_options", {})

    if len(kwds):
        # Pass remaining keywords to writer, which will handle
        # any unrecognized arguments
        io_options.update(kwds)

    ####################################################################
    # Presolve
    ####################################################################

    # Create StringIO stream to pass to gams_writer, on which the
    # model file will be written. The writer also passes this StringIO
    # back, but output_file is defined in advance for clarity.
    output_file = StringIO()
    if isinstance(model, IBlockStorage):
        # Kernel blocks have slightly different write method
        smap_id = model.write(filename=output_file,
                              format=ProblemFormat.gams,
                              _called_by_solver=True,
                              **io_options)
        symbolMap = getattr(model, "._symbol_maps")[smap_id]
    else:
        (_, smap_id) = model.write(filename=output_file,
                                   format=ProblemFormat.gams,
                                   io_options=io_options)
        symbolMap = model.solutions.symbol_map[smap_id]

    ####################################################################
    # Apply solver
    ####################################################################

    # IMPORTANT - only delete the whole tmpdir if the solver was the one
    # that made the directory. Otherwise, just delete the files the solver
    # made, if not keepfiles. That way the user can select a directory
    # they already have, like the current directory, without having to
    # worry about the rest of the contents of that directory being deleted.
    newdir = True
    if tmpdir is not None and os.path.exists(tmpdir):
        newdir = False

    ws = GamsWorkspace(
        debug=DebugLevel.KeepFiles if keepfiles else DebugLevel.Off,
        working_directory=tmpdir)

    t1 = ws.add_job_from_string(output_file.getvalue())
    try:
        t1.run(output=sys.stdout if tee else None)
    except GamsExceptionExecution:
        try:
            check_expr_evaluation(model, symbolMap, 'direct')
        finally:
            # Always name working directory or delete files,
            # regardless of any errors.
            if keepfiles:
                print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory)
            elif tmpdir is not None:
                # Garbage collect all references to t1.out_db
                # So that .gdx file can be deleted
                t1 = rec = rec_lo = rec_hi = None
                file_removal_gams_direct(tmpdir, newdir)
            raise
    except:
        # Catch other errors and remove files first
        if keepfiles:
            print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory)
        elif tmpdir is not None:
            # Garbage collect all references to t1.out_db
            # So that .gdx file can be deleted
            t1 = rec = rec_lo = rec_hi = None
            file_removal_gams_direct(tmpdir, newdir)
        raise

    ####################################################################
    # Postsolve
    ####################################################################

    # import suffixes must be on the top-level model
    if isinstance(model, IBlockStorage):
        model_suffixes = list(name for (name, comp) in
                              pyomo.core.kernel.component_suffix.import_suffix_generator(
                                  model, active=True, descend_into=False,
                                  return_key=True))
    else:
        model_suffixes = list(name for (name, comp) in
                              pyomo.core.base.suffix.active_import_suffix_generator(model))
    extract_dual = ('dual' in model_suffixes)
    extract_rc = ('rc' in model_suffixes)

    results = SolverResults()
    results.problem.name = t1.name
    # Both bounds start at the solver's objective estimate; the one on the
    # "proven" side is overwritten with the actual objective value below.
    results.problem.lower_bound = t1.out_db["OBJEST"].find_record().value
    results.problem.upper_bound = t1.out_db["OBJEST"].find_record().value
    results.problem.number_of_variables = \
        t1.out_db["NUMVAR"].find_record().value
    results.problem.number_of_constraints = \
        t1.out_db["NUMEQU"].find_record().value
    results.problem.number_of_nonzeros = \
        t1.out_db["NUMNZ"].find_record().value
    results.problem.number_of_binary_variables = None
    # Includes binary vars:
    results.problem.number_of_integer_variables = \
        t1.out_db["NUMDVAR"].find_record().value
    results.problem.number_of_continuous_variables = \
        t1.out_db["NUMVAR"].find_record().value \
        - t1.out_db["NUMDVAR"].find_record().value
    results.problem.number_of_objectives = 1  # required by GAMS writer
    obj = list(model.component_data_objects(Objective, active=True))
    assert len(obj) == 1, 'Only one objective is allowed.'
    obj = obj[0]
    objctvval = t1.out_db["OBJVAL"].find_record().value
    if obj.is_minimizing():
        results.problem.sense = ProblemSense.minimize
        results.problem.upper_bound = objctvval
    else:
        results.problem.sense = ProblemSense.maximize
        results.problem.lower_bound = objctvval

    results.solver.name = "GAMS " + str(self.version())

    # Init termination condition to None to give preference to this first
    # block of code, only set certain TC's below if it's still None
    results.solver.termination_condition = None
    results.solver.message = None

    # Map the GAMS SOLVESTAT code onto Pyomo solver status / termination
    # condition -- presumably the standard GAMS solver-status codes; confirm
    # against the GAMS documentation if codes are added.
    solvestat = t1.out_db["SOLVESTAT"].find_record().value
    if solvestat == 1:
        results.solver.status = SolverStatus.ok
    elif solvestat == 2:
        results.solver.status = SolverStatus.ok
        results.solver.termination_condition = TerminationCondition.maxIterations
    elif solvestat == 3:
        results.solver.status = SolverStatus.ok
        results.solver.termination_condition = TerminationCondition.maxTimeLimit
    elif solvestat == 5:
        results.solver.status = SolverStatus.ok
        results.solver.termination_condition = TerminationCondition.maxEvaluations
    elif solvestat == 7:
        results.solver.status = SolverStatus.aborted
        results.solver.termination_condition = TerminationCondition.licensingProblems
    elif solvestat == 8:
        results.solver.status = SolverStatus.aborted
        results.solver.termination_condition = TerminationCondition.userInterrupt
    elif solvestat == 10:
        results.solver.status = SolverStatus.error
        results.solver.termination_condition = TerminationCondition.solverFailure
    elif solvestat == 11:
        results.solver.status = SolverStatus.error
        results.solver.termination_condition = TerminationCondition.internalSolverError
    elif solvestat == 4:
        results.solver.status = SolverStatus.warning
        results.solver.message = "Solver quit with a problem (see LST file)"
    elif solvestat in (9, 12, 13):
        results.solver.status = SolverStatus.error
    elif solvestat == 6:
        results.solver.status = SolverStatus.unknown

    results.solver.return_code = 0
    # Not sure if this value is actually user time
    # "the elapsed time it took to execute a solve statement in total"
    results.solver.user_time = t1.out_db["ETSOLVE"].find_record().value
    results.solver.system_time = None
    results.solver.wallclock_time = None
    results.solver.termination_message = None

    soln = Solution()

    # Map the GAMS MODELSTAT code onto solution status; may refine the
    # termination condition set from SOLVESTAT above.
    modelstat = t1.out_db["MODELSTAT"].find_record().value
    if modelstat == 1:
        results.solver.termination_condition = TerminationCondition.optimal
        soln.status = SolutionStatus.optimal
    elif modelstat == 2:
        results.solver.termination_condition = TerminationCondition.locallyOptimal
        soln.status = SolutionStatus.locallyOptimal
    elif modelstat in [3, 18]:
        results.solver.termination_condition = TerminationCondition.unbounded
        soln.status = SolutionStatus.unbounded
    elif modelstat in [4, 5, 6, 10, 19]:
        results.solver.termination_condition = TerminationCondition.infeasible
        soln.status = SolutionStatus.infeasible
    elif modelstat == 7:
        results.solver.termination_condition = TerminationCondition.feasible
        soln.status = SolutionStatus.feasible
    elif modelstat == 8:
        # 'Integer solution model found'
        results.solver.termination_condition = TerminationCondition.optimal
        soln.status = SolutionStatus.optimal
    elif modelstat == 9:
        results.solver.termination_condition = TerminationCondition.intermediateNonInteger
        soln.status = SolutionStatus.other
    elif modelstat == 11:
        # Should be handled above, if modelstat and solvestat both
        # indicate a licensing problem
        if results.solver.termination_condition is None:
            results.solver.termination_condition = TerminationCondition.licensingProblems
        soln.status = SolutionStatus.error
    elif modelstat in [12, 13]:
        if results.solver.termination_condition is None:
            results.solver.termination_condition = TerminationCondition.error
        soln.status = SolutionStatus.error
    elif modelstat == 14:
        if results.solver.termination_condition is None:
            results.solver.termination_condition = TerminationCondition.noSolution
        soln.status = SolutionStatus.unknown
    elif modelstat in [15, 16, 17]:
        # Having to do with CNS models,
        # not sure what to make of status descriptions
        results.solver.termination_condition = TerminationCondition.optimal
        soln.status = SolutionStatus.unsure
    else:
        # This is just a backup catch, all cases are handled above
        soln.status = SolutionStatus.error

    soln.gap = abs(results.problem.upper_bound
                   - results.problem.lower_bound)

    # Pull variable levels (and reduced costs, if requested) out of the
    # GAMS output database, keyed by the writer's symbol map.
    for sym, ref in iteritems(symbolMap.bySymbol):
        obj = ref()
        if isinstance(model, IBlockStorage):
            # Kernel variables have no 'parent_component'
            if obj.ctype is Objective:
                soln.objective[sym] = {'Value': objctvval}
            if obj.ctype is not Var:
                continue
        else:
            if obj.parent_component().type() is Objective:
                soln.objective[sym] = {'Value': objctvval}
            if obj.parent_component().type() is not Var:
                continue
        rec = t1.out_db[sym].find_record()
        # obj.value = rec.level
        soln.variable[sym] = {"Value": rec.level}
        if extract_rc and not math.isnan(rec.marginal):
            # Do not set marginals to nan
            # model.rc[obj] = rec.marginal
            soln.variable[sym]['rc'] = rec.marginal

    if extract_dual:
        for c in model.component_data_objects(Constraint, active=True):
            if c.body.is_fixed():
                continue
            sym = symbolMap.getSymbol(c)
            if c.equality:
                rec = t1.out_db[sym].find_record()
                if not math.isnan(rec.marginal):
                    # model.dual[c] = rec.marginal
                    soln.constraint[sym] = {'dual': rec.marginal}
                else:
                    # Solver didn't provide marginals,
                    # nothing else to do here
                    break
            else:
                # Inequality, assume if 2-sided that only
                # one side's marginal is nonzero
                # Negate marginal for _lo equations
                marg = 0
                if c.lower is not None:
                    rec_lo = t1.out_db[sym + '_lo'].find_record()
                    marg -= rec_lo.marginal
                if c.upper is not None:
                    rec_hi = t1.out_db[sym + '_hi'].find_record()
                    marg += rec_hi.marginal
                if not math.isnan(marg):
                    # model.dual[c] = marg
                    soln.constraint[sym] = {'dual': marg}
                else:
                    # Solver didn't provide marginals,
                    # nothing else to do here
                    break

    results.solution.insert(soln)

    if keepfiles:
        print("\nGAMS WORKING DIRECTORY: %s\n" % ws.working_directory)
    elif tmpdir is not None:
        # Garbage collect all references to t1.out_db
        # So that .gdx file can be deleted
        t1 = rec = rec_lo = rec_hi = None
        file_removal_gams_direct(tmpdir, newdir)

    ####################################################################
    # Finish with results
    ####################################################################

    results._smap_id = smap_id
    results._smap = None
    if isinstance(model, IBlockStorage):
        if len(results.solution) == 1:
            results.solution(0).symbol_map = \
                getattr(model, "._symbol_maps")[results._smap_id]
            results.solution(0).default_variable_value = \
                self._default_variable_value
            if load_solutions:
                model.load_solution(results.solution(0))
                results.solution.clear()
        else:
            assert len(results.solution) == 0
        # see the hack in the write method
        # we don't want this to stick around on the model
        # after the solve
        assert len(getattr(model, "._symbol_maps")) == 1
        delattr(model, "._symbol_maps")
        del results._smap_id
    else:
        if load_solutions:
            model.solutions.load_from(results)
            results._smap_id = None
            results.solution.clear()
        else:
            results._smap = model.solutions.symbol_map[smap_id]
            model.solutions.delete_symbol_map(smap_id)

    return results
def ratmarg_IO(table_in):
    """Estimate marginal values of the rationing variable in GAMS.

    GAMS is required, as the marginal values of a variable are not
    returned by the free python solvers.

    Parameters
    - table_in : **io_basic** class object, containing all IO data

    Returns
    - pandas DataFrame with the marginal values of the rationing
      variable (region rows, industry columns); also written to
      'input_data/Ratmarg_<name>.csv'.
    """
    # TODO(review): take this from load_config()['paths']['data'] instead of hardcoding
    data_path = 'C:\\Dropbox\\OIA\\Argentina\\Data'

    table_in.prep_data()
    load_db_IO(table_in)  # export the IO data to a .gdx file for GAMS to read

    # RUN SCRIPT WITH DISRUPTION
    setdir = os.path.join(data_path, 'gams_runs')
    ws = GamsWorkspace(setdir)

    # Make a per-table copy of the template .gms file so concurrent/serial
    # runs for different tables do not clobber each other.
    gamsfile_in = os.path.join(data_path, "gams_runs", "obtain_marg_value.gms")
    gamsfile = os.path.join(data_path, "gams_runs",
                            "obtain_marg_value_{}.gms".format(table_in.name))
    copyfile(gamsfile_in, gamsfile)

    str_ctry = ','.join(table_in.regions)
    str_fd = ','.join(list(table_in.FD_labels['tfd'].unique()))

    # 'fh' instead of the original's 'file', which shadowed the builtin.
    with open(gamsfile, 'r') as fh:
        data = fh.readlines()

    # Patch the template in place. NOTE(review): these line numbers are tied
    # to the exact layout of obtain_marg_value.gms -- re-check whenever the
    # template changes.
    gdx_file = "{}.gdx".format(table_in.name)
    data[26] = '$GDXIN ' + gdx_file + '\n'
    str_ind = ','.join(table_in.sectors)
    data[32] = 'S(col) list of industries /' + str_ind + '/\n'
    data[34] = '/' + str_ctry + '/\n'
    data[36] = '/' + str_ctry + '/\n'
    data[38] = '/' + str_fd + '/\n'

    with open(gamsfile, 'w') as fh:
        fh.writelines(data)

    # NOTE(review): gamsfile is built with os.path.join above, so this
    # replace is normally a no-op; kept for backwards compatibility.
    gamsfile_run = gamsfile.replace("..\\..\\gams_runs\\", "")
    t1 = ws.add_job_from_file(gamsfile_run)
    t1.run()

    # Collect the marginal values of the rationing variable from the GAMS
    # output database.
    Ratmarg = []
    index_ = []
    for rec in t1.out_db["Ratmarg"]:
        index_.append((rec.keys[0], rec.keys[1]))
        Ratmarg.append(rec.get_value())

    index_ = pd.MultiIndex.from_tuples(index_, names=('CNTRY', 'IND'))
    Ratmarginal = pd.DataFrame(Ratmarg, index=index_).unstack()
    Ratmarginal.columns = Ratmarginal.columns.droplevel()

    Ratmarginal.to_csv(
        os.path.join(data_path, 'input_data',
                     'Ratmarg_{}.csv'.format(table_in.name)))

    return Ratmarginal
def load_db_IO(table_in):
    """Convert the Input-Output data from an **io_basic** object to a GAMS .gdx file.

    Parameters
    - table_in : **io_basic** class object, containing all IO data

    Side effects
    - writes '<data_path>/gams_runs/<table_in.name>.gdx'
    """
    # TODO(review): take this from load_config()['paths']['data'] instead of hardcoding
    data_path = 'C:\\Dropbox\\OIA\\Argentina\\Data'

    # CREATE GAMS WORKSPACE
    ws = GamsWorkspace(os.path.join(data_path, 'gams_runs'))

    # CREATE INPUT FILES GAMS GDX
    db = ws.add_database()

    # set regions
    reg = db.add_set("reg", 1, "Regions")
    for r in table_in.regions:
        reg.add_record(r)

    # set rowcol
    rowcol = db.add_set("rowcol", 1, "All rows and columns")
    industries = list(table_in.sectors)
    final_demand = list(table_in.FD_labels['tfd'].unique())
    Import_lab = ['Import']
    Export_lab = ['Export']
    VA_lab = ['VA']

    rowcol_input = industries + final_demand + VA_lab + Import_lab  # + Export_lab
    for r in rowcol_input:
        rowcol.add_record(r)

    # set row
    row = db.add_set("row", 1, "All rows")
    row_input = industries + VA_lab + Import_lab
    for r in row_input:
        row.add_record(r)

    # set col
    col = db.add_set("col", 1, "All columns")
    col_input = industries + final_demand
    for r in col_input:
        col.add_record(r)

    # set industries
    industries_ = db.add_set("S", 1, "Industries")
    for r in industries:
        industries_.add_record(r)

    # set FinalD
    # Consistency fix: use db.add_parameter like every other parameter below
    # (the original constructed GamsParameter(db, ...) directly for this one).
    fd_ = db.add_parameter("FinDem_ini", 4, "FinDem")
    for k, v in table_in.FinalD.items():
        fd_.add_record(k).value = v

    # set interaction matrix of intermediate demand
    z_m = db.add_parameter("Z_matrix_ini", 4, "Interaction matrix")
    for k, v in table_in.Z_matrix.items():
        z_m.add_record(k).value = v

    # set A matrix (technical coefficients)
    a_m = db.add_parameter("A_matrix_ini", 4, "A matrix")
    for k, v in table_in.A_matrix.items():
        a_m.add_record(k).value = v

    # set Export ROW
    exp = db.add_parameter("ExpROW_ini", 3, "Exports to ROW")
    for k, v in table_in.ExpROW.items():
        exp.add_record(k).value = v

    # set ValueA
    val = db.add_parameter("ValueA_ini", 3, "Value Added")
    for k, v in table_in.ValueA.items():
        val.add_record(k).value = v

    # And save to GDX file
    db.export(
        os.path.join(data_path, "gams_runs", "{}.gdx".format(table_in.name)))
def solve_GAMS(sim_folder, gams_folder=None, work_dir=None, output_lst=False):
    """Solve the simulation by running GAMS through its Python API.

    Parameters
    - sim_folder: simulation environment folder (must contain UCM_h.gms and Inputs.gdx)
    - gams_folder: path to the GAMS installation; auto-located when None or invalid
    - work_dir: working directory for the GAMS workspace (GAMS picks a temp dir if None)
    - output_lst: keep the .lst output file when True

    Returns
    - True on success, False when GAMS cannot be located or the simulation
      folder is not valid. Exits the process on a GAMS runtime error.
    """
    if not package_exists('gams'):
        logging.warning(
            'Could not import gams. Trying to automatically locate gdxcc folder'
        )
        if not import_local_lib('gams'):
            return False
    # Fix: guard against the default gams_folder=None, which would make
    # os.path.exists raise TypeError instead of triggering auto-location.
    if gams_folder is None or not os.path.exists(gams_folder):
        logging.warning('The provided path for GAMS (' + str(gams_folder) +
                        ') does not exist. Trying to locate...')
        gams_folder = get_gams_path()
    if not os.path.exists(gams_folder):
        logging.error('GAMS path cannot be located. Simulation is stopped')
        return False
    # NOTE(review): encoding to bytes looks py2-era; os.path.join(bytes, str)
    # raises on py3 -- confirm against is_sim_folder_ok's expectations.
    sim_folder = sim_folder.encode()
    gams_folder = gams_folder.encode()

    if not is_sim_folder_ok(sim_folder):
        return False

    # create GAMS workspace:
    from gams import GamsWorkspace
    try:
        ws = GamsWorkspace(system_directory=gams_folder,
                           working_directory=work_dir, debug=3)
        shutil.copy(os.path.join(sim_folder, 'UCM_h.gms'), ws.working_directory)
        shutil.copy(os.path.join(sim_folder, 'Inputs.gdx'), ws.working_directory)
        t1 = ws.add_job_from_file('UCM_h.gms')
        opt = ws.add_options()
        # Do not create .lst file
        if not output_lst:
            if sys.platform == 'win32':
                opt.output = 'nul'
            else:
                opt.output = '/dev/null'
        time0 = time.time()
        t1.run(opt)
    except Exception as e:
        if 'optCreateD' in str(e):
            logging.error(
                'The GAMS solver can only be run once in the same console. Please open another console'
            )
            sys.exit(1)
        else:
            logging.error(
                'The following error occured when trying to solve the model in gams: '
                + str(e))
            sys.exit(1)
    logging.info(
        'Completed simulation in {0:.2f} seconds'.format(time.time() - time0))

    # copy the result file to the simulation environment folder:
    shutil.copy(os.path.join(ws.working_directory, 'Results.gdx'), sim_folder)
    # Fix: copy each listed file -- the original checked/copied 'debug.gdx'
    # three times and never copied UCM_h.lst or UCM_h.log (compare the
    # equivalent loop in solve_high_level).
    for filename in ['UCM_h.lst', 'UCM_h.log', 'debug.gdx']:
        if os.path.isfile(os.path.join(ws.working_directory, filename)):
            shutil.copy(os.path.join(ws.working_directory, filename), sim_folder)
    if os.path.isfile(os.path.join(ws.working_directory, 'debug.gdx')):
        logging.warning(
            'A debug file was created. There has probably been an optimization error'
        )
    if os.path.isfile('warn.log'):
        shutil.copy('warn.log', os.path.join(sim_folder, 'warn_solve.log'))
    # Fix: report success explicitly; the original fell off the end and
    # returned None (falsy), indistinguishable from the False failure paths.
    return True