def solve_relaxed(self, mdl, prio_name, relaxable_groups, relax_mode, parameters=None):
    """Submits a feasopt (relaxation) job for *mdl* and returns a relaxed solution.

    Serializes the model, the CPLEX parameters (with the relaxation mode) and
    the relaxable constraint groups, submits them as job attachments, then
    builds a relaxed solution from the returned JSON artifacts.

    Args:
        mdl: The model to relax and solve.
        prio_name: Optional priority-pass name; only used to decorate the
            names of the dump files.
        relaxable_groups: Groups of relaxable constraints, forwarded to the
            feasopt serializer.
        relax_mode: Relaxation mode, forwarded to the parameter serializer.
        parameters: Optional explicit CPLEX parameters; when None, the
            model's own parameters are used (same convention as ``solve``).

    Returns:
        A relaxed solution object, or None if the job produced no solution.
    """
    # --- 1 serialize
    job_name = normalize_basename(mdl.name, force_lowercase=True)
    model_data = self.serialize_model(mdl)
    # FIX: honor an explicit `parameters` argument; previously it was
    # silently ignored and mdl.parameters was always used, which is
    # inconsistent with solve().
    docloud_parameters = parameters if parameters is not None else mdl.parameters
    prm_data = self._serialize_parameters(docloud_parameters, write_level=1, relax_mode=relax_mode)
    prm_name = self._make_attachment_name(job_name, '.prm')
    feasopt_data = self._serialize_relaxables(relaxable_groups)

    # --- dump if need be
    if prio_name:
        prio_name = "_%s" % prio_name
    relax_basename = normalize_basename("%s_feasopt%s" % (mdl.name, prio_name))
    prm_basename = normalize_basename("%s_feasopt" % mdl.name)
    self._dump_if_required(model_data, mdl, basename=job_name, extension=".lp", forced=True)
    self._dump_if_required(feasopt_data, mdl, basename=relax_basename,
                           extension=FeasibilityPrinter.extension, forced=True)
    self._dump_if_required(prm_data, mdl, basename=prm_basename, extension=".prm", forced=True)

    # --- submit job: model must come first, it names the job on the console
    attachments = []
    model_name = normalize_basename(job_name) + self._exchange_format.extension
    attachments.append(self._make_attachment(model_name, model_data))
    attachments.append(self._make_attachment(prm_name, prm_data))
    attachments.append(self._make_attachment(
        normalize_basename(job_name) + FeasibilityPrinter.extension, feasopt_data))

    def notify_info(info):
        # Relay job id / progress events from the connector to the model's listeners.
        if "jobid" in info:
            mdl.fire_jobid(jobid=info["jobid"])
        if "progress" in info:
            mdl.fire_progress(progress_data=info["progress"])

    connector = self._connector
    mdl.notify_start_solve()
    connector.submit_model_data(attachments,
                                gzip=not self._exchange_format.is_binary,
                                info_callback=notify_info,
                                info_to_monitor={'jobid', 'progress'})

    # --- cplex solve details
    json_details = connector.get_cplex_details()
    self._solve_details = SolveDetails.from_json(json_details)

    # --- build a solution object, or None
    solution_handler = JSONSolutionHandler(connector.results.get('solution.json'))
    if not solution_handler.has_solution:
        mdl.notify_solve_failed()
        return None
    else:
        infeas_json = connector.results.get('infeasibilities.json')
        infeas_handler = JSONInfeasibilityHandler(infeas_json) if infeas_json else None
        sol = self._make_relaxed_solution(mdl, solution_handler, infeas_handler)
        return sol
def export_as_mst(self, path=None, basename=None, write_level=WriteLevel.Auto, **kwargs):
    """ Exports a solution to a file in CPLEX mst format.

    Args:
        basename: Controls the basename with which the solution is printed.
            Accepts None, a plain string, or a string format. If None, the
            model's name is used. If passed a plain string, the string is
            used in place of the model's name. If passed a string format
            (either with %s or {0}), this format is used to format the
            model name to produce the basename of the written file.

        path: A path to write the file, expects a string path or None.
            Can be a directory, in which case the basename that was computed
            with the basename argument is appended to the directory to
            produce the file. If given a full path, the path is directly
            used to write the file, and the basename argument is not used.
            If passed None, the output directory will be
            ``tempfile.gettempdir()``.

        write_level: an enumerated value which controls which variables are
            printed. The default is WriteLevel.Auto, which prints the values
            of all discrete variables. This parameter also accepts the
            number values of the corresponding CPLEX parameters (1 for
            AllVars, 2 for DiscreteVars, 3 for NonZeroVars, 4 for
            NonZeroDiscreteVars).

    Returns:
        The full path of the file, when successful, else None.

    Examples:
        Assuming the solution has the name "prob":

        ``sol.export_as_mst()`` will write file prob.mst in a temporary directory.

        ``sol.export_as_mst(write_level=WriteLevel.AllVars)`` will write file
        prob.mst in a temporary directory, and will print all variables in the problem.

        ``sol.export_as_mst(path="c:/temp/myprob1.mst")`` will write file "c:/temp/myprob1.mst".

        ``sol.export_as_mst(basename="my_%s_mipstart", path ="z:/home/")`` will
        write "z:/home/my_prob_mipstart.mst".

    See Also:
        :class:`docplex.mp.constants.WriteLevel`
    """
    # Resolve the output location from the problem name, target directory
    # and optional basename format.
    file_basename = normalize_basename(self.problem_name, force_lowercase=True)
    full_path = make_output_path2(actual_name=file_basename,
                                  extension=SolutionMSTPrinter.mst_extension,
                                  path=path,
                                  basename_fmt=basename)
    # Guard clause: nothing to write when no path could be resolved.
    if not full_path:
        return full_path
    # Normalize the write level and forward remaining options to the printer.
    print_kwargs = dict(kwargs, write_level=WriteLevel.parse(write_level))
    SolutionMSTPrinter.print_to_stream(self, full_path, **print_kwargs)
    return full_path
def solve(self, mdl, parameters=None, **kwargs):
    """Submits *mdl* as a job with its attachments and returns a solution or None.

    Builds the job attachments (model file, .prm parameters, optional .mst
    warm start, optional .ann Benders annotations), submits them through the
    connector, records solve details, then builds a solution object from the
    returned ``solution.json`` (or returns None on failure).

    Args:
        mdl: The model to solve.
        parameters: Optional explicit CPLEX parameters; when None the
            model's own parameters are used.
        **kwargs: May carry the internal ``_lex_mipstart`` key: within a
            lexicographic solve, that mipstart supersedes all other mipstarts.

    Returns:
        A solution object, or None when the job yields no solution.
    """
    lex_mipstart = kwargs.pop('_lex_mipstart', None)
    attachments = []
    # make sure model is the first attachment: that will be the name of the job on the console
    job_name = normalize_basename("python_%s" % mdl.name)
    model_file = self.serialize_model_as_file(mdl)
    try:
        model_data_name = self._make_attachment_name(job_name, self._exchange_format.extension)
        attachments.append({'name': model_data_name, 'filename': model_file})

        # --- prm attachment (CPLEX parameters)
        docloud_parameters = parameters if parameters is not None else mdl.parameters
        prm_data = self._serialize_parameters(docloud_parameters)
        prm_name = self._make_attachment_name(job_name, '.prm')
        attachments.append({'name': prm_name, 'data': prm_data})

        # --- warm start attachment: export mipstart solution(s) in CPLEX mst
        # format, if any. Within a lexicographic solve, the lex mipstart
        # supersedes all other mipstarts.
        if lex_mipstart:
            mipstart_name = lex_mipstart.name.lower() if lex_mipstart.name else job_name
            warmstart_data = SolutionMSTPrinter.print_to_string(lex_mipstart).encode('utf-8')
            warmstart_name = self._make_attachment_name(mipstart_name, ".mst")
            attachments.append({'name': warmstart_name, 'data': warmstart_data})
        elif mdl.number_of_mip_starts:
            warmstart_name = self._make_attachment_name(job_name, ".mst")
            # FIX: iterate mip starts once instead of twice to build the
            # parallel (solution, effort) lists.
            starts_with_efforts = list(mdl.iter_mip_starts())
            mdl_mipstarts = [s for s, _ in starts_with_efforts]
            mdl_efforts = [eff for _, eff in starts_with_efforts]
            warmstart_data = SolutionMSTPrinter.print_to_string(
                mdl_mipstarts, effort_level=mdl_efforts, use_lp_names=True).encode('utf-8')
            attachments.append({'name': warmstart_name, 'data': warmstart_data})

        # --- benders annotation attachment, if any
        if mdl.has_benders_annotations():
            anno_data = ModelAnnotationPrinter.print_to_string(mdl).encode('utf-8')
            anno_name = self._make_attachment_name(job_name, '.ann')
            attachments.append({'name': anno_name, 'data': anno_data})

        def notify_info(info):
            # Relay job id / progress events from the connector to the model's listeners.
            if "jobid" in info:
                mdl.fire_jobid(jobid=info["jobid"])
            if "progress" in info:
                mdl.fire_progress(progress_data=info["progress"])

        # This block used to be try/catched for DOcloudConnector exceptions
        # and DOcloudException, but then infrastructure errors were not
        # handled properly. Now we let the exception raise.
        connector = self._connector
        mdl.notify_start_solve()
        connector.submit_model_data(attachments,
                                    gzip=not self._exchange_format.is_binary,
                                    info_callback=notify_info,
                                    info_to_monitor={'jobid', 'progress'})

        # --- cplex solve details
        json_details = connector.get_cplex_details()
        self._solve_details = SolveDetails.from_json(json_details)
        self._solve_details._quality_metrics = self._compute_quality_metrics(json_details)

        # --- build a solution object, or None
        solution_handler = JSONSolutionHandler(connector.results.get('solution.json'))
        if not solution_handler.has_solution:
            mdl.notify_solve_failed()
            solution = None
        else:
            solution = self._make_solution(mdl, solution_handler)
        return solution
    finally:
        # serialize_model_as_file produced a temporary file; always clean it up.
        if os.path.isfile(model_file):
            os.remove(model_file)
def refine_conflict(self, mdl, preferences=None, groups=None, parameters=None):
    """ Starts conflict refiner on the model.

    Args:
        mdl: The model for which conflict refinement is performed.
        preferences: a dictionary defining constraints preferences.
        groups: a list of ConstraintsGroup.
        :parameters: cplex parameters .

    Returns:
        A list of "TConflictConstraint" namedtuples, each tuple corresponding to a constraint that is
        involved in the conflict.
        The fields of the "TConflictConstraint" namedtuple are:
            - the name of the constraint or None if the constraint corresponds to a variable lower or upper bound
            - a reference to the constraint or to a wrapper representing a Var upper or lower bound
            - an :enum:'docplex.mp.constants.ConflictStatus' object that indicates the
            conflict status type (Excluded, Possible_member, Member...)
        This list is empty if no conflict is found by the conflict refiner.
    """
    # Before submitting the job, we will build the list of attachments
    attachments = []
    # make sure model is the first attachment: that will be the name of the job on the console
    # NOTE(review): the body operates on self._model, but the printer context
    # comes from the `mdl` argument — confirm callers always pass
    # mdl is self._model, otherwise these can disagree.
    job_name = "python_%s" % self._model.name
    model_data = self.serialize_model(self._model)
    mprinter = self._new_printer(ctx=mdl.context)
    model_name = normalize_basename(job_name) + self._exchange_format.extension
    attachments.append({'name': model_name, 'data': model_data})

    # Conflict Refiner Ext artifact: XML payload describing which constraints
    # (and variable-bound wrappers) the refiner may consider, with preferences.
    artifact_as_xml = CPLEXRefineConflictExtArtifact()
    if groups is None or groups == []:
        # Add all constraints: every variable upper/lower bound (wrapped so a
        # bound can participate in a conflict like a constraint), then all
        # linear, quadratic and indicator constraints with the caller's
        # per-constraint preferences.
        artifact_as_xml.add_constraints(
            ct_type_by_constraint_type[VarUbConstraintWrapper],
            [(VarUbConstraintWrapper(v), mprinter._var_print_name(v))
             for v in self._model.iter_variables()],
            preference=1.0)
        artifact_as_xml.add_constraints(
            ct_type_by_constraint_type[VarLbConstraintWrapper],
            [(VarLbConstraintWrapper(v), mprinter._var_print_name(v))
             for v in self._model.iter_variables()],
            preference=1.0)
        artifact_as_xml.add_constraints(
            ct_type_by_constraint_type[LinearConstraint],
            [(c, mprinter.linearct_print_name(c)) for c in
             self._model.iter_linear_constraints()],
            preference_dict=preferences)
        artifact_as_xml.add_constraints(
            ct_type_by_constraint_type[QuadraticConstraint],
            [(c, mprinter.qc_print_name(c)) for c in
             self._model.iter_quadratic_constraints()],
            preference_dict=preferences)
        artifact_as_xml.add_constraints(
            ct_type_by_constraint_type[IndicatorConstraint],
            [(c, mprinter.logicalct_print_name(c)) for c in
             self._model.iter_indicator_constraints()],
            preference_dict=preferences)
    else:
        # Caller supplied explicit groups: register each group with its
        # preference and attach its constraints.
        for grp in groups:
            group = artifact_as_xml.add_group(grp.preference)
            for ct in grp._cts:
                artifact_as_xml.add_constraint_to_group(group, ct)
    conflict_refiner_data = self._serialize_conflict_refiner(artifact_as_xml)
    # NOTE(review): `feasibility_name` is not defined anywhere in this method —
    # presumably a module-level constant naming the refiner attachment; verify
    # it exists in the enclosing module, otherwise this raises NameError.
    attachments.append({'name': feasibility_name, 'data': conflict_refiner_data})

    def notify_info(info):
        # Relay job id / progress events from the connector to the model's listeners.
        if "jobid" in info:
            self._model.fire_jobid(jobid=info["jobid"])
        if "progress" in info:
            self._model.fire_progress(progress_data=info["progress"])

    # This block used to be try/catched for DOcloudConnector exceptions
    # and DOcloudException, but then infrastructure error were not
    # handled properly. Now we let the exception raise.
    connector = self._connector
    self._model.notify_start_solve()
    connector.submit_model_data(attachments,
                                gzip=not self._exchange_format.is_binary,
                                info_callback=notify_info,
                                info_to_monitor={'jobid', 'progress'})

    # --- build a conflict object, or None
    conflicts_handler = JSONConflictHandler(connector.results.get('conflict.json'),
                                            artifact_as_xml._grps_dict)
    if not conflicts_handler.has_conflict:
        return []
    else:
        return self._get_conflicts_cloud(conflicts_handler)