Example #1
File: test_gc.py  Project: zsyf102900/pyomo
    def test_gc_disable(self):
        # A manually created PauseGC keeps the collector disabled even
        # after a nested "with PauseGC()" block exits; only close()
        # re-enables it.
        self.assertTrue(gc.isenabled())
        pgc = PauseGC()
        self.assertFalse(gc.isenabled())
        with PauseGC():
            self.assertFalse(gc.isenabled())
        self.assertFalse(gc.isenabled())
        pgc.close()
        self.assertTrue(gc.isenabled())

        # A PauseGC created (and closed) inside an active PauseGC context
        # is a no-op: the collector stays disabled until the outer context
        # exits.
        self.assertTrue(gc.isenabled())
        with PauseGC():
            self.assertFalse(gc.isenabled())
            pgc = PauseGC()
            self.assertFalse(gc.isenabled())
            pgc.close()
            self.assertFalse(gc.isenabled())
        self.assertTrue(gc.isenabled())
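The test above relies on PauseGC re-enabling the collector only from the instance that actually disabled it. A minimal sketch of that semantics (hypothetical class name; not Pyomo's actual implementation):

import gc

class PauseGCSketch:
    """Hypothetical stand-in for PauseGC, sketching the nesting behavior
    exercised by the test above (not Pyomo's actual implementation)."""

    def __init__(self):
        # Disable the collector only if it is currently enabled, and
        # remember whether this instance was the one that disabled it.
        self._we_disabled_gc = gc.isenabled()
        if self._we_disabled_gc:
            gc.disable()

    def close(self):
        # Re-enable the collector only if this instance disabled it.
        if self._we_disabled_gc:
            gc.enable()
            self._we_disabled_gc = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()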
Example #2
    def construct(self, data=None):
        """Construct the expression(s) for this constraint."""
        if is_debug_set(logger):
            logger.debug("Constructing constraint %s" % (self.name))
        if self._constructed:
            return
        self._constructed = True

        ref = weakref.ref(self)
        with PauseGC():
            self._data = tuple(
                _MatrixConstraintData(i, ref) for i in range(len(self._lower)))
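The weakref.ref(self) passed to each _MatrixConstraintData lets the per-index data objects point back to their parent without creating reference cycles, which is also why pausing the cyclic GC here is harmless. A hedged sketch of that back-pointer pattern (class names are illustrative, not Pyomo's):

import weakref

class _ChildData:
    """Illustrative child object holding a weak reference to its parent."""

    def __init__(self, index, parent_ref):
        self._index = index
        self._parent_ref = parent_ref  # a weakref.ref, not the parent itself

    def parent(self):
        # Dereference the weakref; returns None if the parent was collected.
        return self._parent_ref()

class _Container:
    """Illustrative parent that builds its children in bulk."""

    def __init__(self, n):
        ref = weakref.ref(self)
        # No cycles are created here, so plain reference counting can
        # reclaim everything even while the cyclic GC is paused.
        self._data = tuple(_ChildData(i, ref) for i in range(n))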
Example #3
    def construct_instances_for_scenario_tree(
            self,
            scenario_tree,
            profile_memory=False,
            output_instance_construction_time=False,
            compile_scenario_instances=False,
            verbose=False):
        assert not self._closed

        if scenario_tree._scenario_based_data:
            if verbose:
                print("Scenario-based instance initialization enabled")
        else:
            if verbose:
                print("Node-based instance initialization enabled")

        scenario_instances = {}
        for scenario in scenario_tree._scenarios:

            # Constructing an instance creates relatively little garbage
            # in the cyclic-GC sense (mainly objects created during parsing
            # and instance simplification / pre-processing).  To speed
            # things along, pause garbage collection for the duration of
            # instance construction if it is enabled in the first place.
            # IDEA: If this becomes too costly for truly large numbers of
            #       scenarios, we could manually collect every time X
            #       instances have been created.
            scenario_instance = None
            with PauseGC() as pgc:
                scenario_instance = \
                    self.construct_scenario_instance(
                        scenario._name,
                        scenario_tree,
                        profile_memory=profile_memory,
                        output_instance_construction_time=output_instance_construction_time,
                        compile_instance=compile_scenario_instances,
                        verbose=verbose)

            scenario_instances[scenario._name] = scenario_instance
            assert scenario_instance.local_name == scenario.name

        return scenario_instances
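The IDEA comment above suggests collecting manually every X instances instead of leaving the collector off for the whole loop. A hedged sketch of that variant (the helper name and batch size are illustrative, not part of Pyomo):

import gc

def build_instances_with_periodic_collection(builders, batch_size=25):
    """Build many objects with the cyclic GC paused, but force a manual
    collection every `batch_size` builds to bound memory growth."""
    results = []
    gc_was_enabled = gc.isenabled()
    if gc_was_enabled:
        gc.disable()
    try:
        for count, build in enumerate(builders, start=1):
            results.append(build())
            if count % batch_size == 0:
                # Explicit collection keeps cyclic garbage bounded even
                # though automatic collection is paused.
                gc.collect()
    finally:
        if gc_was_enabled:
            gc.enable()
    return results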
Example #4
def launch_command(command,
                   options,
                   cmd_args=None,
                   cmd_kwds=None,
                   error_label="",
                   disable_gc=False,
                   profile_count=0,
                   log_level=logging.INFO,
                   traceback=False):
    # This is not the effective level, but the
    # level on the current logger. We want to
    # return the logger to its original state
    # before this function exits
    prev_log_level = logger.level
    logger.setLevel(log_level)

    if cmd_args is None:
        cmd_args = ()
    if cmd_kwds is None:
        cmd_kwds = {}

    #
    # Control the garbage collector - more critical than I would like
    # at the moment.
    #
    with PauseGC(disable_gc) as pgc:

        #
        # Run command - precise invocation depends on whether we want
        # profiling output, traceback, etc.
        #

        rc = 0

        if profile_count > 0:
            # Defer import of profiling packages until we know that they
            # are needed
            try:
                try:
                    import cProfile as profile
                except ImportError:
                    import profile
                import pstats
            except ImportError:
                configure_loggers(shutdown=True)
                raise ValueError(
                    "Cannot use the 'profile' option: the Python "
                    "'profile' or 'pstats' package cannot be imported!")
            #
            # Call the main routine with profiling.
            #
            try:
                tfile = TempfileManager.create_tempfile(suffix=".profile")
                tmp = profile.runctx('command(options, *cmd_args, **cmd_kwds)',
                                     globals(), locals(), tfile)
                p = pstats.Stats(tfile).strip_dirs()
                p.sort_stats('time', 'cumulative')
                p = p.print_stats(profile_count)
                p.print_callers(profile_count)
                p.print_callees(profile_count)
                p = p.sort_stats('cumulative', 'calls')
                p.print_stats(profile_count)
                p.print_callers(profile_count)
                p.print_callees(profile_count)
                p = p.sort_stats('calls')
                p.print_stats(profile_count)
                p.print_callers(profile_count)
                p.print_callees(profile_count)
                TempfileManager.clear_tempfiles()
                rc = tmp
            finally:
                logger.setLevel(prev_log_level)
        else:

            #
            # Call the main PH routine without profiling.
            #
            if traceback:
                try:
                    rc = command(options, *cmd_args, **cmd_kwds)
                finally:
                    logger.setLevel(prev_log_level)
            else:
                try:
                    try:
                        rc = command(options, *cmd_args, **cmd_kwds)
                    except ValueError:
                        sys.stderr.write(error_label + "VALUE ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except KeyError:
                        sys.stderr.write(error_label + "KEY ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except TypeError:
                        sys.stderr.write(error_label + "TYPE ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except NameError:
                        sys.stderr.write(error_label + "NAME ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except IOError:
                        sys.stderr.write(error_label + "IO ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except ConverterError:
                        sys.stderr.write(error_label + "CONVERTER ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except ApplicationError:
                        sys.stderr.write(error_label + "APPLICATION ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except RuntimeError:
                        sys.stderr.write(error_label + "RUN-TIME ERROR:\n")
                        sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        raise
                    except:
                        sys.stderr.write(error_label +
                                         "Encountered unhandled exception:\n")
                        if len(sys.exc_info()) > 1:
                            sys.stderr.write(str(sys.exc_info()[1]) + "\n")
                        else:
                            traceback.print_exc(file=sys.stderr)
                        raise
                except:
                    sys.stderr.write("\n")
                    sys.stderr.write(
                        "To obtain further information regarding the "
                        "source of the exception, use the "
                        "--traceback option\n")
                    rc = 1

    #
    # TODO: Once we incorporate options registration into
    #       all of the PySP commands we will assume the
    #       options object is always a PySPConfigBlock
    #
    if isinstance(options, PySPConfigBlock):
        options.check_usage(error=False)

    logger.setLevel(prev_log_level)

    return rc
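Stripped of the Pyomo-specific plumbing, the profiling branch above follows the standard cProfile/pstats pattern: run the statement through runctx, load the dump into a Stats object, then sort and print. A minimal, self-contained sketch of that pattern (the helper name is illustrative):

import cProfile
import os
import pstats
import tempfile

def profile_call(func, *args, **kwds):
    """Run func under cProfile and print the most expensive calls,
    mirroring the pattern used in launch_command above."""
    fd, profile_file = tempfile.mkstemp(suffix=".profile")
    os.close(fd)
    try:
        # runctx evaluates the statement using the given namespaces and
        # dumps the raw profile data to profile_file.
        cProfile.runctx("func(*args, **kwds)",
                        globals(), locals(), profile_file)
        stats = pstats.Stats(profile_file).strip_dirs()
        stats.sort_stats("time", "cumulative")
        stats.print_stats(20)
    finally:
        os.remove(profile_file)

# Example: profile a cheap throwaway computation.
profile_call(sum, range(100000))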
Example #5
    def _load_model_data(self, modeldata, namespaces, **kwds):
        """
        Load declarations from a DataPortal object.
        """
        #
        # As we are primarily generating objects here (and acyclic ones
        # at that), there is no need to run the GC until the entire
        # model is created.  Simple reference-counting should be
        # sufficient to keep memory use under control.
        #
        with PauseGC() as pgc:

            #
            # Unlike the standard method in the pympler summary
            # module, the tracker doesn't print 0-byte entries to pad
            # out the limit.
            #
            profile_memory = kwds.get('profile_memory', 0)

            if profile_memory >= 2 and pympler_available:
                mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
                print("")
                print("      Total memory = %d bytes prior to model "
                      "construction" % mem_used)

                if profile_memory >= 3:
                    gc.collect()
                    mem_used = pympler.muppy.get_size(
                        pympler.muppy.get_objects())
                    print("      Total memory = %d bytes prior to model "
                          "construction (after garbage collection)" % mem_used)

            #
            # Do some error checking
            #
            for namespace in namespaces:
                if namespace is not None and namespace not in modeldata._data:
                    msg = "Cannot access undefined namespace: '%s'"
                    raise IOError(msg % namespace)

            #
            # Initialize each component in order.
            #

            for component_name, component in self.component_map().items():

                if component.ctype is Model:
                    continue

                self._initialize_component(modeldata, namespaces,
                                           component_name, profile_memory)

            # Note: As written, connectors are expanded when using the
            # command-line pyomo tool, but not when calling
            # model.create(...) from a Python script.  John says this has
            # to do with extension points, which are invoked from the
            # command line but not when writing scripts.  Uncommenting the
            # next two lines switches this behavior (the command line then
            # fails because it tries to expand connectors twice).
            #connector_expander = ConnectorExpander()
            #connector_expander.apply(instance=self)

            if profile_memory >= 2 and pympler_available:
                print("")
                print(
                    "      Summary of objects following instance construction")
                post_construction_summary = pympler.summary.summarize(
                    pympler.muppy.get_objects())
                pympler.summary.print_(post_construction_summary, limit=100)
                print("")
Example #6
    def __call__(self, model, output_filename, solver_capability, io_options):

        # Make sure not to modify the user's dictionary,
        # they may be reusing it outside of this call
        io_options = dict(io_options)

        # Skip writing constraints whose body section is
        # fixed (i.e., no variables)
        skip_trivial_constraints = \
            io_options.pop("skip_trivial_constraints", False)

        # Use full Pyomo component names in the LP file rather
        # than shortened symbols (slower, but useful for debugging).
        symbolic_solver_labels = \
            io_options.pop("symbolic_solver_labels", False)

        output_fixed_variable_bounds = \
            io_options.pop("output_fixed_variable_bounds", False)

        # If False, unused variables will not be included in
        # the LP file. Otherwise, include all variables in
        # the bounds sections.
        include_all_variable_bounds = \
            io_options.pop("include_all_variable_bounds", False)

        labeler = io_options.pop("labeler", None)

        # Specify how much effort to put into ensuring the LP file is
        # written deterministically for a Pyomo model:
        #    0 : None
        #    1 : sort keys of indexed components (default)
        #    2 : sort keys AND sort names (over declaration order)
        file_determinism = io_options.pop("file_determinism", 1)

        # Specify orderings for variable and constraint output
        row_order = io_options.pop("row_order", None)
        column_order = io_options.pop("column_order", None)

        # Make sure the ONE_VAR_CONSTANT variable appears in
        # the objective even if the constant part of the
        # objective is zero
        force_objective_constant = \
            io_options.pop("force_objective_constant", False)

        if len(io_options):
            raise ValueError(
                "ProblemWriter_cpxlp passed unrecognized io_options:\n\t" +
                "\n\t".join("%s = %s" % (k, v) for k, v in io_options.items()))

        if symbolic_solver_labels and (labeler is not None):
            raise ValueError("ProblemWriter_cpxlp: Using both the "
                             "'symbolic_solver_labels' and 'labeler' "
                             "I/O options is forbidden")

        #
        # Create labeler
        #
        if symbolic_solver_labels:
            labeler = TextLabeler()
        elif labeler is None:
            labeler = NumericLabeler('x')

        # Clear the collection of referenced variables.
        self._referenced_variable_ids.clear()

        if output_filename is None:
            output_filename = model.name + ".lp"

        # Sorting creates a non-trivial number of temporary objects, none
        # of which participate in reference cycles.  Disable the cyclic GC
        # while writing: its overhead is non-trivial, and because the
        # references are non-circular, reference counting reclaims
        # everything immediately anyway.
        with PauseGC() as pgc:
            with open(output_filename, "w") as output_file:
                symbol_map = self._print_model_LP(
                    model,
                    output_file,
                    solver_capability,
                    labeler,
                    output_fixed_variable_bounds=output_fixed_variable_bounds,
                    file_determinism=file_determinism,
                    row_order=row_order,
                    column_order=column_order,
                    skip_trivial_constraints=skip_trivial_constraints,
                    force_objective_constant=force_objective_constant,
                    include_all_variable_bounds=include_all_variable_bounds)

        self._referenced_variable_ids.clear()

        return output_filename, symbol_map
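Callers normally reach this writer indirectly through Model.write(). A hedged usage sketch (assumes Pyomo is installed; the io_options names mirror the ones popped above, and the model is a throwaway example):

import pyomo.environ as pyo

model = pyo.ConcreteModel()
model.x = pyo.Var(bounds=(0, 10))
model.obj = pyo.Objective(expr=2 * model.x)
model.con = pyo.Constraint(expr=model.x >= 1)

# The ".lp" suffix selects the LP writer; io_options are forwarded to
# the writer's __call__ shown above.
model.write("small_model.lp",
            io_options={"symbolic_solver_labels": True,
                        "file_determinism": 1})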
Example #7
    def __call__(self, model, output_filename, solver_capability, io_options):
        """
        Write a model in the GAMS modeling language format.

        Keyword Arguments
        -----------------
        output_filename: str
            Name of file to write GAMS model to. Optionally pass a file-like
            stream and the model will be written to that instead.
        io_options: dict
            - warmstart=True
                Warmstart by initializing model's variables to their values.
            - symbolic_solver_labels=False
                Use full Pyomo component names rather than
                shortened symbols (slower, but useful for debugging).
            - labeler=None
                Custom labeler. Incompatible with symbolic_solver_labels.
            - solver=None
                If None, GAMS will use default solver for model type.
            - mtype=None
                Model type. If None, will choose from lp, nlp, mip, and minlp.
            - add_options=None
                List of additional lines to write directly
                into model file before the solve statement.
                For model attributes, <model name> is GAMS_MODEL.
            - skip_trivial_constraints=False
                Skip writing constraints whose body section is fixed.
            - file_determinism=1
                | How much effort do we want to put into ensuring the
                | GAMS file is written deterministically for a Pyomo model:
                |     0 : None
                |     1 : sort keys of indexed components (default)
                |     2 : sort keys AND sort names (over declaration order)
            - put_results=None
                Filename for optionally writing solution values and
                marginals.  If put_results_format is 'gdx', then GAMS
                will write solution values and marginals to
                GAMS_MODEL_p.gdx and solver statuses to
                {put_results}_s.gdx.  If put_results_format is 'dat',
                then solution values and marginals are written to
                (put_results).dat, and solver statuses to (put_results +
                'stat').dat.
            - put_results_format='gdx'
                Format used for put_results, one of 'gdx', 'dat'.

        """

        # Make sure not to modify the user's dictionary,
        # they may be reusing it outside of this call
        io_options = dict(io_options)

        # Use full Pyomo component names rather than
        # shortened symbols (slower, but useful for debugging).
        symbolic_solver_labels = io_options.pop("symbolic_solver_labels",
                                                False)

        # Custom labeler option. Incompatible with symbolic_solver_labels.
        labeler = io_options.pop("labeler", None)

        # If None, GAMS will use default solver for model type.
        solver = io_options.pop("solver", None)

        # If None, will choose from lp, nlp, mip, and minlp.
        mtype = io_options.pop("mtype", None)

        # Improved GAMS calling options
        solprint = io_options.pop("solprint", "off")
        limrow = io_options.pop("limrow", 0)
        limcol = io_options.pop("limcol", 0)
        solvelink = io_options.pop("solvelink", 5)

        # Lines to add before solve statement.
        add_options = io_options.pop("add_options", None)

        # Skip writing constraints whose body section is
        # fixed (i.e., no variables)
        skip_trivial_constraints = \
            io_options.pop("skip_trivial_constraints", False)

        # How much effort do we want to put into ensuring the
        # GAMS file is written deterministically for a Pyomo model:
        #    0 : None
        #    1 : sort keys of indexed components (default)
        #    2 : sort keys AND sort names (over declaration order)
        file_determinism = io_options.pop("file_determinism", 1)
        sorter_map = {
            0: SortComponents.unsorted,
            1: SortComponents.deterministic,
            2: SortComponents.sortBoth
        }
        sort = sorter_map[file_determinism]

        # Warmstart by initializing model's variables to their values.
        warmstart = io_options.pop("warmstart", True)

        # Filename for optionally writing solution values and marginals
        # Set to True by GAMSSolver
        put_results = io_options.pop("put_results", None)
        put_results_format = io_options.pop("put_results_format", 'gdx')
        assert put_results_format in ('gdx', 'dat')

        if len(io_options):
            raise ValueError(
                "GAMS writer passed unrecognized io_options:\n\t" +
                "\n\t".join("%s = %s" % (k, v) for k, v in io_options.items()))

        if solver is not None and solver.upper() not in valid_solvers:
            raise ValueError("GAMS writer passed unrecognized solver: %s" %
                             solver)

        if mtype is not None:
            valid_mtypes = set([
                'lp', 'qcp', 'nlp', 'dnlp', 'rmip', 'mip', 'rmiqcp', 'rminlp',
                'miqcp', 'minlp', 'rmpec', 'mpec', 'mcp', 'cns', 'emp'
            ])
            if mtype.lower() not in valid_mtypes:
                raise ValueError("GAMS writer passed unrecognized "
                                 "model type: %s" % mtype)
            if (solver is not None
                    and mtype.upper() not in valid_solvers[solver.upper()]):
                raise ValueError("GAMS writer passed solver (%s) "
                                 "unsuitable for given model type (%s)" %
                                 (solver, mtype))

        if output_filename is None:
            output_filename = model.name + ".gms"

        if symbolic_solver_labels and (labeler is not None):
            raise ValueError("GAMS writer: Using both the "
                             "'symbolic_solver_labels' and 'labeler' "
                             "I/O options is forbidden")

        if symbolic_solver_labels:
            # Note that the Var and Constraint labelers must use the
            # same labeler so that we can correctly detect name
            # collisions (which can arise when we truncate the labels to
            # the max allowable length).  GAMS requires all identifiers
            # to start with a letter, so we (arbitrarily) choose the
            # prefix "s_" (for 'shortened').
            var_labeler = con_labeler = ShortNameLabeler(
                60,
                prefix='s_',
                suffix='_',
                caseInsensitive=True,
                legalRegex='^[a-zA-Z]')
        elif labeler is None:
            var_labeler = NumericLabeler('x')
            con_labeler = NumericLabeler('c')
        else:
            var_labeler = con_labeler = labeler

        var_list = []

        def var_recorder(obj):
            ans = var_labeler(obj)
            try:
                if obj.is_variable_type():
                    var_list.append(ans)
            except:
                pass
            return ans

        def var_label(obj):
            #if obj.is_fixed():
            #    return str(value(obj))
            return symbolMap.getSymbol(obj, var_recorder)

        symbolMap = SymbolMap(var_label)

        # Sorting creates a non-trivial number of temporary objects, none
        # of which participate in reference cycles.  Disable the cyclic GC
        # while writing: its overhead is non-trivial, and because the
        # references are non-circular, reference counting reclaims
        # everything immediately anyway.
        with PauseGC() as pgc:
            try:
                if isinstance(output_filename, str):
                    output_file = open(output_filename, "w")
                else:
                    # Support passing of stream such as a StringIO
                    # on which to write the model file
                    output_file = output_filename
                self._write_model(
                    model=model,
                    output_file=output_file,
                    solver_capability=solver_capability,
                    var_list=var_list,
                    var_label=var_label,
                    symbolMap=symbolMap,
                    con_labeler=con_labeler,
                    sort=sort,
                    skip_trivial_constraints=skip_trivial_constraints,
                    warmstart=warmstart,
                    solver=solver,
                    mtype=mtype,
                    solprint=solprint,
                    limrow=limrow,
                    limcol=limcol,
                    solvelink=solvelink,
                    add_options=add_options,
                    put_results=put_results,
                    put_results_format=put_results_format,
                )
            finally:
                if isinstance(output_filename, str):
                    output_file.close()

        return output_filename, symbolMap
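The try/finally block above implements a common "path or stream" pattern: open the file only when given a string, and close only what this function opened. A small generic sketch of the same idea (the helper is illustrative, not part of Pyomo):

import io

def write_text(destination, text):
    """Write text to either a filesystem path or an open file-like object,
    closing the handle only if this function opened it."""
    opened_here = isinstance(destination, str)
    output = open(destination, "w") if opened_here else destination
    try:
        output.write(text)
    finally:
        if opened_here:
            output.close()

# Works the same with a path or an in-memory stream such as StringIO.
buffer = io.StringIO()
write_text(buffer, "* illustrative output\n")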
Example #8
    def preprocess(self, preprocessor=None):
        """Apply the preprocess plugins defined by the user"""
        with PauseGC() as pgc:
            if preprocessor is None:
                preprocessor = self.config.preprocessor
            PyomoAPIFactory(preprocessor)(self.config, model=self)