Example #1
def apply_postprocessing(data, instance=None, results=None):
    """
    Apply post-processing steps.

    Required:
        instance:   Problem instance.
        results:    Optimization results object.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Applying Pyomo postprocessing actions\n' %
                         (time.time() - start_time))
        sys.stdout.flush()

    # options are of type ConfigValue, not raw strings / atomics.
    for config_value in data.options.postprocess:
        postprocess = pyutilib.misc.import_file(config_value, clear_cache=True)
        if "pyomo_postprocess" in dir(postprocess):
            postprocess.pyomo_postprocess(data.options, instance, results)

    for ep in ExtensionPoint(IPyomoScriptPostprocess):
        ep.apply(options=data.options, instance=instance, results=results)

    if (pympler_available is True) and (data.options.runtime.profile_memory >=
                                        1):
        mem_used = muppy.get_size(muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print("   Total memory = %d bytes upon termination" % mem_used)
Example #2
File: misc.py  Project: smars8/pyomo
def load_extensions(names, ep_type):
    import pyomo.environ

    plugins = ExtensionPoint(ep_type)

    active_plugins = []
    for this_extension in names:
        module, _ = load_external_module(this_extension)
        assert module is not None
        for name, obj in inspect.getmembers(module, inspect.isclass):
            # the second condition works around issubclass() returning
            # True when obj is the test class itself.
            if issubclass(obj, SingletonPlugin) and \
               (name != "SingletonPlugin"):
                for plugin in plugins(all=True):
                    if isinstance(plugin, obj):
                        active_plugins.append(plugin)

    return tuple(active_plugins)
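A sketch of how load_extensions() might be invoked, assuming the old PySP module layout; the plugin file name and the IPySPSolutionSaverExtension interface are illustrative:

# Hypothetical call site (import paths are assumptions about the old
# PySP layout; the plugin file name is illustrative).
from pyomo.pysp.util.misc import load_extensions
from pyomo.pysp.solutionioextensions import IPySPSolutionSaverExtension

active = load_extensions(["my_saver_plugin.py"], IPySPSolutionSaverExtension)
print("Activated %d plugin(s)" % len(active))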
Example #3
File: misc.py  Project: smars8/pyomo
def parse_command_line(args,
                       register_options_callback,
                       with_extensions=None,
                       **kwds):
    import pyomo.pysp.plugins
    pyomo.pysp.plugins.load()
    from pyomo.pysp.util.config import _domain_tuple_of_str

    registered_extensions = {}
    if with_extensions is not None:
        for name in with_extensions:
            plugins = ExtensionPoint(with_extensions[name])
            for plugin in plugins(all=True):
                registered_extensions.setdefault(name,[]).\
                    append(plugin.__class__.__module__)

    def _get_argument_parser(options):
        # if we modify kwds without copying it, the epilog text
        # will be appended twice the second time this function
        # is called
        _kwds = dict(kwds)
        if len(registered_extensions) > 0:
            assert with_extensions is not None
            epilog = _kwds.pop('epilog',"")
            if epilog != "":
                epilog += "\n\n"
            epilog += "Registered Extensions:\n"
            for name in registered_extensions:
                epilog += " - "+str(with_extensions[name].__name__)+": "
                epilog += str(registered_extensions[name])+"\n"
            _kwds['epilog'] = epilog
        ap = argparse.ArgumentParser(
            add_help=False,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            **_kwds)
        options.initialize_argparse(ap)
        ap.add_argument("-h", "--help", dest="show_help",
                        action="store_true", default=False,
                        help="show this help message and exit")
        return ap

    #
    # Register options
    #
    options = PySPConfigBlock()
    register_options_callback(options)

    if with_extensions is not None:
        for name in with_extensions:
            configval = options.get(name, None)
            assert configval is not None
            assert configval._domain is _domain_tuple_of_str

    ap = _get_argument_parser(options)
    # First parse known args, then import any extension plugins
    # specified by the user, regenerate the options block and
    # reparse to pick up plugin specific registered options
    opts, _ = ap.parse_known_args(args=args)
    options.import_argparse(opts)
    extensions = {}
    if with_extensions is None:
        if opts.show_help:
            pass
    else:
        if all(len(options.get(name).value()) == 0
               for name in with_extensions) and \
               opts.show_help:
            ap.print_help()
            sys.exit(0)
        for name in with_extensions:
            extensions[name] = load_extensions(
                options.get(name).value(),
                with_extensions[name])

    # regenerate the options
    options = PySPConfigBlock()
    register_options_callback(options)
    for name in extensions:
        for plugin in extensions[name]:
            if isinstance(plugin, PySPConfiguredObject):
                plugin.register_options(options)
        # do a dummy access on the option to prevent a warning
        # about it not being used
        options.get(name).value()

    ap = _get_argument_parser(options)
    opts = ap.parse_args(args=args)
    options.import_argparse(opts)
    for name in extensions:
        for plugin in extensions[name]:
            if isinstance(plugin, PySPConfiguredObject):
                plugin.set_options(options)
    if opts.show_help:
        ap.print_help()
        sys.exit(0)

    if with_extensions:
        for name in extensions:
            extensions[name] = sort_extensions_by_precedence(extensions[name])
        return options, extensions
    else:
        return options
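A minimal sketch of a caller, assuming the common PySP option helpers; the option, program name, and description are illustrative:

# Hypothetical driver for parse_command_line() (assumes
# pyomo.pysp.util.config.safe_declare_common_option is available).
from pyomo.pysp.util.config import safe_declare_common_option

def register_options(options):
    # declare whichever common options the tool needs
    safe_declare_common_option(options, "verbose")

options = parse_command_line(["--verbose"],
                             register_options,
                             prog="my_pysp_tool",
                             description="Example PySP command line")
print(options.verbose)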
Example #4
def process_results(data, instance=None, results=None, opt=None):
    """
    Process optimization results.

    Required:
        instance:   Problem instance.
        results:    Optimization results object.
        opt:        Optimizer object.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Processing results\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    if data.options.postsolve.print_logfile:
        print("")
        print("==========================================================")
        print("Solver Logfile: " + str(opt._log_file))
        print("==========================================================")
        print("")
        with open(opt._log_file, "r") as INPUT:
            for line in INPUT:
                sys.stdout.write(line)
        print("==========================================================")
        print("Solver Logfile - END")
        print("==========================================================")
    #
    try:
        # transform the results object into human-readable names.
        instance.solutions.store_to(results)
    except Exception:
        print("Problem updating solver results")
        raise
    #
    if not data.options.postsolve.show_results:
        if data.options.postsolve.save_results:
            results_file = data.options.postsolve.save_results
        elif data.options.postsolve.results_format == 'yaml':
            results_file = 'results.yml'
        else:
            results_file = 'results.json'
        results.write(filename=results_file,
                      format=data.options.postsolve.results_format)
        if not data.options.runtime.logging == 'quiet':
            print("    Number of solutions: " + str(len(results.solution)))
            if len(results.solution) > 0:
                print("    Solution Information")
                print("      Gap: " + str(results.solution[0].gap))
                print("      Status: " + str(results.solution[0].status))
                if len(results.solution[0].objective) == 1:
                    key = list(results.solution[0].objective.keys())[0]
                    print("      Function Value: " +
                          str(results.solution[0].objective[key]['Value']))
            print("    Solver results file: " + results_file)
    #
    #ep = ExtensionPoint(IPyomoScriptPrintResults)
    if data.options.postsolve.show_results:
        print("")
        results.write(num=1, format=data.options.postsolve.results_format)
        print("")
    #
    if data.options.postsolve.summary:
        print("")
        print("==========================================================")
        print("Solution Summary")
        print("==========================================================")
        if len(results.solution(0).variable) > 0:
            print("")
            display(instance)
            print("")
        else:
            print("No solutions reported by solver.")
    #
    for ep in ExtensionPoint(IPyomoScriptPrintResults):
        ep.apply(options=data.options, instance=instance, results=results)
    #
    for ep in ExtensionPoint(IPyomoScriptSaveResults):
        ep.apply(options=data.options, instance=instance, results=results)
    #
    if (pympler_available is True) and (data.options.runtime.profile_memory >=
                                        1):
        global memory_data
        mem_used = muppy.get_size(muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print("   Total memory = %d bytes following results processing" %
              mem_used)
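The IPyomoScriptPrintResults and IPyomoScriptSaveResults loops above dispatch to user-defined callbacks; a sketch of what those hooks might look like in a model file (the hook bodies and output file name are illustrative):

# Hypothetical hooks in the user model file.
def pyomo_print_results(options=None, instance=None, results=None):
    for solution in results.solution:
        print("status=%s  gap=%s" % (solution.status, solution.gap))

def pyomo_save_results(options=None, instance=None, results=None):
    results.write(filename="my_results.yml", format="yaml")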
Example #5
def apply_preprocessing(data, parser=None):
    """
    Execute preprocessing files.

    Required:
        parser: Command-line parser object.

    Returns:
        data:   The data object; data.error is True if an error occurred.
    """
    data.local = pyutilib.misc.Options()
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Applying Pyomo preprocessing actions\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    global filter_excepthook
    #
    #
    # Setup solver and model
    #
    #
    if len(data.options.model.filename) == 0:
        parser.print_help()
        data.error = True
        return data
    #
    if data.options.preprocess is not None:
        for config_value in data.options.preprocess:
            preprocess = pyutilib.misc.import_file(config_value,
                                                   clear_cache=True)
    #
    for ep in ExtensionPoint(IPyomoScriptPreprocess):
        ep.apply(options=data.options)
    #
    # Verify that files exist
    #
    for file in [data.options.model.filename
                 ] + data.options.data.files.value():
        if not os.path.exists(file):
            raise IOError("File " + file + " does not exist!")
    #
    filter_excepthook = True
    data.local.usermodel = pyutilib.misc.import_file(
        data.options.model.filename, clear_cache=True)
    filter_excepthook = False

    usermodel_dir = dir(data.local.usermodel)
    data.local._usermodel_plugins = []
    for key in modelapi:
        if key in usermodel_dir:

            class TMP(Plugin):
                implements(modelapi[key], service=True)

                def __init__(self):
                    self.fn = getattr(data.local.usermodel, key)

                def apply(self, **kwds):
                    return self.fn(**kwds)

            tmp = TMP()
            data.local._usermodel_plugins.append(tmp)
            #print "HERE", modelapi[key], pyomo.util.plugin.interface_services[modelapi[key]]

    #print "HERE", data.options._usermodel_plugins

    if 'pyomo_preprocess' in usermodel_dir:
        if data.options.model.object_name in usermodel_dir:
            msg = "Preprocessing function 'pyomo_preprocess' defined in file" \
                  " '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
        getattr(data.local.usermodel, 'pyomo_preprocess')(options=data.options)
    #
    return data
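The modelapi loop above wraps matching callbacks from the user model file as plugins, and the pyomo_preprocess hook is called directly with options=data.options. A minimal sketch of that hook (the body is illustrative):

# Hypothetical 'pyomo_preprocess' callback in the user model file;
# apply_preprocessing() calls it before the model is built.
def pyomo_preprocess(options=None):
    print("Running user preprocessing for model file %s"
          % options.model.filename)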
Example #6
def create_model(data):
    """
    Create instance of Pyomo model.

    Return:
        model:      Model object.
        instance:   Problem instance.
        symbol_map: Symbol map created when writing model to a file.
        filename:    Filename that a model instance was written to.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Creating model\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    if (pympler_available is True) and (data.options.runtime.profile_memory >=
                                        1):
        global memory_data
        mem_used = muppy.get_size(muppy.get_objects())
        data.local.max_memory = mem_used
        print("   Total memory = %d bytes prior to model construction" %
              mem_used)
    #
    # Find the Model objects
    #
    _models = {}
    for _name, _obj in iteritems(data.local.usermodel.__dict__):
        if isinstance(_obj, Model):
            _models[_name] = _obj
    model_name = data.options.model.object_name
    if len(_models) == 1:
        _name = list(_models.keys())[0]
        if model_name is None:
            model_name = _name
        elif model_name != _name:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))
    elif len(_models) > 1:
        if model_name is None:
            msg = "Multiple models defined in file '%s'!"
            raise SystemExit(msg % data.options.model.filename)
        elif model_name not in _models:
            msg = "Unknown model '%s' in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))

    ep = ExtensionPoint(IPyomoScriptCreateModel)

    if model_name is None:
        if len(ep) == 0:
            msg = "A model is not defined and the 'pyomo_create_model' is not "\
                  "provided in module %s"
            raise SystemExit(msg % data.options.model.filename)
        elif len(ep) > 1:
            msg = 'Multiple model construction plugins have been registered in module %s!'
            raise SystemExit(msg % data.options.model.filename)
        else:
            model_options = data.options.model.options.value()
            model = ep.service().apply(
                options=pyutilib.misc.Container(*data.options),
                model_options=pyutilib.misc.Container(*model_options))
    else:
        if model_name not in _models:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))
        model = _models[model_name]
        if model is None:
            msg = "'%s' object is 'None' in module %s"
            raise SystemExit(msg % (model_name, data.options.model.filename))
        elif len(ep) > 0:
            msg = "Model construction function 'create_model' defined in "    \
                  "file '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)

    #
    # Print model
    #
    for ep in ExtensionPoint(IPyomoScriptPrintModel):
        ep.apply(options=data.options, model=model)

    #
    # Create Problem Instance
    #
    ep = ExtensionPoint(IPyomoScriptCreateDataPortal)
    if len(ep) > 1:
        msg = 'Multiple model data construction plugins have been registered!'
        raise SystemExit(msg)

    if len(ep) == 1:
        modeldata = ep.service().apply(options=data.options, model=model)
    else:
        modeldata = DataPortal()

    if model._constructed:
        #
        # TODO: use a better test for ConcreteModel
        #
        instance = model

    elif len(data.options.data.files) > 1:
        #
        # Load a list of *.dat files
        #
        for file in data.options.data.files:
            suffix = (file).split(".")[-1]
            if suffix != "dat":
                msg = 'When specifying multiple data files, they must all '  \
                      'be *.dat files.  File specified: %s'
                raise SystemExit(msg % str(file))

            modeldata.load(filename=file, model=model)

        instance = model.create_instance(
            modeldata,
            namespaces=data.options.data.namespaces,
            profile_memory=data.options.runtime.profile_memory,
            report_timing=data.options.runtime.report_timing)

    elif len(data.options.data.files) == 1:
        #
        # Load a *.dat file or process a *.py data file
        #
        suffix = (data.options.data.files[0]).split(".")[-1].lower()
        if suffix == "dat":
            instance = model.create_instance(
                data.options.data.files[0],
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        elif suffix == "py":
            userdata = pyutilib.misc.import_file(data.options.data.files[0],
                                                 clear_cache=True)
            if "modeldata" in dir(userdata):
                if len(ep) == 1:
                    msg = "Cannot apply 'pyomo_create_modeldata' and use the" \
                          " 'modeldata' object that is provided in the model"
                    raise SystemExit(msg)

                if userdata.modeldata is None:
                    msg = "'modeldata' object is 'None' in module %s"
                    raise SystemExit(msg % str(data.options.data.files[0]))

                modeldata = userdata.modeldata

            else:
                if len(ep) == 0:
                    msg = "Neither 'modeldata' nor 'pyomo_create_dataportal' "  \
                          'is defined in module %s'
                    raise SystemExit(msg % str(data.options.data.files[0]))

            modeldata.read(model)
            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        elif suffix == "yml" or suffix == 'yaml':
            try:
                import yaml
            except ImportError:
                msg = "Cannot load data from a YAML file: PyYAML is not installed"
                raise SystemExit(msg)

            with open(data.options.data.files[0]) as yaml_file:
                modeldata = yaml.safe_load(yaml_file)
            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        else:
            raise ValueError("Unknown data file type: " +
                             data.options.data.files[0])
    else:
        instance = model.create_instance(
            modeldata,
            namespaces=data.options.data.namespaces,
            profile_memory=data.options.runtime.profile_memory,
            report_timing=data.options.runtime.report_timing)

    #
    modify_start_time = time.time()
    for ep in ExtensionPoint(IPyomoScriptModifyInstance):
        if data.options.runtime.report_timing is True:
            tick = time.time()
        ep.apply(options=data.options, model=model, instance=instance)
        if data.options.runtime.report_timing is True:
            print("      %6.2f seconds to apply %s" %
                  (time.time() - tick, type(ep)))
            tick = time.time()
    #
    for transformation in data.options.transform:
        with TransformationFactory(transformation) as xfrm:
            instance = xfrm.create_using(instance)
            if instance is None:
                raise SystemExit("Unexpected error while applying "
                                 "transformation '%s'" % transformation)
    #
    if data.options.runtime.report_timing is True:
        total_time = time.time() - modify_start_time
        print("      %6.2f seconds required for problem transformations" %
              total_time)

    if logger.isEnabledFor(logging.DEBUG):
        print("MODEL INSTANCE")
        instance.pprint()
        print("")

    for ep in ExtensionPoint(IPyomoScriptPrintInstance):
        ep.apply(options=data.options, instance=instance)

    fname = None
    smap_id = None
    if data.options.model.save_file is not None:

        if data.options.runtime.report_timing is True:
            write_start_time = time.time()

        if data.options.model.save_file == True:
            if data.local.model_format in (ProblemFormat.cpxlp,
                                           ProblemFormat.lpxlp):
                fname = (data.options.data.files[0])[:-3] + 'lp'
            else:
                fname = (data.options.data.files[0])[:-3] + str(
                    data.local.model_format)
            format = data.local.model_format
        else:
            fname = data.options.model.save_file
            format = data.options.model.save_format

        io_options = {}
        if data.options.model.symbolic_solver_labels:
            io_options['symbolic_solver_labels'] = True
        if data.options.model.file_determinism != 1:
            io_options[
                'file_determinism'] = data.options.model.file_determinism
        (fname, smap_id) = instance.write(filename=fname,
                                          format=format,
                                          io_options=io_options)

        if not data.options.runtime.logging == 'quiet':
            if not os.path.exists(fname):
                print("ERROR: file " + fname + " has not been created!")
            else:
                print("Model written to file '" + str(fname) + "'")

        if data.options.runtime.report_timing is True:
            total_time = time.time() - write_start_time
            print("      %6.2f seconds required to write file" % total_time)

        if (pympler_available is True) and (data.options.runtime.profile_memory
                                            >= 2):
            print("")
            print("      Summary of objects following file output")
            post_file_output_summary = summary.summarize(muppy.get_objects())
            summary.print_(post_file_output_summary, limit=100)

            print("")

    for ep in ExtensionPoint(IPyomoScriptSaveInstance):
        ep.apply(options=data.options, instance=instance)

    if (pympler_available is True) and (data.options.runtime.profile_memory >=
                                        1):
        mem_used = muppy.get_size(muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print("   Total memory = %d bytes following Pyomo instance creation" %
              mem_used)

    return pyutilib.misc.Options(model=model,
                                 instance=instance,
                                 smap_id=smap_id,
                                 filename=fname,
                                 local=data.local)
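When no Model object is found in the user file, the IPyomoScriptCreateModel service is expected to build one; in the scripting workflow that service typically wraps a pyomo_create_model callback. A minimal sketch of such a callback (the model contents are illustrative):

# Hypothetical 'pyomo_create_model' callback in the user model file.
from pyomo.environ import ConcreteModel, Var, Objective

def pyomo_create_model(options=None, model_options=None):
    model = ConcreteModel()
    model.x = Var(bounds=(0, 10))
    model.obj = Objective(expr=(model.x - 3) ** 2)
    return model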
Example #7
    def construct_scenario_instance(self,
                                    scenario_name,
                                    scenario_tree,
                                    profile_memory=False,
                                    output_instance_construction_time=False,
                                    compile_instance=False):

        if not scenario_tree.contains_scenario(scenario_name):
            raise ValueError("ScenarioTree does not contain scenario "
                             "with name %s." % (scenario_name))

        scenario = scenario_tree.get_scenario(scenario_name)
        node_name_list = [n._name for n in scenario._node_list]

        if self._verbose:
            print("Creating instance for scenario=%s" % (scenario_name))

        scenario_instance = None

        try:

            if self._model_callback is not None:

                assert self._model_object is None
                scenario_instance = self._model_callback(scenario_name,
                                                         node_name_list)

            elif self._model_object is not None:

                if scenario_tree._scenario_based_data:

                    scenario_data_filename = \
                        os.path.join(self._scenario_tree_directory,
                                     str(scenario_name))
                    # JPW: The following is a hack to support
                    #      initialization of block instances, which
                    #      don't work with .dat files at the
                    #      moment. Actually, it's not that bad of a
                    #      hack - it just needs to be extended a bit,
                    #      and expanded into the node-based data read
                    #      logic (where yaml is completely ignored at
                    #      the moment).
                    if os.path.exists(scenario_data_filename+'.dat'):
                        scenario_data_filename = \
                            scenario_data_filename + ".dat"
                        data = None
                    elif os.path.exists(scenario_data_filename+'.yaml'):
                        import yaml
                        scenario_data_filename = \
                            scenario_data_filename + ".yaml"
                        with open(scenario_data_filename, "r") as yaml_input_file:
                            data = yaml.safe_load(yaml_input_file)
                    else:
                        raise RuntimeError(
                            "Cannot find the scenario data for "
                            + scenario_data_filename)
                    if self._verbose:
                        print("Data for scenario=%s loads from file=%s"
                              % (scenario_name, scenario_data_filename))
                    if data is None:
                        scenario_instance = \
                            self._model_object.create_instance(
                                filename=scenario_data_filename,
                                preprocess=False,
                                profile_memory=profile_memory,
                                report_timing=output_instance_construction_time)
                    else:
                        scenario_instance = \
                            self._model_object.create_instance(
                                data,
                                preprocess=False,
                                profile_memory=profile_memory,
                                report_timing=output_instance_construction_time)
                else:

                    data_files = []
                    for node_name in node_name_list:
                        node_data_filename = \
                            os.path.join(self._scenario_tree_directory,
                                         str(node_name)+".dat")
                        if not os.path.exists(node_data_filename):
                            raise RuntimeError(
                                "Node data file="+node_data_filename+
                                " does not exist or cannot be accessed")
                        data_files.append(node_data_filename)

                    scenario_data = DataPortal(model=self._model_object)
                    for data_file in data_files:
                        if self._verbose:
                            print("Node data for scenario=%s partially "
                                  "loading from file=%s"
                                  % (scenario_name, data_file))
                        scenario_data.load(filename=data_file)

                    scenario_instance = self._model_object.create_instance(
                        scenario_data,
                        preprocess=False,
                        profile_memory=profile_memory,
                        report_timing=output_instance_construction_time)
            else:
                raise RuntimeError("Unable to construct scenario instance. "
                                   "Neither a reference model or callback "
                                   "is defined.")

            # name each instance with the scenario name
            scenario_instance.name = scenario_name

            # apply each of the post-instance creation plugins. this
            # really shouldn't be associated (in terms of naming) with the
            # pyomo script - this should be rectified with a workflow
            # re-work. it is unclear how this interacts, or doesn't, with
            # the preprocessors.
            for ep in ExtensionPoint(IPyomoScriptModifyInstance):
                logger.warning(
                    "DEPRECATED: IPyomoScriptModifyInstance extension "
                    "point callbacks will be ignored by PySP in the future")
                ep.apply(options=None,
                         model=reference_model,
                         instance=scenario_instance)

            if compile_instance:
                from pyomo.repn.beta.matrix import compile_block_linear_constraints
                compile_block_linear_constraints(
                    scenario_instance,
                    "_PySP_compiled_linear_constraints",
                    verbose=self._verbose)

        except Exception as exc:
            msg = ("Failed to create model instance for scenario=%s"
                   % (scenario_name))
            print(msg)
            raise

        return scenario_instance
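A hedged sketch of driving construct_scenario_instance() through the PySP scenario tree instance factory; the class location and file names follow the old PySP layout and are assumptions:

# Hypothetical usage (old PySP layout assumed; file names illustrative).
from pyomo.pysp.scenariotree.instance_factory import ScenarioTreeInstanceFactory

factory = ScenarioTreeInstanceFactory("ReferenceModel.py",
                                      "ScenarioStructure.dat")
scenario_tree = factory.generate_scenario_tree()
instance = factory.construct_scenario_instance("Scenario1", scenario_tree)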
Example #8
def cleanup():
    for key in modelapi:
        for ep in ExtensionPoint(modelapi[key]):
            ep.deactivate()
Example #9
class OptProblem(object):
    """
    A class that defines an application that can be optimized
    by a COLIN optimizer via system calls.
    """

    io_manager = ExtensionPoint(IBlackBoxOptProblemIO)

    def __init__(self):
        """
        The constructor.  Derived classes should define the response types.

        By default, only function evaluations are supported in an OptProblem
        instance.
        """
        self.response_types = [response_enum.FunctionValue]

    def main(self, argv, format='colin'):
        """
        The main routine for parsing the command-line and executing
        the evaluation.
        """
        if len(argv) < 3:  #pragma:nocover
            print(argv[0] + " <input> <output> <log>")
            sys.exit(1)
        #
        # Get enum strings
        #
        self.response_str = list(map(str, self.response_types))
        #
        # Parse XML input file
        #
        iomngr = OptProblem.io_manager.service(format)
        if iomngr is None:
            raise ValueError("Unknown IO format '%s' for COLIN OptProblem" %
                             str(format))
        if not os.path.exists(argv[1]):
            raise IOError("Unknown input file '%s'" % argv[1])
        self._compute_prefix(argv[1])
        point = self.create_point()
        point, requests = iomngr.read(argv[1], point)
        self.validate(point)
        response = self._compute_results(point, requests)
        iomngr.write(argv[2], response)

    def create_point(self):
        """
        Create the point type for this domain.
        This method is overridden to customize an OptProblem for the
        search domain.
        """
        return None  #pragma:nocover

    def function_value(self, point):
        """
        Compute a function value.
        """
        return None  #pragma:nocover

    def function_values(self, point):  #pragma:nocover
        """
        Compute a list of function values.
        """
        val = self.function_value(point)
        if val is None:
            return []
        else:
            return [val]

    def gradient(self, point):
        """
        Compute a function gradient.
        """
        return []  #pragma:nocover

    def hessian(self, point):
        """
        Compute a function Hessian matrix.
        """
        return {}  #pragma:nocover

    def nonlinear_constraint_values(self, point):
        """
        Compute nonlinear constraint values.
        """
        return []  #pragma:nocover

    def jacobian(self, point):
        """
        Compute the Jacobian.
        """
        return {}  #pragma:nocover

    def _compute_results(self, point, requests):
        """
        Compute the requested results.
        """
        response = {}
        for key in requests:
            if key not in self.response_str:
                response[
                    key] = "ERROR: Unsupported application request %s" % str(
                        key)
            #
            elif key == "FunctionValue":
                response[key] = self.function_value(point)
            elif key == "FunctionValues":
                response[key] = self.function_values(point)
            elif key == "Gradient":
                response[key] = self.gradient(point)
            elif key == "NonlinearConstraintValues":
                response[key] = self.nonlinear_constraint_values(point)
            elif key == "Jacobian":
                response[key] = self.jacobian(point)
            elif key == "Hessian":
                response[key] = self.hessian(point)
            #
        return response

    def _compute_prefix(self, filename):
        base, ext = os.path.splitext(filename)
        self.prefix = base

    def validate(self, point):  #pragma:nocover
        """
        This method should raise an exception if the point is invalid.
        """
        pass
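A minimal sketch of a concrete OptProblem subclass; the dictionary-based point and the single variable 'x1' are illustrative stand-ins for whatever point type the chosen IBlackBoxOptProblemIO format provides. An application would then run QuadraticProblem().main(sys.argv) so a COLIN optimizer can drive it through the input/output files.

# Hypothetical OptProblem subclass (point handling is illustrative;
# the import path is an assumption).
from pyomo.opt.blackbox import OptProblem

class QuadraticProblem(OptProblem):

    def create_point(self):
        # container for the decision variables read by the IO manager
        return {}

    def function_value(self, point):
        return (point["x1"] - 2.0) ** 2

    def validate(self, point):
        if "x1" not in point:
            raise ValueError("point is missing variable 'x1'")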
Example #10
class Model(SimpleBlock):
    """
    An optimization model.  By default, this defers construction of components
    until data is loaded.
    """

    preprocessor_ep = ExtensionPoint(IPyomoPresolver)

    _Block_reserved_words = set()

    def __new__(cls, *args, **kwds):
        if cls != Model:
            return super(Model, cls).__new__(cls)

        logger.warning(
            """DEPRECATION WARNING: Using the 'Model' class is deprecated.  Please
use the AbstractModel or ConcreteModel class instead.""")
        return AbstractModel.__new__(AbstractModel)

    def __init__(self, name='unknown', **kwargs):
        """Constructor"""
        #
        # NOTE: The 'ctype' keyword argument is not defined here.  Thus,
        # a model is treated as a 'Block' class type.  This simplifies
        # the definition of the block_data_objects() method, since we treat
        # Model and Block objects as the same.  Similarly, this avoids
        # the requirement to import PyomoModel.py in the block.py file.
        #
        SimpleBlock.__init__(self, **kwargs)
        self._name = name
        self.statistics = Container()
        self.config = PyomoConfig()
        self.solutions = ModelSolutions(self)
        self.config.preprocessor = 'pyomo.model.simple_preprocessor'

    def compute_statistics(self, active=True):
        """
        Compute model statistics
        """
        if len(self.statistics) > 0:
            return
        self.statistics.number_of_variables = 0
        self.statistics.number_of_constraints = 0
        self.statistics.number_of_objectives = 0
        for block in self.block_data_objects(active=active):
            for data in block.component_map(Var, active=active).itervalues():
                self.statistics.number_of_variables += len(data)
            for data in block.component_map(Objective,
                                            active=active).itervalues():
                self.statistics.number_of_objectives += len(data)
            for data in block.component_map(Constraint,
                                            active=active).itervalues():
                self.statistics.number_of_constraints += len(data)

    def nvariables(self):
        self.compute_statistics()
        return self.statistics.number_of_variables

    def nconstraints(self):
        self.compute_statistics()
        return self.statistics.number_of_constraints

    def nobjectives(self):
        self.compute_statistics()
        return self.statistics.number_of_objectives

    def create_instance(self,
                        filename=None,
                        data=None,
                        name=None,
                        namespace=None,
                        namespaces=None,
                        profile_memory=0,
                        report_timing=False,
                        **kwds):
        """
        Create a concrete instance of an abstract model, possibly using data
        read in from a file.

        Optional:
            filename:           The name of a Pyomo Data File that will be used
                                    to load data into the model.
            data:               A dictionary containing initialization data for
                                    the model to be used if there is no filename
            name:               The name given to the model.
            namespace:          A namespace used to select data.
            namespaces:         A list of namespaces used to select data.
            profile_memory:     A number that indicates the profiling level.
            report_timing:      Report timing statistics during construction.
        """
        #
        # Generate a warning if this is a concrete model but the
        # filename is specified.  A concrete model is already
        # constructed, so passing in a data file is a waste of time.
        #
        if self.is_constructed() and isinstance(filename, string_types):
            msg = "The filename=%s will not be loaded - supplied as an " \
                  "argument to the create_instance() method of a "\
                  "concrete instance with name=%s." % (filename, name)
            logger.warning(msg)

        if 'clone' in kwds:
            kwds.pop('clone')
            logger.warning(
                """DEPRECATION WARNING: Model.create_instance() no longer accepts the
'clone' argument: the base abstract model is always cloned.""")
        if 'preprocess' in kwds:
            kwds.pop('preprocess')
            logger.warning(
                """DEPRECATION WARNING: Model.create_instance() no longer accepts the
'preprocess' argument: preprocessing is always deferred to when the
model is sent to the solver""")
        if kwds:
            msg = \
"""Model.create_instance() passed the following unrecognized keyword
arguments (which have been ignored):"""
            for k in kwds:
                msg = msg + "\n    '%s'" % (k, )
            logger.error(msg)

        if self.is_constructed():
            logger.warning(
                """DEPRECATION WARNING: Cannot call Model.create_instance() on a
constructed model; returning a clone of the current model instance.""")
            return self.clone()

        if name is None:
            name = self.name
        if filename is not None:
            if data is not None:
                logger.warning(
                    "Model.create_instance() passed both 'filename' "
                    "and 'data' keyword arguments.  Ignoring the "
                    "'data' argument")
            data = filename
        if data is None:
            data = {}

        #
        # Clone the model and load the data
        #
        instance = self.clone()

        if name is not None:
            instance._name = name

        # If someone passed a rule for creating the instance, fire the
        # rule before constructing the components.
        if instance._rule is not None:
            instance._rule(instance)

        if namespaces:
            _namespaces = list(namespaces)
        else:
            _namespaces = []
        if namespace is not None:
            _namespaces.append(namespace)
        if None not in _namespaces:
            _namespaces.append(None)

        instance.load(data,
                      namespaces=_namespaces,
                      profile_memory=profile_memory,
                      report_timing=report_timing)

        #
        # Preprocess the new model
        #

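        # NOTE: the block below is intentionally disabled ('preprocess'
        # is no longer supported; see the deprecation warning above).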
        if False and preprocess is True:

            if report_timing is True:
                start_time = time.time()

            instance.preprocess()

            if report_timing is True:
                total_time = time.time() - start_time
                print("      %6.2f seconds required for preprocessing" %
                      total_time)

            if (pympler_available is True) and (profile_memory >= 2):
                mem_used = muppy.get_size(muppy.get_objects())
                print(
                    "      Total memory = %d bytes following instance preprocessing"
                    % mem_used)
                print("")

            if (pympler_available is True) and (profile_memory >= 2):
                print("")
                print(
                    "      Summary of objects following instance preprocessing"
                )
                post_preprocessing_summary = summary.summarize(
                    muppy.get_objects())
                summary.print_(post_preprocessing_summary, limit=100)

        #
        # Indicate that the model is concrete/constructed
        #
        instance._constructed = True
        return instance

    def clone(self):
        instance = SimpleBlock.clone(self)
        # Do not keep cloned solutions, which point to the original model
        instance.solutions.clear()
        instance.solutions._instance = weakref_ref(instance)
        return instance

    def preprocess(self, preprocessor=None):
        """Apply the preprocess plugins defined by the user"""
        with PauseGC() as pgc:
            if preprocessor is None:
                preprocessor = self.config.preprocessor
            pyomo.util.PyomoAPIFactory(preprocessor)(self.config, model=self)

    def load(self,
             arg,
             namespaces=[None],
             profile_memory=0,
             report_timing=False):
        """
        Load the model with data from a file, dictionary or DataPortal object.
        """
        if arg is None or isinstance(arg, basestring):
            dp = DataPortal(filename=arg, model=self)
        elif type(arg) is DataPortal:
            dp = arg
        elif type(arg) is dict:
            dp = DataPortal(data_dict=arg, model=self)
        elif isinstance(arg, SolverResults):
            if len(arg.solution):
                logger.warning(
                    """DEPRECATION WARNING: the Model.load() method is deprecated for
loading solutions stored in SolverResults objects.  Call
Model.solutions.load_from().""")
                self.solutions.load_from(arg)
            else:
                logger.warning(
                    """DEPRECATION WARNING: the Model.load() method is deprecated for
loading solutions stored in SolverResults objects.  By default, results
from solvers are immediately loaded into the original model instance.""")
            return
        else:
            msg = "Cannot load model model data from with object of type '%s'"
            raise ValueError(msg % str(type(arg)))
        self._load_model_data(dp,
                              namespaces,
                              profile_memory=profile_memory,
                              report_timing=report_timing)

    def _tuplize(self, data, setobj):
        if data is None:  #pragma:nocover
            return None
        if setobj.dimen == 1:
            return data
        ans = {}
        for key in data:
            if type(data[key][0]) is tuple:
                return data
            ans[key] = tuplize(data[key], setobj.dimen, setobj.local_name)
        return ans

    def _load_model_data(self, modeldata, namespaces, **kwds):
        """
        Load declarations from a DataPortal object.
        """
        #
        # As we are primarily generating objects here (and acyclic ones
        # at that), there is no need to run the GC until the entire
        # model is created.  Simple reference-counting should be
        # sufficient to keep memory use under control.
        #
        with PauseGC() as pgc:

            #
            # Unlike the standard method in the pympler summary
            # module, the tracker doesn't print 0-byte entries to pad
            # out the limit.
            #
            profile_memory = kwds.get('profile_memory', 0)

            #
            # It is often useful to report timing results for various
            # activities during model construction.
            #
            report_timing = kwds.get('report_timing', False)

            if (pympler_available is True) and (profile_memory >= 2):
                mem_used = muppy.get_size(muppy.get_objects())
                print("")
                print("      Total memory = %d bytes prior to model "
                      "construction" % mem_used)

            if (pympler_available is True) and (profile_memory >= 3):
                gc.collect()
                mem_used = muppy.get_size(muppy.get_objects())
                print("      Total memory = %d bytes prior to model "
                      "construction (after garbage collection)" % mem_used)

            #
            # Do some error checking
            #
            for namespace in namespaces:
                if namespace is not None and namespace not in modeldata._data:
                    msg = "Cannot access undefined namespace: '%s'"
                    raise IOError(msg % namespace)

            #
            # Initialize each component in order.
            #

            if report_timing is True:
                import pyomo.core.base.expr as EXPR
                construction_start_time = time.time()

            for component_name, component in iteritems(self.component_map()):

                if component.type() is Model:
                    continue

                if report_timing is True:
                    start_time = time.time()
                    clone_counters = EXPR.generate_expression.clone_counter

                self._initialize_component(modeldata, namespaces,
                                           component_name, profile_memory)

                if report_timing is True:
                    total_time = time.time() - start_time
                    if isinstance(component, IndexedComponent):
                        clen = len(component)
                    else:
                        assert isinstance(component, Component)
                        clen = 1
                    print("    %%6.%df seconds required to construct component=%s; %d indicies total" \
                              % (total_time>=0.005 and 2 or 0, component_name, clen) \
                              % total_time)
                    tmp_clone_counters = EXPR.generate_expression.clone_counter
                    if clone_counters != tmp_clone_counters:
                        clone_counters = tmp_clone_counters
                        print(
                            "             Cloning detected! (clone counters: %d)"
                            % clone_counters)

            # Note: As-is, connectors are expanded when using the
            # command-line 'pyomo' driver, but not when calling
            # model.create(...) from a Python script.  John says this has
            # to do with extension points, which are called from the
            # command line but not when writing scripts.  Uncommenting the
            # next two lines switches this behavior (the command line then
            # fails because it tries to expand connectors twice).
            #connector_expander = ConnectorExpander()
            #connector_expander.apply(instance=self)

            if report_timing is True:
                total_construction_time = time.time() - construction_start_time
                print("      %6.2f seconds required to construct instance=%s" %
                      (total_construction_time, self.name))

            if (pympler_available is True) and (profile_memory >= 2):
                print("")
                print(
                    "      Summary of objects following instance construction")
                post_construction_summary = summary.summarize(
                    muppy.get_objects())
                summary.print_(post_construction_summary, limit=100)
                print("")

    def _initialize_component(self, modeldata, namespaces, component_name,
                              profile_memory):
        declaration = self.component(component_name)

        if component_name in modeldata._default:
            if declaration.type() is not Set:
                declaration.set_default(modeldata._default[component_name])
        data = None

        for namespace in namespaces:
            if component_name in modeldata._data.get(namespace, {}):
                if declaration.type() is Set:
                    data = self._tuplize(
                        modeldata._data[namespace][component_name],
                        declaration)
                else:
                    data = modeldata._data[namespace][component_name]
            if data is not None:
                break

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            _blockName = "Model" if self.parent_block() is None \
                else "Block '%s'" % self.name
            logger.debug("Constructing %s '%s' on %s from data=%s",
                         declaration.__class__.__name__, declaration.name,
                         _blockName, str(data))
        try:
            declaration.construct(data)
        except:
            err = sys.exc_info()[1]
            logger.error(
                "Constructing component '%s' from data=%s failed:\n%s: %s",
                str(declaration.name),
                str(data).strip(),
                type(err).__name__, err)
            raise

        if __debug__ and logger.isEnabledFor(logging.DEBUG):
            _out = StringIO()
            declaration.pprint(ostream=_out)
            logger.debug("Constructed component '%s':\n%s" %
                         (declaration.name, _out.getvalue()))

        if (pympler_available is True) and (profile_memory >= 2):
            mem_used = muppy.get_size(muppy.get_objects())
            print(
                "      Total memory = %d bytes following construction of component=%s"
                % (mem_used, component_name))

        if (pympler_available is True) and (profile_memory >= 3):
            gc.collect()
            mem_used = muppy.get_size(muppy.get_objects())
            print(
                "      Total memory = %d bytes following construction of component=%s (after garbage collection)"
                % (mem_used, component_name))

    def create(self, filename=None, **kwargs):
        """
        Create a concrete instance of this Model, possibly using data
        read in from a file.
        """
        logger.warning(
            """DEPRECATION WARNING: the Model.create() method is deprecated.  Call
Model.create_instance() to create a concrete instance from an abstract
model.  You do not need to call Model.create() for a concrete model.""")
        return self.create_instance(filename=filename, **kwargs)

    def transform(self, name=None, **kwds):
        if name is None:
            logger.warning(
                """DEPRECATION WARNING: Model.transform() is deprecated.  Use
TransformationFactory().services() method to get the list of known
transformations.""")
            return TransformationFactory.services()

        logger.warning(
            """DEPRECATION WARNING: Model.transform() is deprecated.  Use
TransformationFactory('%s') to construct a transformation object, or
TransformationFactory('%s').apply_to(model) to directly apply the
transformation to the model instance.""" % (
                name,
                name,
            ))

        xfrm = TransformationFactory(name)
        if xfrm is None:
            raise ValueError("Unknown model transformation '%s'" % name)
        return xfrm.apply_to(self, **kwds)
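The create_instance() workflow above is normally reached through AbstractModel; a minimal sketch, where the data file name is illustrative and must define set I and parameter c:

# Hypothetical abstract-model workflow exercising create_instance().
from pyomo.environ import (AbstractModel, Set, Param, Var, Objective,
                           NonNegativeReals, summation)

model = AbstractModel()
model.I = Set()
model.c = Param(model.I)
model.x = Var(model.I, within=NonNegativeReals)

def obj_rule(m):
    return summation(m.c, m.x)
model.obj = Objective(rule=obj_rule)

instance = model.create_instance("mydata.dat")
instance.pprint()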
Example #11
def convert_problem(args,
                    target_problem_type,
                    valid_problem_types,
                    has_capability=lambda x: False,
                    **kwds):
    """
    Convert a problem, defined by the 'args' tuple, into another
    problem.
    """

    if len(valid_problem_types) == 0:
        raise ConverterError("No valid problem types")

    if not (target_problem_type is None or \
             target_problem_type in valid_problem_types):
        msg = "Problem type '%s' is not valid"
        raise ConverterError(msg % str( target_problem_type ))

    if len(args) == 0:
        raise ConverterError("Empty argument list")

    #
    # Setup list of source problem types
    #
    tmp = args[0]
    if isinstance(tmp,basestring):
        fname = tmp.split(os.sep)[-1]
        if os.sep in fname:   #pragma:nocover
            fname = tmp.split(os.sep)[-1]
        source_ptype = [guess_format(fname)]
        if source_ptype == [None]:
            raise ConverterError("Unknown suffix type: "+tmp)
    else:
        source_ptype = args[0].valid_problem_types()

    #
    # Setup list of valid problem types
    #
    valid_ptypes = copy.copy(valid_problem_types)
    if target_problem_type is not None:
        valid_ptypes.remove(target_problem_type)
        valid_ptypes = [target_problem_type]  + valid_ptypes
    if source_ptype[0] in valid_ptypes:
        valid_ptypes.remove(source_ptype[0])
        valid_ptypes = [source_ptype[0]]  + valid_ptypes

    #
    # Iterate over the valid problem types, starting with the target type
    #
    # Apply conversion and return for first match
    #
    for ptype in valid_ptypes:

        for s_ptype in source_ptype:

            #
            # If the source and target types are equal, then simply return
            # the args (return just the first element of the tuple if it
            # has length one).
            #
            if s_ptype == ptype:
                return (args,ptype,None)
            #
            # Otherwise, try to convert
            #
            for converter in ExtensionPoint(IProblemConverter):

                if converter.can_convert(s_ptype, ptype):
                    tmp = [s_ptype,ptype] + list(args)
                    tmp = tuple(tmp)
                    # propagate input keywords to the converter
                    tmpkw = kwds
                    tmpkw['capabilities'] = has_capability
                    problem_files, symbol_map = converter.apply(*tmp, **tmpkw)
                    return problem_files, ptype, symbol_map

    msg = 'No conversion possible.  Source problem type: %s.  Valid target '  \
          'types: %s'
    raise ConverterError(msg % (str(source_ptype[0]), list(map(str, valid_ptypes))))
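A hedged sketch of calling convert_problem() directly; the import paths and the input file name are assumptions, and the conversions that succeed depend on which IProblemConverter plugins are registered:

# Hypothetical call (import paths are assumptions; 'model.mps' illustrative).
from pyomo.opt import ProblemFormat
from pyomo.opt.base.convert import convert_problem

problem_files, ptype, symbol_map = convert_problem(
    ("model.mps",),
    ProblemFormat.cpxlp,
    [ProblemFormat.cpxlp, ProblemFormat.mps])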
Example #12
def create_model(data):
    """
    Create instance of Pyomo model.

    Return:
        model:      Model object.
        instance:   Problem instance.
        symbol_map: Symbol map created when writing model to a file.
        filename:    Filename that a model instance was written to.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Creating model\n' % (time.time()-start_time))
        sys.stdout.flush()
    #
    if (pympler_available is True) and (data.options.runtime.profile_memory >= 1):
        global memory_data
        mem_used = muppy.get_size(muppy.get_objects())
        data.local.max_memory = mem_used
        print("   Total memory = %d bytes prior to model construction" % mem_used)
    #
    # Create Model
    #
    ep = ExtensionPoint(IPyomoScriptCreateModel)
    model_name = 'model'
    if data.options.model.object_name is not None: model_name = data.options.model.object_name

    if model_name in dir(data.local.usermodel):
        if len(ep) > 0:
            msg = "Model construction function 'create_model' defined in "    \
                  "file '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
        model = getattr(data.local.usermodel, data.options.model.object_name)

        if model is None:
            msg = "'%s' object is 'None' in module %s"
            raise SystemExit(msg % (model_name, data.options.model.filename))

    else:
        if len(ep) == 0:
            msg = "Neither '%s' nor 'pyomo_create_model' are available in "    \
                  'module %s'
            raise SystemExit(msg % ( model_name, data.options.model.filename ))
        elif len(ep) > 1:
            msg = 'Multiple model construction plugins have been registered!'
            raise SystemExit(msg)
        else:
            model_options = data.options.model.options.value()
            if model_options is None:
                model_options = []
            model = ep.service().apply(
                options=pyutilib.misc.Container(*data.options),
                model_options=pyutilib.misc.Container(*model_options))
    #
    for ep in ExtensionPoint(IPyomoScriptPrintModel):
        ep.apply( options=data.options, model=model )

    #
    # Create Problem Instance
    #
    ep = ExtensionPoint(IPyomoScriptCreateDataPortal)
    if len(ep) > 1:
        msg = 'Multiple model data construction plugins have been registered!'
        raise SystemExit(msg)

    if len(ep) == 1:
        modeldata = ep.service().apply( options=data.options, model=model )
    else:
        modeldata = DataPortal()


    if model._constructed:
        #
        # TODO: use a better test for ConcreteModel
        #
        instance = model

    elif len(data.options.data.files) > 1:
        #
        # Load a list of *.dat files
        #
        for file in data.options.data.files:
            suffix = (file).split(".")[-1]
            if suffix != "dat":
                msg = 'When specifying multiple data files, they must all '  \
                      'be *.dat files.  File specified: %s'
                raise SystemExit(msg % str( file ))

            modeldata.load(filename=file, model=model)

        instance = model.create_instance(modeldata,
                                         namespaces=data.options.data.namespaces,
                                         profile_memory=data.options.runtime.profile_memory,
                                         report_timing=data.options.runtime.report_timing)

    elif len(data.options.data.files) == 1:
        #
        # Load a *.dat file or process a *.py data file
        #
        suffix = (data.options.data.files[0]).split(".")[-1].lower()
        if suffix == "dat":
            instance = model.create_instance(data.options.data.files[0],
                                             namespaces=data.options.data.namespaces,
                                             profile_memory=data.options.runtime.profile_memory,
                                             report_timing=data.options.runtime.report_timing)
        elif suffix == "py":
            userdata = pyutilib.misc.import_file(data.options.data.files[0], clear_cache=True)
            if "modeldata" in dir(userdata):
                if len(ep) == 1:
                    msg = "Cannot apply 'pyomo_create_modeldata' and use the" \
                          " 'modeldata' object that is provided in the model"
                    raise SystemExit(msg)

                if userdata.modeldata is None:
                    msg = "'modeldata' object is 'None' in module %s"
                    raise SystemExit(msg % str( data.options.data.files[0] ))

                modeldata=userdata.modeldata

            else:
                if len(ep) == 0:
                    msg = "Neither 'modeldata' nor 'pyomo_create_dataportal' "  \
                          'is defined in module %s'
                    raise SystemExit(msg % str( data.options.data.files[0] ))

            modeldata.read(model)
            instance = model.create_instance(modeldata,
                                             namespaces=data.options.data.namespaces,
                                             profile_memory=data.options.runtime.profile_memory,
                                             report_timing=data.options.runtime.report_timing)
        elif suffix == "yml" or suffix == 'yaml':
            try:
                import yaml
            except ImportError:
                msg = "Cannot load data from a YAML file: PyYAML is not installed"
                raise SystemExit(msg)

            with open(data.options.data.files[0]) as f:
                modeldata = yaml.load(f)
            instance = model.create_instance(modeldata,
                                             namespaces=data.options.data.namespaces,
                                             profile_memory=data.options.runtime.profile_memory,
                                             report_timing=data.options.runtime.report_timing)
        else:
            raise ValueError("Unknown data file type: "+data.options.data.files[0])
    else:
        instance = model.create_instance(modeldata,
                                         namespaces=data.options.data.namespaces,
                                         profile_memory=data.options.runtime.profile_memory,
                                         report_timing=data.options.runtime.report_timing)

    #
    modify_start_time = time.time()
    for ep in ExtensionPoint(IPyomoScriptModifyInstance):
        if data.options.runtime.report_timing is True:
            tick = time.time()
        ep.apply( options=data.options, model=model, instance=instance )
        if data.options.runtime.report_timing is True:
            print("      %6.2f seconds to apply %s" % (time.time() - tick, type(ep)))
            tick = time.time()
    #
    for transformation in data.options.transform:
        with TransformationFactory(transformation) as xfrm:
            instance = xfrm.create_using(instance)
            if instance is None:
                raise SystemExit("Unexpected error while applying "
                                 "transformation '%s'" % transformation)
    #
    if data.options.runtime.report_timing is True:
        total_time = time.time() - modify_start_time
        print("      %6.2f seconds required for problem transformations" % total_time)

    if logger.isEnabledFor(logging.DEBUG):
        print("MODEL INSTANCE")
        instance.pprint()
        print("")

    for ep in ExtensionPoint(IPyomoScriptPrintInstance):
        ep.apply( options=data.options, instance=instance )

    fname=None
    smap_id=None
    if data.options.model.save_file is not None:

        if data.options.runtime.report_timing is True:
            write_start_time = time.time()

        if data.options.model.save_file == True:
            if data.local.model_format in (ProblemFormat.cpxlp, ProblemFormat.lpxlp):
                fname = (data.options.data.files[0])[:-3]+'lp'
            else:
                fname = (data.options.data.files[0])[:-3]+str(data.local.model_format)
            format=data.local.model_format
        else:
            fname = data.options.model.save_file
            format= data.options.model.save_format

        io_options = {}
        if data.options.model.symbolic_solver_labels:
            io_options['symbolic_solver_labels'] = True
        if data.options.model.file_determinism != 1:
            io_options['file_determinism'] = data.options.model.file_determinism
        (fname, smap_id) = instance.write(filename=fname,
                                          format=format,
                                          io_options=io_options)

        if not data.options.runtime.logging == 'quiet':
            if not os.path.exists(fname):
                print("ERROR: file "+fname+" has not been created!")
            else:
                print("Model written to file '"+str(fname)+"'")

        if data.options.runtime.report_timing is True:
            total_time = time.time() - write_start_time
            print("      %6.2f seconds required to write file" % total_time)

        if (pympler_available is True) and (data.options.runtime.profile_memory >= 2):
            print("")
            print("      Summary of objects following file output")
            post_file_output_summary = summary.summarize(muppy.get_objects())
            summary.print_(post_file_output_summary, limit=100)

            print("")

    for ep in ExtensionPoint(IPyomoScriptSaveInstance):
        ep.apply( options=data.options, instance=instance )

    if (pympler_available is True) and (data.options.runtime.profile_memory >= 1):
        mem_used = muppy.get_size(muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print("   Total memory = %d bytes following Pyomo instance creation" % mem_used)

    return pyutilib.misc.Options(
                    model=model, instance=instance,
                    smap_id=smap_id, filename=fname, local=data.local )
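
For reference, the branch above that looks for a 'model' object in data.local.usermodel accepts a user model file along the lines of the sketch below; the component names are hypothetical.  When no such object exists, an IPyomoScriptCreateModel plugin supplying the construction callback is consulted instead.

# Minimal user model file that create_model() would pick up directly
# (set, parameter, and variable names are illustrative only).
from pyomo.environ import (AbstractModel, Set, Param, Var,
                           Objective, NonNegativeReals)

model = AbstractModel()
model.A = Set()
model.c = Param(model.A)
model.x = Var(model.A, within=NonNegativeReals)

def obj_rule(m):
    return sum(m.c[i] * m.x[i] for i in m.A)
model.obj = Objective(rule=obj_rule)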
Exemplo n.º 13
0
    def construct_scenario_instance(self,
                                    scenario_name,
                                    scenario_tree,
                                    profile_memory=False,
                                    output_instance_construction_time=False,
                                    compile_instance=False,
                                    verbose=False):
        assert not self._closed
        if not scenario_tree.contains_scenario(scenario_name):
            raise ValueError("ScenarioTree does not contain scenario "
                             "with name %s." % (scenario_name))

        scenario = scenario_tree.get_scenario(scenario_name)
        node_name_list = [n._name for n in scenario._node_list]

        if verbose:
            print("Creating instance for scenario=%s" % (scenario_name))

        scenario_instance = None

        try:

            if self._model_callback is not None:

                assert self._model_object is None
                try:
                    _scenario_tree_arg = None
                    # new callback signature
                    if (self._scenario_tree_filename is not None) and \
                       self._scenario_tree_filename.endswith('.dat'):
                        # we started with a .dat file, so
                        # send the PySP scenario tree
                        _scenario_tree_arg = scenario_tree
                    elif self._scenario_tree_model is not None:
                        # We started from a Pyomo
                        # scenario tree model instance, or a
                        # networkx tree.
                        _scenario_tree_arg = self._scenario_tree_model
                    else:
                        # send the PySP scenario tree
                        _scenario_tree_arg = scenario_tree
                    scenario_instance = self._model_callback(
                        _scenario_tree_arg, scenario_name, node_name_list)
                except TypeError:
                    # old callback signature
                    # TODO:
                    #logger.warning(
                    #    "DEPRECATED: The 'pysp_instance_creation_callback' function "
                    #    "signature has changed. An additional argument should be "
                    #    "added to the beginning of the arguments list that will be "
                    #    "set to the user provided scenario tree object when called "
                    #    "by PySP (e.g., a Pyomo scenario tree model instance, "
                    #    "a networkx tree, or a PySP ScenarioTree object.")
                    scenario_instance = self._model_callback(
                        scenario_name, node_name_list)

            elif self._model_object is not None:

                if (not isinstance(self._model_object, AbstractModel)) or \
                   (self._model_object.is_constructed()):
                    scenario_instance = self._model_object.clone()
                elif scenario_tree._scenario_based_data:
                    assert self.data_directory() is not None
                    scenario_data_filename = \
                        os.path.join(self.data_directory(),
                                     str(scenario_name))
                    # JPW: The following is a hack to support
                    #      initialization of block instances, which
                    #      don't work with .dat files at the
                    #      moment. Actually, it's not that bad of a
                    #      hack - it just needs to be extended a bit,
                    #      and expanded into the node-based data read
                    #      logic (where yaml is completely ignored at
                    #      the moment).
                    if os.path.exists(scenario_data_filename + '.dat'):
                        scenario_data_filename = \
                            scenario_data_filename + ".dat"
                        data = None
                    elif os.path.exists(scenario_data_filename + '.yaml'):
                        if not has_yaml:
                            raise ValueError(
                                "Found yaml data file for scenario '%s' "
                                "but he PyYAML module is not available" %
                                (scenario_name))
                        scenario_data_filename = \
                            scenario_data_filename+".yaml"
                        with open(scenario_data_filename) as f:
                            data = yaml.load(f)
                    else:
                        raise RuntimeError(
                            "Cannot find a data file for scenario '%s' "
                            "in directory: %s\nRecognized formats: .dat, "
                            ".yaml" % (scenario_name, self.data_directory()))
                    if verbose:
                        print("Data for scenario=%s loads from file=%s" %
                              (scenario_name, scenario_data_filename))
                    if data is None:
                        scenario_instance = \
                            self._model_object.create_instance(
                                filename=scenario_data_filename,
                                profile_memory=profile_memory,
                                report_timing=output_instance_construction_time)
                    else:
                        scenario_instance = \
                            self._model_object.create_instance(
                                data,
                                profile_memory=profile_memory,
                                report_timing=output_instance_construction_time)
                else:
                    assert self.data_directory() is not None
                    data_files = []
                    for node_name in node_name_list:
                        node_data_filename = \
                            os.path.join(self.data_directory(),
                                         str(node_name)+".dat")
                        if not os.path.exists(node_data_filename):
                            raise RuntimeError(
                                "Cannot find a data file for scenario tree "
                                "node '%s' in directory: %s\nRecognized "
                                "formats: .dat" %
                                (node_name, self.data_directory()))
                        data_files.append(node_data_filename)

                    scenario_data = DataPortal(model=self._model_object)
                    for data_file in data_files:
                        if verbose:
                            print("Node data for scenario=%s partially "
                                  "loading from file=%s" %
                                  (scenario_name, data_file))
                        scenario_data.load(filename=data_file)

                    scenario_instance = self._model_object.create_instance(
                        scenario_data,
                        profile_memory=profile_memory,
                        report_timing=output_instance_construction_time)
            else:
                raise RuntimeError("Unable to construct scenario instance. "
                                   "Neither a reference model or callback "
                                   "is defined.")

            # name each instance with the scenario name
            scenario_instance._name = scenario_name

            # apply each of the post-instance creation plugins. this
            # really shouldn't be associated (in terms of naming) with the
            # pyomo script - this should be rectified with a workflow
            # re-work. it is unclear how this interacts, or doesn't, with
            # the preprocessors.
            for ep in ExtensionPoint(IPyomoScriptModifyInstance):
                logger.warning(
                    "DEPRECATED: IPyomoScriptModifyInstance extension "
                    "point callbacks will be ignored by PySP in the future")
                ep.apply(options=None,
                         model=reference_model,
                         instance=scenario_instance)

            if compile_instance:
                from pyomo.repn.beta.matrix import \
                    compile_block_linear_constraints
                compile_block_linear_constraints(
                    scenario_instance,
                    "_PySP_compiled_linear_constraints",
                    verbose=verbose)

        except:
            logger.error("Failed to create model instance for scenario=%s" %
                         (scenario_name))
            raise

        return scenario_instance
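
The try/except around the model callback above distinguishes the newer three-argument signature from the legacy two-argument one.  The sketch below shows the newer form; the model body is a placeholder.

# Hedged sketch of the newer callback signature (contents illustrative only).
from pyomo.environ import ConcreteModel, Var, Objective, NonNegativeReals

def pysp_instance_creation_callback(scenario_tree, scenario_name, node_names):
    # scenario_tree is whatever tree object was handed to PySP: a PySP
    # ScenarioTree, a Pyomo scenario tree model instance, or a networkx tree.
    instance = ConcreteModel()
    instance.x = Var(within=NonNegativeReals)
    instance.obj = Objective(expr=instance.x)
    return instance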
Exemplo n.º 14
0
def EFAlgorithmBuilder(options, scenario_tree):

    solution_writer_plugins = ExtensionPoint(ISolutionWriterExtension)
    for plugin in solution_writer_plugins:
        plugin.disable()

    solution_plugins = []
    if len(options.solution_writer) > 0:
        for this_extension in options.solution_writer:
            if this_extension in sys.modules:
                print("User-defined EF solution writer module="
                      +this_extension+" already imported - skipping")
            else:
                print("Trying to import user-defined EF "
                      "solution writer module="+this_extension)
                # make sure "." is in the PATH.
                original_path = list(sys.path)
                sys.path.insert(0,'.')
                pyutilib.misc.import_file(this_extension)
                print("Module successfully loaded")
                sys.path[:] = original_path # restore to what it was

            # now that we're sure the module is loaded, re-enable this
            # specific plugin.  recall that all plugins are disabled
            # by default in phinit.py, for various reasons. if we want
            # them to be picked up, we need to enable them explicitly.
            import inspect
            module_to_find = this_extension
            if module_to_find.endswith(".py"):
                module_to_find = module_to_find[:-len(".py")]
            if module_to_find.find("/") != -1:
                module_to_find = module_to_find.split("/")[-1]

            for name, obj in inspect.getmembers(sys.modules[module_to_find], inspect.isclass):
                import pyomo.util
                # the second condition gets around goofiness related to issubclass returning
                # True when the obj is the same as the test class.
                if issubclass(obj, pyomo.util.plugin.SingletonPlugin) and name != "SingletonPlugin":
                    for plugin in solution_writer_plugins(all=True):
                        if isinstance(plugin, obj):
                            plugin.enable()
                            solution_plugins.append(plugin)

    ef_solver = SolverFactory(options.solver_type,
                              solver_io=options.solver_io)
    if isinstance(ef_solver, UnknownSolver):
        raise ValueError("Failed to create solver of type="+
                         options.solver_type+
                         " for use in extensive form solve")
    if len(options.solver_options) > 0:
        print("Initializing ef solver with options="
              +str(options.solver_options))
        ef_solver.set_options(" ".join(options.solver_options))
    if options.mipgap is not None:
        if (options.mipgap < 0.0) or (options.mipgap > 1.0):
            raise ValueError("Value of the mipgap parameter for the EF "
                             "solve must be on the unit interval; "
                             "value specified="+str(options.mipgap))
        ef_solver.options.mipgap = float(options.mipgap)

    ef_solver_manager = SolverManagerFactory(options.solver_manager_type,
                                             host=options.pyro_host,
                                             port=options.pyro_port)
    if ef_solver_manager is None:
        raise ValueError("Failed to create solver manager of type="
                         +options.solver_manager_type+
                         " for use in extensive form solve")

    binding_instance = CreateExtensiveFormInstance(options, scenario_tree)

    ef = ExtensiveFormAlgorithm(options,
                                binding_instance,
                                scenario_tree,
                                ef_solver_manager,
                                ef_solver,
                                solution_plugins=solution_plugins)

    return ef
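
The module-loading loop above only enables plugins that implement ISolutionWriterExtension.  A hypothetical writer of that kind is sketched below; the write() signature mirrors PySP's bundled writers, and the scenario-tree attribute access is an assumption.

# Hedged sketch of a user-defined solution writer (scenario attributes such
# as _scenarios, _name, and _cost are assumptions).
from pyomo.util.plugin import SingletonPlugin, implements
from pyomo.pysp import solutionwriter

class ScenarioCostWriter(SingletonPlugin):
    implements(solutionwriter.ISolutionWriterExtension)

    def write(self, scenario_tree, output_file_prefix):
        # One line per scenario with its objective cost.
        with open(output_file_prefix + "_costs.txt", "w") as f:
            for scenario in scenario_tree._scenarios:
                f.write("%s %s\n" % (scenario._name, scenario._cost))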
Exemplo n.º 15
0
    def type(self):
        return self._type

    def create(self, args):
        if self._swap:
            args = list(args)
            args.reverse()
        return self._cls(args)

def ExpressionFactory(name=None, args=[]):
    ep = ExpressionFactory.ep
    if name is None:
        return map(lambda x: x.name, ep())
    return ep.service(name).create(args)
ExpressionFactory.ep = ExtensionPoint(IPyomoExpression)
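
A brief usage sketch of the factory above ('sum' is a hypothetical registered alias):

# Calling the factory with no name lists the registered expression names;
# calling it with a name and arguments dispatches to that plugin's create().
registered = list(ExpressionFactory())
if 'sum' in registered:
    expr = ExpressionFactory('sum', [1, 2])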


class IModelComponent(Interface):
    pass

ModelComponentFactory = CreatePluginFactory(IModelComponent)

def register_component(cls, description):
    class TMP(Plugin):
        implements(IModelComponent, service=False)
        alias(cls.__name__, description)
        component = cls
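
A hedged usage sketch of register_component; MyComponent below is a placeholder class, shown only to illustrate how a class gets aliased under the IModelComponent interface.

# Hypothetical registration (the component class is a stand-in).
class MyComponent(object):
    """Stand-in for a real Pyomo modeling component class."""

register_component(MyComponent, "An example user-defined component.")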


class IDataManager(Interface):