Example No. 1
0
    def _setHistory(self, probname, store_hst, hot_start, def_fname):
        '''
        Set up the optimizer history (logging) and hot-start (replay) files.

        **Arguments:**

        - probname  -> STR: Optimization problem name
        - store_hst -> BOOL/STR: Flag/filename to store optimization history
        - hot_start -> BOOL/STR: Flag/filename to read optimization history
        - def_fname -> STR: Default file name

        Returns (hos_file, log_file, tmp_file): the read handle, the write
        handle, and a flag telling the caller that the log was written to a
        temporary '_tmp' file because reading and writing target the same
        file name.

        Documentation last updated:  Oct. 12, 2011 - Peter W. Jansen
        '''

        hos_file = None
        log_file = None
        tmp_file = False

        # Only the root process touches history files.
        if self.myrank != 0:
            self.sto_hst = False
            self.hot_start = False
            return hos_file, log_file, tmp_file

        # Resolve the file name used for storing (None -> no storing).
        if isinstance(store_hst, str):
            store_name = store_hst
        elif store_hst:
            store_name = def_fname
        else:
            store_name = None

        if store_name is not None:
            if isinstance(hot_start, str):
                # Hot start from an explicit file while also storing.
                hos_file = History(hot_start, 'r', self)
                if hot_start == store_name:
                    # Same file for read and write: log to a temporary file
                    # so the caller can swap it in afterwards.
                    log_file = History(store_name + '_tmp', 'w', self,
                                       probname)
                    tmp_file = True
                else:
                    log_file = History(store_name, 'w', self, probname)
                self.sto_hst = True
                self.hot_start = True
            elif hot_start:
                # Boolean hot start: replay the same file we store to, so a
                # temporary log file is always required.
                hos_file = History(store_name, 'r', self)
                log_file = History(store_name + '_tmp', 'w', self, probname)
                self.sto_hst = True
                self.hot_start = True
                tmp_file = True
            else:
                # Storing only, no hot start.
                log_file = History(store_name, 'w', self, probname)
                self.sto_hst = True
                self.hot_start = False
        else:
            # Not storing; optionally hot start from a file.
            if isinstance(hot_start, str):
                hos_file = History(hot_start, 'r', self)
                self.hot_start = True
            elif hot_start:
                hos_file = History(def_fname, 'r', self)
                self.hot_start = True
            else:
                self.hot_start = False
            self.sto_hst = False

        return hos_file, log_file, tmp_file
Example No. 2
0
    def __solve__(
        self,
        opt_problem={},
        sens_type="FD",
        store_sol=True,
        disp_opts=False,
        store_hst=False,
        hot_start=False,
        sens_mode="",
        sens_step={},
        *args,
        **kwargs
    ):
        """
        Run Optimizer (Optimize Routine)

        **Keyword arguments:**

        - opt_problem -> INST: Optimization instance
        - sens_type -> STR/FUNC: Gradient type, *Default* = 'FD'
        - store_sol -> BOOL: Store solution in Optimization class flag,
          *Default* = True
        - disp_opts -> BOOL: Flag to display options in solution text, *Default*
          = False
        - store_hst -> BOOL/STR: Flag/filename to store optimization history,
          *Default* = False
        - hot_start -> BOOL/STR: Flag/filename to read optimization history,
          *Default* = False
        - sens_mode -> STR: Flag for parallel gradient calculation, *Default* =
          ''
        - sens_step -> FLOAT: Sensitivity step size, *Default* = {} [corresponds
          to 1e-6 (FD), 1e-20 (CS)]

        Returns (ff, xx, sol_inform): the initial objective value array, the
        design vector handed to IPOPT, and the solver status dictionary.

        Documentation last updated:  Feb. 2, 2011 - Peter W. Jansen
        """

        # Serial run only: rank is hard-wired to 0 (no MPI in this path).
        self.pll = False
        self.myrank = 0

        myrank = self.myrank

        # ------------------------------------------------------------------
        # History / hot-start file handling
        # ------------------------------------------------------------------
        tmp_file = False
        def_fname = self.options["output_file"][1].split(".")[0]
        if isinstance(store_hst, str):
            if isinstance(hot_start, str):
                if myrank == 0:
                    if store_hst == hot_start:
                        # Read and write target the same file: log to a
                        # temporary '_tmp' file and swap it in after the run.
                        hos_file = History(hot_start, "r", self)
                        log_file = History(store_hst + "_tmp", "w", self, opt_problem.name)
                        tmp_file = True
                    else:
                        hos_file = History(hot_start, "r", self)
                        log_file = History(store_hst, "w", self, opt_problem.name)
                self.sto_hst = True
                self.h_start = True
            elif hot_start:
                # Boolean hot start: replay the file we also store to.
                if myrank == 0:
                    hos_file = History(store_hst, "r", self)
                    log_file = History(store_hst + "_tmp", "w", self, opt_problem.name)
                    tmp_file = True
                self.sto_hst = True
                self.h_start = True
            else:
                if myrank == 0:
                    log_file = History(store_hst, "w", self, opt_problem.name)
                self.sto_hst = True
                self.h_start = False
        elif store_hst:
            if isinstance(hot_start, str):
                if hot_start == def_fname:
                    if myrank == 0:
                        hos_file = History(hot_start, "r", self)
                        log_file = History(def_fname + "_tmp", "w", self, opt_problem.name)
                        tmp_file = True
                else:
                    if myrank == 0:
                        hos_file = History(hot_start, "r", self)
                        log_file = History(def_fname, "w", self, opt_problem.name)
                self.sto_hst = True
                self.h_start = True
            elif hot_start:
                if myrank == 0:
                    hos_file = History(def_fname, "r", self)
                    log_file = History(def_fname + "_tmp", "w", self, opt_problem.name)
                    tmp_file = True
                self.sto_hst = True
                self.h_start = True
            else:
                if myrank == 0:
                    log_file = History(def_fname, "w", self, opt_problem.name)
                self.sto_hst = True
                self.h_start = False
        else:
            self.sto_hst = False
            self.h_start = False

        # Gradient evaluator (finite-difference, complex-step or user).
        gradient = Gradient(opt_problem, sens_type, sens_mode, sens_step, *args, **kwargs)

        def eval_f(x, user_data=None):
            """IPOPT objective callback: return the (real) objective at x."""
            # Variables Groups Handling: present x to the user function with
            # the group structure the problem was defined with.
            if opt_problem.use_groups:
                xg = {}
                for group in group_ids.keys():
                    if group_ids[group][1] - group_ids[group][0] == 1:
                        xg[group] = x[group_ids[group][0]]
                    else:
                        xg[group] = x[group_ids[group][0] : group_ids[group][1]]
                xn = xg
            else:
                xn = x

            # Flush Output Files
            self.flushFiles()

            # Evaluate User Function
            # NOTE(review): hot-start replay from a stored history file is
            # disabled for this optimizer; the function is always re-evaluated.
            [ff, gg, fail] = opt_problem.obj_fun(xn, *args, **kwargs)

            # Store History (root process only)
            if myrank == 0:
                if self.sto_hst:
                    log_file.write(x, "x")
                    log_file.write(ff, "obj")
                    log_file.write(gg, "con")
                    log_file.write(fail, "fail")

            # Objective Assignment -- reduce a complex objective (e.g. from
            # complex-step sensitivities) to its real value.
            # NOTE(review): .astype assumes a numpy complex scalar -- a plain
            # Python complex has no .astype; TODO confirm upstream types.
            if isinstance(ff, complex):
                f = ff.astype(float)
            else:
                f = ff

            return f

        def eval_g(x, user_data=None):
            """IPOPT constraint callback: return the constraint vector at x."""
            # Variables Groups Handling
            if opt_problem.use_groups:
                xg = {}
                for group in group_ids.keys():
                    if group_ids[group][1] - group_ids[group][0] == 1:
                        xg[group] = x[group_ids[group][0]]
                    else:
                        xg[group] = x[group_ids[group][0] : group_ids[group][1]]
                xn = xg
            else:
                xn = x

            # Flush Output Files
            self.flushFiles()

            # Evaluate User Function (hot-start replay disabled, see eval_f).
            [ff, gg, fail] = opt_problem.obj_fun(xn, *args, **kwargs)

            # Store History (root process only)
            if myrank == 0:
                if self.sto_hst:
                    log_file.write(x, "x")
                    log_file.write(ff, "obj")
                    log_file.write(gg, "con")
                    log_file.write(fail, "fail")

            # Constraints Assignment -- reduce complex entries to real values.
            ncon_l = len(opt_problem._constraints.keys())
            g = numpy.zeros(ncon_l)
            for i in range(ncon_l):
                if isinstance(gg[i], complex):
                    g[i] = gg[i].astype(float)
                else:
                    g[i] = gg[i]

            return g

        def eval_grad_f(x, user_data=None):
            """IPOPT objective-gradient callback: return df/dx as a 1-D array."""
            # Re-evaluate the functions, then differentiate.
            [f, g, fail] = opt_problem.obj_fun(x, *args, **kwargs)
            dff, dgg = gradient.getGrad(x, group_ids, [f], g, *args, **kwargs)

            # Store History
            if self.sto_hst and (myrank == 0):
                log_file.write(dff, "grad_obj")
                log_file.write(dgg, "grad_con")

            # Gradient Assignment (single objective: first row of dff).
            nvar_l = len(opt_problem._variables.keys())
            df = numpy.zeros(nvar_l)
            for i in range(nvar_l):
                df[i] = dff[0, i]

            return df

        def eval_grad_g(x, flag, user_data=None):
            """IPOPT constraint-Jacobian callback.

            With flag=True return the dense (row, col) sparsity pattern;
            otherwise return the Jacobian values in the same flat order.
            """
            nvar_l = len(opt_problem._variables.keys())
            ncon_l = len(opt_problem._constraints.keys())

            if flag:
                # Dense pattern: flat entry i*nvar + j maps to (row i, col j).
                a = numpy.zeros(nvar_l * ncon_l, int)
                b = numpy.zeros(nvar_l * ncon_l, int)
                for i in range(ncon_l):
                    for j in range(nvar_l):
                        a[i * nvar_l + j] = i
                        b[i * nvar_l + j] = j
                return (a, b)
            else:
                [f, g, fail] = opt_problem.obj_fun(x, *args, **kwargs)
                dff, dgg = gradient.getGrad(x, group_ids, [f], g, *args, **kwargs)

                # Store History
                if self.sto_hst and (myrank == 0):
                    log_file.write(dff, "grad_obj")
                    log_file.write(dgg, "grad_con")

                # Flatten the Jacobian row-major to match the pattern above.
                a = numpy.zeros([nvar_l * ncon_l])
                for i in range(ncon_l):
                    for j in range(nvar_l):
                        a[i * nvar_l + j] = dgg[i, j]

                return a

        # ------------------------------------------------------------------
        # Variables Handling (continuous only; IPOPT is a continuous solver)
        # ------------------------------------------------------------------
        nvar = len(opt_problem._variables.keys())
        xl = []
        xu = []
        xx = []
        for key in opt_problem._variables.keys():
            if opt_problem._variables[key].type == "c":
                xl.append(opt_problem._variables[key].lower)
                xu.append(opt_problem._variables[key].upper)
                xx.append(opt_problem._variables[key].value)
            elif opt_problem._variables[key].type == "i":
                raise IOError("IPOPT cannot handle integer design variables")
            elif opt_problem._variables[key].type == "d":
                raise IOError("IPOPT cannot handle discrete design variables")
        xl = numpy.array(xl)
        xu = numpy.array(xu)
        xx = numpy.array(xx)

        # Variables Groups Handling: map group name -> [start, end) slice of x.
        group_ids = {}
        if opt_problem.use_groups:
            k = 0
            for key in opt_problem._vargroups.keys():
                group_len = len(opt_problem._vargroups[key]["ids"])
                group_ids[opt_problem._vargroups[key]["name"]] = [k, k + group_len]
                k += group_len

        # Constraints Handling: equality bounds collapse to a single value.
        ncon = len(opt_problem._constraints.keys())
        blc = []
        buc = []
        if ncon > 0:
            for key in opt_problem._constraints.keys():
                if opt_problem._constraints[key].type == "e":
                    blc.append(opt_problem._constraints[key].equal)
                    buc.append(opt_problem._constraints[key].equal)
                elif opt_problem._constraints[key].type == "i":
                    blc.append(opt_problem._constraints[key].lower)
                    buc.append(opt_problem._constraints[key].upper)
        else:
            # No constraints: give IPOPT one unbounded dummy constraint.
            if (store_sol) and (myrank == 0):
                print("Optimization Problem Does Not Have Constraints\n")
                print("Unconstrained Optimization Initiated\n")
            ncon = 1
            blc.append(-inf)
            buc.append(inf)
        blc = numpy.array(blc)
        buc = numpy.array(buc)

        # Objective Handling
        objfunc = opt_problem.obj_fun
        ff = []
        for key in opt_problem._objectives.keys():
            ff.append(opt_problem._objectives[key].value)
        ff = numpy.array(ff)

        # Create an IPOPT instance problem (dense Jacobian/Hessian sizing).
        nnzj = nvar * ncon
        nnzh = nvar * nvar
        ipopt = pyipopt.create(nvar, xl, xu, ncon, blc, buc, nnzj, nnzh, eval_f, eval_grad_f, eval_g, eval_grad_g)

        # Setup Options: forward only options that differ from their defaults,
        # dispatched by the declared option type.
        opts = self.options.copy()
        del opts["defaults"]
        for name in opts:
            if self.options["defaults"][name][1] != opts[name][1]:
                opt_type = self.options[name][0].__name__
                if opt_type == "int":
                    ipopt.int_option(name, self.options[name][1])
                if opt_type == "float":
                    ipopt.num_option(name, self.options[name][1])
                if opt_type == "str":
                    ipopt.str_option(name, self.options[name][1])

        # Run IPOPT
        t0 = time.time()
        r = ipopt.solve(xx)
        sol_time = time.time() - t0

        # Close the history log; when a temporary log file was used, replace
        # the original history (.cue/.bin pair) with it.
        if myrank == 0:
            if self.sto_hst:
                log_file.close()
                if tmp_file:
                    hos_file.close()
                    name = hos_file.filename
                    os.remove(name + ".cue")
                    os.remove(name + ".bin")
                    os.rename(name + "_tmp.cue", name + ".cue")
                    os.rename(name + "_tmp.bin", name + ".bin")

        ipopt.close()

        # Store Results
        sol_inform = {}
        print(r)  # NOTE(review): raw solver result tuple echoed for debugging
        sol_inform["value"] = r[-1]
        sol_inform["text"] = self.getInform(r[-1])

        if store_sol:
            sol_name = "IPOPT Solution to " + opt_problem.name

            sol_options = copy.copy(self.options)
            # Fix: the original checked for the key "default" while deleting
            # "defaults", so the defaults entry leaked through to addSol.
            if "defaults" in sol_options:
                del sol_options["defaults"]

            sol_evals = 0

            # Copy the optimum design variables back into variable objects.
            sol_vars = copy.deepcopy(opt_problem._variables)
            x = r[0]
            for i, key in enumerate(sol_vars.keys()):
                sol_vars[key].value = x[i]

            sol_objs = copy.deepcopy(opt_problem._objectives)
            sol_objs[0].value = r[4]

            sol_cons = {}

            if ncon > 0:
                sol_lambda = r[3]
            else:
                sol_lambda = {}

            opt_problem.addSol(
                self.__class__.__name__,
                sol_name,
                objfunc,
                sol_time,
                sol_evals,
                sol_inform,
                sol_vars,
                sol_objs,
                sol_cons,
                sol_options,
                display_opts=disp_opts,
                Lambda=sol_lambda,
                Sensitivities=sens_type,
                myrank=myrank,
                arguments=args,
                **kwargs
            )

        return ff, xx, sol_inform
Example No. 3
0
    def __solve__(self,
                  opt_problem={},
                  sens_type='FD',
                  store_sol=True,
                  disp_opts=False,
                  store_hst=False,
                  hot_start=False,
                  sens_mode='',
                  sens_step={},
                  *args,
                  **kwargs):
        """
        Run Optimizer (Optimize Routine)

        **Keyword arguments:**

        - opt_problem -> INST: Optimization instance
        - sens_type -> STR/FUNC: Gradient type, *Default* = 'FD'
        - store_sol -> BOOL: Store solution in Optimization class flag,
          *Default* = True
        - disp_opts -> BOOL: Flag to display options in solution text, *Default*
          = False
        - store_hst -> BOOL/STR: Flag/filename to store optimization history,
          *Default* = False
        - hot_start -> BOOL/STR: Flag/filename to read optimization history,
          *Default* = False
        - sens_mode -> STR: Flag for parallel gradient calculation, *Default* =
          ''
        - sens_step -> FLOAT: Sensitivity setp size, *Default* = {} [corresponds
          to 1e-6 (FD), 1e-20(CS)]

        Documentation last updated:  Feb. 2, 2011 - Peter W. Jansen
        """

        self.pll = False
        self.myrank = 0

        myrank = self.myrank

        tmp_file = False
        def_fname = self.options['output_file'][1].split('.')[0]
        if isinstance(store_hst, str):
            if isinstance(hot_start, str):
                if (myrank == 0):
                    if (store_hst == hot_start):
                        hos_file = History(hot_start, 'r', self)
                        log_file = History(store_hst + '_tmp', 'w', self,
                                           opt_problem.name)
                        tmp_file = True
                    else:
                        hos_file = History(hot_start, 'r', self)
                        log_file = History(store_hst, 'w', self,
                                           opt_problem.name)

                self.sto_hst = True
                self.hot_start = True
            elif hot_start:
                if (myrank == 0):
                    hos_file = History(store_hst, 'r', self)
                    log_file = History(store_hst + '_tmp', 'w', self,
                                       opt_problem.name)
                    tmp_file = True

                self.sto_hst = True
                self.hot_start = True
            else:
                if (myrank == 0):
                    log_file = History(store_hst, 'w', self, opt_problem.name)

                self.sto_hst = True
                self.hot_start = False

        elif store_hst:
            if isinstance(hot_start, str):
                if (hot_start == def_fname):
                    if (myrank == 0):
                        hos_file = History(hot_start, 'r', self)
                        log_file = History(def_fname + '_tmp', 'w', self,
                                           opt_problem.name)
                        tmp_file = True

                else:
                    if (myrank == 0):
                        hos_file = History(hot_start, 'r', self)
                        log_file = History(def_fname, 'w', self,
                                           opt_problem.name)

                self.sto_hst = True
                self.hot_start = True
            elif hot_start:
                if (myrank == 0):
                    hos_file = History(def_fname, 'r', self)
                    log_file = History(def_fname + '_tmp', 'w', self,
                                       opt_problem.name)
                    tmp_file = True

                self.sto_hst = True
                self.hot_start = True
            else:
                if (myrank == 0):
                    log_file = History(def_fname, 'w', self, opt_problem.name)

                self.sto_hst = True
                self.hot_start = False

        else:
            self.sto_hst = False
            self.hot_start = False

        gradient = Gradient(opt_problem, sens_type, sens_mode, sens_step,
                            *args, **kwargs)

        def eval_f(x, user_data=None):
            """IPOPT objective callback.

            Maps the flat design vector ``x`` into the user's variable groups
            (when enabled), evaluates the user objective/constraint function,
            logs the iterate on the root process, and returns the objective.
            """
            # Variables Groups Handling: rebuild the per-group view of x so
            # the user function sees the structure it was defined with.
            if opt_problem.use_groups:
                xg = {}
                for group in group_ids.keys():
                    if (group_ids[group][1] - group_ids[group][0] == 1):
                        xg[group] = x[group_ids[group][0]]
                    else:
                        xg[group] = x[group_ids[group][0]:group_ids[group][1]]

                xn = xg
            else:
                xn = x

            # Flush Output Files
            self.flushFiles()

            # Evaluate User Function
            fail = 0
            # NOTE(review): hot-start replay (reading a stored history file
            # instead of re-evaluating) is disabled here; the original logic
            # is kept below for reference.
            # if (myrank == 0):
            #    if self.hot_start:
            #        [vals,hist_end] = hos_file.read(ident=['obj', 'con', 'fail'])
            #        if hist_end:
            #            self.hot_start = False
            #            hos_file.close()
            #        else:
            #            [ff,gg,fail] = [vals['obj'][0][0],vals['con'][0],int(vals['fail'][0][0])]
            # if self.pll:
            #    self.hot_start = Bcast(self.hot_start,root=0)
            # if self.hot_start and self.pll:
            #    [ff,gg,fail] = Bcast([ff,gg,fail],root=0)
            # else:
            [ff, gg, fail] = opt_problem.obj_fun(xn, *args, **kwargs)

            # Store History (root process only)
            if (myrank == 0):
                if self.sto_hst:
                    log_file.write(x, 'x')
                    log_file.write(ff, 'obj')
                    log_file.write(gg, 'con')
                    log_file.write(fail, 'fail')

            # Objective Assignment -- reduce a complex objective (e.g. from
            # complex-step sensitivities) to a real value.
            # NOTE(review): .astype assumes a numpy complex scalar; a plain
            # Python complex has no .astype -- TODO confirm upstream types.
            if isinstance(ff, complex):
                f = ff.astype(float)
            else:
                f = ff

            # Constraints Assignment
            # NOTE(review): g is built but never used here; constraint values
            # are delivered to IPOPT via eval_g instead.
            g = numpy.zeros(len(opt_problem._constraints.keys()))
            for i in range(len(opt_problem._constraints.keys())):
                if isinstance(gg[i], complex):
                    g[i] = gg[i].astype(float)
                else:
                    g[i] = gg[i]

            return f

        def eval_g(x, user_data=None):
            """IPOPT constraint callback.

            Same evaluation path as eval_f, but returns the constraint
            vector as a real-valued numpy array.
            """
            # Variables Groups Handling: rebuild the per-group view of x.
            if opt_problem.use_groups:
                xg = {}
                for group in group_ids.keys():
                    if (group_ids[group][1] - group_ids[group][0] == 1):
                        xg[group] = x[group_ids[group][0]]
                    else:
                        xg[group] = x[group_ids[group][0]:group_ids[group][1]]

                xn = xg
            else:
                xn = x

            # Flush Output Files
            self.flushFiles()

            # Evaluate User Function
            fail = 0
            # NOTE(review): hot-start replay is disabled here; the original
            # logic is kept below for reference.
            #            if (myrank == 0):
            #                if self.hot_start:
            #                    [vals,hist_end] = hos_file.read(ident=['obj', 'con', 'fail'])
            #                    if hist_end:
            #                        self.hot_start = False
            #                        hos_file.close()
            #                    else:
            #                        [ff,gg,fail] = [vals['obj'][0][0],vals['con'][0],int(vals['fail'][0][0])]
            # if self.pll:
            #   self.hot_start = Bcast(self.hot_start,root=0)
            # if self.hot_start and self.pll:
            #    [ff,gg,fail] = Bcast([ff,gg,fail],root=0)
            # else:
            [ff, gg, fail] = opt_problem.obj_fun(xn, *args, **kwargs)

            # Store History (root process only)
            if (myrank == 0):
                if self.sto_hst:
                    log_file.write(x, 'x')
                    log_file.write(ff, 'obj')
                    log_file.write(gg, 'con')
                    log_file.write(fail, 'fail')

            # Objective Assignment
            # NOTE(review): f is computed but never used here (mirrors
            # eval_f); the objective is delivered to IPOPT via eval_f.
            if isinstance(ff, complex):
                f = ff.astype(float)
            else:
                f = ff

            # Constraints Assignment -- reduce complex entries to real values.
            g = numpy.zeros(len(opt_problem._constraints.keys()))
            for i in range(len(opt_problem._constraints.keys())):
                if isinstance(gg[i], complex):
                    g[i] = gg[i].astype(float)
                else:
                    g[i] = gg[i]

            return g

        def eval_grad_f(x, user_data=None):
            """IPOPT objective-gradient callback.

            Re-evaluates the user function at ``x``, differentiates via the
            Gradient helper (FD/CS/user-supplied), logs the gradients, and
            returns df/dx as a 1-D numpy array (first objective row).
            """
            # NOTE(review): hot-start replay of stored gradients is disabled
            # here; the original logic is kept below for reference.
            # if self.hot_start:
            #    if (myrank == 0):
            #        [vals,hist_end] = hos_file.read(ident=['grad_obj','grad_con'])
            #        if hist_end:
            #            self.hot_start = False
            #            hos_file.close()
            #        else:
            #            dff = vals['grad_obj'][0].reshape((len(opt_problem._objectives.keys()),len(opt_problem._variables.keys())))
            #            dgg = vals['grad_con'][0].reshape((len(opt_problem._constraints.keys()),len(opt_problem._variables.keys())))
            #    if self.pll:
            #        self.hot_start = Bcast(self.hot_start,root=0)
            #    if self.hot_start and self.pll:
            #        [dff,dgg] = Bcast([dff,dgg],root=0)

            [f, g, fail] = opt_problem.obj_fun(x, *args, **kwargs)
            dff, dgg = gradient.getGrad(x, group_ids, [f], g, *args, **kwargs)

            # Store History (root process only)
            if self.sto_hst and (myrank == 0):
                log_file.write(dff, 'grad_obj')
                log_file.write(dgg, 'grad_con')

            # Gradient Assignment: copy the first (single-objective) row.
            df = numpy.zeros(len(opt_problem._variables.keys()))

            for i in range(len(opt_problem._variables.keys())):
                df[i] = dff[0, i]

            return df

        def eval_grad_g(x, flag, user_data=None):
            """IPOPT constraint-Jacobian callback.

            When ``flag`` is true, returns the (row, col) index arrays of a
            dense sparsity pattern; otherwise returns the Jacobian values
            flattened row-major in the same order.
            """
            # NOTE(review): hot-start replay of stored gradients is disabled
            # here; the original logic is kept below for reference.
            # if self.hot_start:
            #    if (myrank == 0):
            #        [vals,hist_end] = hos_file.read(ident=['grad_obj','grad_con'])
            #        if hist_end:
            #            self.hot_start = False
            #            hos_file.close()
            #        else:
            #            dff = vals['grad_obj'][0].reshape((len(opt_problem._objectives.keys()),len(opt_problem._variables.keys())))
            #            dgg = vals['grad_con'][0].reshape((len(opt_problem._constraints.keys()),len(opt_problem._variables.keys())))
            #    if self.pll:
            #        self.hot_start = Bcast(self.hot_start,root=0)
            #    if self.hot_start and self.pll:
            #        [dff,dgg] = Bcast([dff,dgg],root=0)

            if flag:
                # Dense pattern: flat entry i*nvar + j maps to (row i, col j).
                a = numpy.zeros(
                    len(opt_problem._variables.keys()) *
                    len(opt_problem._constraints.keys()), int)
                b = numpy.zeros(
                    len(opt_problem._variables.keys()) *
                    len(opt_problem._constraints.keys()), int)

                for i in range(len(opt_problem._constraints.keys())):
                    for j in range(len(opt_problem._variables.keys())):
                        a[i * len(opt_problem._variables.keys()) + j] = i
                        b[i * len(opt_problem._variables.keys()) + j] = j
                return (a, b)

            else:
                [f, g, fail] = opt_problem.obj_fun(x, *args, **kwargs)
                dff, dgg = gradient.getGrad(x, group_ids, [f], g, *args,
                                            **kwargs)

                # Store History (root process only)
                if self.sto_hst and (myrank == 0):
                    log_file.write(dff, 'grad_obj')
                    log_file.write(dgg, 'grad_con')

                # Gradient Assignment: flatten row-major to match the pattern.
                a = numpy.zeros([
                    len(opt_problem._variables.keys()) *
                    len(opt_problem._constraints.keys())
                ])
                for i in range(len(opt_problem._constraints.keys())):
                    for j in range(len(opt_problem._variables.keys())):
                        a[i * len(opt_problem._variables.keys()) +
                          j] = dgg[i, j]

                return a

        # Variables Handling: collect bounds and initial values; IPOPT only
        # supports continuous ('c') design variables.
        nvar = len(opt_problem._variables.keys())
        xl = []
        xu = []
        xx = []
        for key in opt_problem._variables.keys():
            if opt_problem._variables[key].type == 'c':
                xl.append(opt_problem._variables[key].lower)
                xu.append(opt_problem._variables[key].upper)
                xx.append(opt_problem._variables[key].value)
            elif opt_problem._variables[key].type == 'i':
                raise IOError('IPOPT cannot handle integer design variables')
            elif opt_problem._variables[key].type == 'd':
                raise IOError('IPOPT cannot handle discrete design variables')

        xl = numpy.array(xl)
        xu = numpy.array(xu)
        xx = numpy.array(xx)

        # Variables Groups Handling: map group name -> [start, end) slice of
        # the flat design vector, used by the gradient callbacks.
        group_ids = {}
        if opt_problem.use_groups:
            k = 0
            for key in opt_problem._vargroups.keys():
                group_len = len(opt_problem._vargroups[key]['ids'])
                group_ids[opt_problem._vargroups[key][
                    'name']] = [k, k + group_len]
                k += group_len

        # Constraints Handling: equality constraints get lower == upper.
        ncon = len(opt_problem._constraints.keys())
        blc = []
        buc = []
        if ncon > 0:
            for key in opt_problem._constraints.keys():
                if opt_problem._constraints[key].type == 'e':
                    blc.append(opt_problem._constraints[key].equal)
                    buc.append(opt_problem._constraints[key].equal)
                elif opt_problem._constraints[key].type == 'i':
                    blc.append(opt_problem._constraints[key].lower)
                    buc.append(opt_problem._constraints[key].upper)

        else:
            if ((store_sol) and (myrank == 0)):
                print("Optimization Problem Does Not Have Constraints\n")
                print("Unconstrained Optimization Initiated\n")

            # Unconstrained case: IPOPT still needs at least one constraint
            # row, so add a single unbounded dummy constraint.
            # NOTE(review): `inf` is presumably imported at module level —
            # confirm.
            ncon = 1
            blc.append(-inf)
            buc.append(inf)

        blc = numpy.array(blc)
        buc = numpy.array(buc)

        # Objective Handling: collect current objective value(s).
        objfunc = opt_problem.obj_fun
        nobj = len(opt_problem._objectives.keys())
        ff = []
        for key in opt_problem._objectives.keys():
            ff.append(opt_problem._objectives[key].value)

        ff = numpy.array(ff)

        # Create an IPOPT instance problem; the Jacobian and Hessian are
        # declared fully dense (nvar*ncon and nvar*nvar nonzeros).
        nnzj = nvar * ncon
        nnzh = nvar * nvar
        ipopt = pyipopt.create(nvar, xl, xu, ncon, blc, buc, nnzj, nnzh,
                               eval_f, eval_grad_f, eval_g, eval_grad_g)

        # Setup Options: forward only the options whose value differs from
        # the declared default, dispatching on the option's declared type.
        optionss = self.options.copy()
        del optionss['defaults']

        for i in optionss:
            if not self.options['defaults'][i][1] == optionss[i][1]:
                if self.options[i][0].__name__ == 'int':
                    ipopt.int_option(i, self.options[i][1])

                if self.options[i][0].__name__ == 'float':
                    ipopt.num_option(i, self.options[i][1])

                if self.options[i][0].__name__ == 'str':
                    ipopt.str_option(i, self.options[i][1])

        # Run IPOPT and time the solve.

        t0 = time.time()
        r = ipopt.solve(xx)
        sol_time = time.time() - t0

        # On the root process: close the history log; if a temporary history
        # was written (hot start reading the same file), swap the _tmp
        # .cue/.bin files in as the new history.
        if (myrank == 0):
            if self.sto_hst:
                log_file.close()
                if tmp_file:
                    hos_file.close()
                    name = hos_file.filename
                    os.remove(name + '.cue')
                    os.remove(name + '.bin')
                    os.rename(name + '_tmp.cue', name + '.cue')
                    os.rename(name + '_tmp.bin', name + '.bin')

        ipopt.close()

        # Store Results
        sol_inform = {}
        print(r)
        sol_inform['value'] = r[-1]  # ifail[0]
        sol_inform['text'] = self.getInform(r[-1])  # self.getInform(ifail[0])

        if store_sol:
            sol_name = 'IPOPT Solution to ' + opt_problem.name

            sol_options = copy.copy(self.options)
            if 'default' in sol_options:
                del sol_options['defaults']

            sol_evals = 0

            sol_vars = copy.deepcopy(opt_problem._variables)
            i = 0
            x = r[0]
            for key in sol_vars.keys():
                sol_vars[key].value = x[i]
                i += 1

            sol_objs = copy.deepcopy(opt_problem._objectives)
            sol_objs[0].value = r[4]

            sol_cons = {}

            if ncon > 0:
                sol_lambda = r[3]
            else:
                sol_lambda = {}

            opt_problem.addSol(
                self.__class__.__name__,
                sol_name,
                objfunc,
                sol_time,
                sol_evals,
                sol_inform,
                sol_vars,
                sol_objs,
                sol_cons,
                sol_options,
                display_opts=disp_opts,
                Lambda=sol_lambda,
                Sensitivities=sens_type,
                myrank=myrank,
                arguments=args,
                **kwargs)

        return ff, xx, sol_inform  # ifail[0]
Ejemplo n.º 4
0
def OptHis2HTML(OptName, Alg, DesOptDir, xL, xU, DesVarNorm, inform, starttime, StatusDirectory=""):
    """Generate/update the HTML status report for an optimization run.

    Reads the ``History`` file of *OptName*, reduces the raw evaluation
    history to per-iteration (or per-generation) data depending on the
    algorithm, writes the ``*.csv`` data files and ``initial1.html``, and
    copies the report plus its assets into
    ``<StatusDirectory>/Results/<OptName>/``.

    Parameters
    ----------
    OptName : str
        Optimization problem name (history file basename).
    Alg
        Optimizer instance; ``Alg.name`` selects the history handling.
    DesOptDir : str
        Default output directory.
    xL, xU
        Lower/upper design-variable bounds for (de)normalization.
    DesVarNorm
        Normalization type, or ``False`` when the history holds raw values.
    inform
        Solver status; the string "Running" while still iterating.
    starttime
        Optimization start time in seconds since the epoch.
    StatusDirectory : str, optional
        Overrides *DesOptDir* when non-empty.

    Returns
    -------
    int
        Always 0.
    """
    # --- uppermost information table: run times, refresh rate, labels -----
    StartTime = str(starttime)[0:10] + "000"  # epoch seconds -> ms string
    EndTime = ""
    RefRate = '2000'
    if inform != "Running":
        EndTime = str(time())[0:10] + "000"
        RefRate = '1000000'  # run finished: effectively disable auto-refresh
    Iteration = 'Iteration'   # Label and Legends may be Iteration, Generation or Evaluations depending on Algorithm

    if StatusDirectory == "":  # Change the target directory for the status report files if the user wants to
        StatusDirectory = DesOptDir

    # Variables for the data extraction
    pos_of_best_ind = []  # position of the best individual if a GA or ES is used as algorithm
    fIter = []  # objective function array
    xIter = []  # design vector array
    gIter = []  # constraint vector array

    template_directory = os.path.dirname(
        os.path.realpath(__file__)) + "/StatusReportFiles/"  # directory with the html files etc.

    OptHist = History(OptName, "r")  # open the history file generated by pyOpt or own algorithm

    # raw histories: one entry per function evaluation, not yet per-iteration
    fAll = OptHist.read([0, -1], ["obj"])[0]["obj"]
    xAll = OptHist.read([0, -1], ["x"])[0]["x"]
    gAll = OptHist.read([0, -1], ["con"])[0]["con"]

    # pyOpt histories differ per algorithm; NLPQLP stores the constraints
    # with the opposite sign convention.
    if Alg.name == "NLPQLP":
        gAll = [x * -1 for x in gAll]

    fGradIter = OptHist.read([0, -1], ["grad_obj"])[0]["grad_obj"]

    if Alg.name == "COBYLA":
        fIter = fAll
        xIter = xAll
        gIter = gAll
    elif Alg.name == "NSGA-II":
        # Use the best individual of each population so the graphs show one
        # point per generation instead of every individual.
        Iteration = 'Generation'
        if inform == 0:
            inform = 'Optimization terminated successfully'

        PopSize = Alg.options['PopSize'][1]

        # FIX: floor division — a plain "/" yields a float under Python 3
        # and range() then raises; identical result under Python 2.
        for i in range(0, len(fAll) // PopSize):  # iterate over the populations

            best_fitness = 9999999
            max_violation_of_all_g = np.empty(PopSize)
            max_violation_of_all_g.fill(99999999)

            for u in range(0, PopSize):  # individuals of the current population
                if np.max(gAll[i * PopSize + u]) < max_violation_of_all_g[u]:
                    max_violation_of_all_g[u] = np.max(gAll[i * PopSize + u])

            pos_smallest_violation = np.argmin(max_violation_of_all_g)

            if max_violation_of_all_g[pos_smallest_violation] > 0:
                # no feasible design: take the least-violated individual
                fIter.append(fAll[i * PopSize + pos_smallest_violation])
                xIter.append(xAll[i * PopSize + pos_smallest_violation])
                gIter.append(gAll[i * PopSize + pos_smallest_violation])
            else:
                # at least one feasible design: take the best feasible one
                for u in range(0, PopSize):
                    if np.max(fAll[i * PopSize + u]) < best_fitness:
                        if np.max(gAll[i * PopSize + u]) <= 0:
                            best_fitness = fAll[i * PopSize + u]
                            pos_of_best_ind = i * PopSize + u

                fIter.append(fAll[pos_of_best_ind])
                xIter.append(xAll[pos_of_best_ind])
                gIter.append(gAll[pos_of_best_ind])

    else:
        # Gradient-based algorithms: one iteration per objective-gradient
        # record; map each gradient record back onto the objective
        # evaluation that immediately preceded it in the history.
        fIter = [[]] * len(fGradIter)
        xIter = [[]] * len(fGradIter)
        gIter = [[]] * len(fGradIter)

        for ii in range(len(fIter)):

            Posdg = OptHist.cues["grad_con"][ii][0]
            Posf = OptHist.cues["obj"][ii][0]
            iii = 0
            while Posdg > Posf:
                iii = iii + 1
                try:
                    Posf = OptHist.cues["obj"][iii][0]
                except IndexError:
                    # FIX: was a bare except; only running past the last
                    # objective record is expected here.
                    Posf = Posdg + 1
            iii = iii - 1
            fIter[ii] = fAll[iii]
            xIter[ii] = xAll[iii]
            gIter[ii] = gAll[iii]

    if Alg.name != "NSGA-II":
        if len(fGradIter) == 0:  # first calculation: no gradients recorded yet
            fIter = fAll
            xIter = xAll
            gIter = gAll

    OptHist.close()

    # convert the arrays to numpy arrays
    fIter = np.asarray(fIter)
    xIter = np.asarray(xIter)
    gIter = np.asarray(gIter)
    niter = len(fIter) - 1

    # ------------------------------------------------------------------
    # Normalize or denormalize so both representations can be displayed
    # in the graphs and tables.
    # ------------------------------------------------------------------
    if xIter.size != 0:
        if DesVarNorm == False:
            # history holds raw values: keep a copy and normalize in place
            xIter_denormalized = np.zeros((niter + 1, len(xIter[0])))
            for y in range(0, niter + 1):
                xIter_denormalized[y] = xIter[y]
            for y in range(0, niter + 1):
                [xIter[y, :], xLnorm, xUnorm] = normalize(xIter_denormalized[y, :], xL, xU, "xLxU")
        else:
            # history holds normalized values: compute the raw ones
            xIter_denormalized = np.zeros((niter + 1, len(xIter[0])))
            for y in range(0, niter + 1):
                xIter_denormalized[y, :] = denormalize(xIter[y, :], xL, xU, DesVarNorm)

    time_now = strftime("%Y-%b-%d %H:%M:%S", localtime())  # update the time for the information table

    number_des_vars = "0"
    number_constraints = "0"

    # ------------------------------------------------------------------
    # The .csv files (loaded by the javascript library) are created and
    # their header rows written.  The `with` blocks close the files, so
    # no explicit close() is needed.
    # NOTE(review): 'wb'/'ab' are Python-2 csv modes; Python 3 needs
    # mode 'w'/'a' with newline='' — confirm the target interpreter.
    # ------------------------------------------------------------------
    with open('objFct_maxCon.csv', 'wb') as csvfile:
        datawriter = csv.writer(csvfile, dialect='excel')
        datawriter.writerow(['Iteration', 'Objective function', 'Constraint'])

    with open('desVarsNorm.csv', 'wb') as csvfile:
        datawriter = csv.writer(csvfile, delimiter=',', escapechar=' ', quoting=csv.QUOTE_NONE)
        labels = ['Iteration']
        if xIter.size != 0:
            for i in range(1, xIter.shape[1] + 1):
                labels = labels + ['x' + str(i)]
        datawriter.writerow(labels)

    with open('desVars.csv', 'wb') as csvfile:
        datawriter = csv.writer(csvfile, delimiter=',', escapechar=' ', quoting=csv.QUOTE_NONE)
        labels = ['Iteration']
        if xIter.size != 0:
            for i in range(1, xIter.shape[1] + 1):
                labels = labels + ['x' + str(i)]
        datawriter.writerow(labels)

    with open('constraints.csv', 'wb') as csvfile:
        datawriter = csv.writer(csvfile, delimiter=',', escapechar=' ', quoting=csv.QUOTE_NONE)
        labels = ['Iteration']
        if gIter.size != 0:
            for i in range(1, gIter.shape[1] + 1):
                labels = labels + ['g' + str(i)]
        datawriter.writerow(labels)

    # ------------------------------------------------------------------
    # Write the data rows: objective/max-constraint, normalized and raw
    # design variables, constraint values.
    # ------------------------------------------------------------------
    for x in range(0, niter + 1):
        with open('objFct_maxCon.csv', 'ab') as csvfile:
            datawriter = csv.writer(csvfile, dialect='excel')
            datawriter.writerow([x, str(float(fIter[x])), float(np.max(gIter[x]))])

    if xIter.size != 0:
        for x in range(0, niter + 1):
            datasets = str(xIter[x][:].tolist()).strip('[]')
            with open('desVarsNorm.csv', 'ab') as csvfile:
                datawriter = csv.writer(csvfile, dialect='excel', quotechar=' ')
                datawriter.writerow([x, datasets])

    if xIter.size != 0:
        for x in range(0, niter + 1):
            datasets_denorm = str(xIter_denormalized[x][:].tolist()).strip('[]')
            with open('desVars.csv', 'ab') as csvfile:
                datawriter = csv.writer(csvfile, dialect='excel', quotechar=' ')
                datawriter.writerow([x, datasets_denorm])

    if gIter.size != 0:
        for x in range(0, niter + 1):
            datasetsg = str(gIter[x][:].tolist()).strip('[]')
            with open('constraints.csv', 'ab') as csvfile:
                datawriter = csv.writer(csvfile, dialect='excel', quotechar=' ')
                datawriter.writerow([x, datasetsg])

    # ------------------------------------------------------------------
    # The data for the graphs exists; now build the html tables.
    # ------------------------------------------------------------------

    # Objective function table
    ObjFct_table = "<td></td>"
    if xIter.size != 0:
        if gIter.size != 0:
            for x in range(0, niter + 1):
                ObjFct_table += "<tr>\n<td>" + str(x) + "</td>\n<td>" + str(round(fIter[x], 4)) + "</td>\n<td>" + str(
                    round(np.max(gIter[x]), 4)) + "</td>\n</tr>"
        else:
            for x in range(0, niter + 1):
                ObjFct_table += "<tr>\n<td>" + str(x) + "</td>\n<td>" + str(
                    round(fIter[x], 4)) + "</td>\n<td> no constraints </td>\n</tr>"

    # Design-variable table: one normalized and one raw column per variable
    DesVar_table = "<td></td>"
    if xIter.size != 0:
        number_des_vars = str(len(xIter[0]))

        for x in range(0, len(xIter[0])):
            DesVar_table += "<td>" + "x&#770;<sub>" + str(x + 1) + "</sub></td>" + "<td>" + "x<sub>" + str(
                x + 1) + " </sub></td>"

        for y in range(0, niter + 1):
            DesVar_table += "<tr>\n<td>" + str(y) + "</td>"
            for x in range(0, len(xIter[0])):
                DesVar_table += "<td>" + str(round(xIter[y][x], 4)) + "</td><td>" + str(
                    round(xIter_denormalized[y][x], 4)) + "</td>"
            DesVar_table += "</tr>"

    # Constraint table; violated entries get the "negativ" css class
    Constraint_table = "<td></td>"
    if gIter.size != 0:
        number_constraints = str(len(gIter[0]))
        for x in range(0, len(gIter[0])):
            Constraint_table += "<td>" + "g<sub>" + str(x + 1) + "</sub></td>"

        for y in range(0, niter + 1):
            Constraint_table += "<tr>\n<td>" + str(y) + "</td>"
            for x in range(0, len(gIter[0])):
                if (round(gIter[y][x], 4) > 0):
                    Constraint_table += "<td class=\"negativ\">" + str(round(gIter[y][x], 4)) + "</td>"
                else:
                    Constraint_table += "<td class=\"positiv\">" + str(round(gIter[y][x], 4)) + "</td>"
            Constraint_table += "</tr>"

    # ------------------------------------------------------------------
    # Fill the html master template with the computed values.
    # ------------------------------------------------------------------
    html = open(template_directory + '/initial.html', 'r')  # open template
    hstr = html.read()
    html.close()

    # (the original condition also tested the redundant "gIter.size > 100")
    if gIter.size != 0:  # constrained problem
        hstrnew = hstr.replace('xxxxName', OptName)
        hstrnew = hstrnew.replace('xxxxTime', time_now)
        hstrnew = hstrnew.replace('xxxxtableObjFct', ObjFct_table)
        hstrnew = hstrnew.replace('xxxxtableDesVar', DesVar_table)
        # FIX: number_des_vars is a string, so "* 2" duplicated its digits
        # ("3" -> "33"); the table has two columns per design variable.
        hstrnew = hstrnew.replace('xxxxnumber_des_var', str(int(number_des_vars) * 2))
        hstrnew = hstrnew.replace('xxxxtableConstr', Constraint_table)
        hstrnew = hstrnew.replace('xxxxnumber_constraints', number_constraints)
        hstrnew = hstrnew.replace('xxxxAlg', Alg.name)
        hstrnew = hstrnew.replace('xxxxStatus', str(inform))
        hstrnew = hstrnew.replace('xxxxRefRate', RefRate)
        hstrnew = hstrnew.replace('xxxxStartTime', StartTime)
        hstrnew = hstrnew.replace('xxxxEndTime', EndTime)
        hstrnew = hstrnew.replace('xxxxIteration', Iteration)
    else:
        hstrnew = hstr.replace('xxxxName', OptName)
        hstrnew = hstrnew.replace('xxxxTime', time_now)
        hstrnew = hstrnew.replace('xxxxtableObjFct', ObjFct_table)
        hstrnew = hstrnew.replace('xxxxtableDesVar', DesVar_table)
        hstrnew = hstrnew.replace('xxxxAlg', Alg.name)
        # FIX: str() for consistency with the constrained branch (inform
        # may be an int status code).
        hstrnew = hstrnew.replace('xxxxStatus', str(inform))
        hstrnew = hstrnew.replace('xxxxRefRate', RefRate)
        hstrnew = hstrnew.replace('xxxxStartTime', StartTime)
        hstrnew = hstrnew.replace('xxxxEndTime', EndTime)
        hstrnew = hstrnew.replace('xxxxIteration', Iteration)

        # Remove the html parts only needed for constrained problems.
        # FIX: the original ran a fixed 10 passes; once no marker was left,
        # str.find returned -1 and the slices silently corrupted
        # (duplicated/truncated) the document.  Loop only while both
        # markers are present.
        start_tag = "<!--Start of constraint html part-->"
        end_tag = "<!--End of constraint html part-->"
        while True:
            start = hstrnew.find(start_tag)
            if start == -1:
                break
            end = hstrnew.find(end_tag, start)
            if end == -1:
                break
            hstrnew = hstrnew[:start] + hstrnew[end + len(end_tag):]

    # generate a new html file which is filled with the actual content
    html = open('initial1.html', 'w')
    html.write(hstrnew)
    html.close()

    # copy the report and its assets into the result directory
    result_dir = StatusDirectory + os.sep + "Results" + os.sep + OptName
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)
    shutil.copy("initial1.html", result_dir + os.sep + OptName + "_Status.html")
    shutil.copy("objFct_maxCon.csv", result_dir + os.sep + "objFct_maxCon.csv")
    shutil.copy("desVars.csv", result_dir + os.sep + "desVars.csv")
    shutil.copy("desVarsNorm.csv", result_dir + os.sep + "desVarsNorm.csv")
    shutil.copy("constraints.csv", result_dir + os.sep + "constraints.csv")
    for pattern in ("*.png", "*.js", "*.css", "*.ico", "view_results.py"):
        for file in glob.glob(template_directory + pattern):
            shutil.copy(file, result_dir + os.sep)
    return 0