Example 1
    def serve(port):
        try:
            import thrift
        except ImportError:
            error(
                "Apache Thrift is not installed. You can install it with: 'pip install thrift'"
            )
            sys.exit(-1)
        try:
            import os, sys

            # insert current dir first as another run_server exists under whatsopt/services
            sys.path.insert(0, os.getcwd())
            from run_server import run_server
        except ImportError as err:
            print(str(err))
            error("Server not found!")
            try:
                mda_id = get_analysis_id()
                if mda_id:
                    log(
                        f"  (use 'wop update -s' to generate server for current analysis #{mda_id})"
                    )
                else:
                    warn("No local analysis found")
                    log(
                        "  (use 'wop pull -s <id>' to generate server for the analysis #id)"
                    )
            except ValueError as err:
                warn(str(err))
            sys.exit(-1)
        run_server(port)
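
The snippet above expects a generated run_server.py in the current directory. A minimal, hypothetical stub of that module, only to make the expected entry point concrete (the real file is generated by 'wop update -s' / 'wop pull -s <id>' and starts the actual Thrift server):

# run_server.py -- hypothetical stand-in, not the generated code
def run_server(port):
    # the generated version wires up and serves the analysis over Thrift;
    # this stub only illustrates the expected entry point signature
    print(f"Serving analysis on port {port}...")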
Example 2
    def login(self, api_key=None, echo=None):
        debug("login()")
        already_logged = False
        if api_key:
            self.api_key = api_key
        elif os.path.exists(API_KEY_FILENAME):
            already_logged = True
            self.api_key = self._read_api_key()
        else:
            debug("Ask for API key")
            self.api_key = self._ask_and_write_api_key()
        ok = self._test_connection(api_key)

        if not api_key and already_logged and not ok:
            # try to propose re-login
            self.logout(
                echo=False
            )  # log out silently, as the user may be logged in to another server
            # save url again
            with open(URL_FILENAME, "w") as f:
                f.write(self._url)
            ok = self.login(api_key, echo=False)

        if not ok and echo:
            error("Login to WhatsOpt ({}) failed.".format(self.url))
            log("")
            sys.exit(-1)

        if echo:
            log("Successfully logged into WhatsOpt (%s)" % self.url)
            log("")
        return ok
Example 3
def load_from_hdf5(filename):
    try:
        from gemseo.algos.opt_problem import OptimizationProblem
    except ImportError:
        error("GEMSEO module not found: cannot upload hdf5")
        exit(-1)

    opt_pb = OptimizationProblem.import_hdf(filename)
    ds = opt_pb.export_to_dataset("OptimizationProblem")

    name = os.path.splitext(os.path.basename(filename))[0]
    driver_kind = "DOE"
    m = re.match(r"\w+_(doe|optim)", name)
    if m and m.group(1) == "optim":
        driver_kind = "optimizer"

    name = f"GEMSEO_{driver_kind}_ALGO"
    cases = []
    statuses = []

    for varname, data in ds.get_all_data(by_group=False, as_dict=True).items():
        for j in range(data.shape[1]):
            coord_index = -1
            if data.shape[1] > 1:
                coord_index = j
            values = data[:, j].tolist()
            cases.append({
                "varname": varname,
                "coord_index": coord_index,
                "values": values
            })

    statuses = len(cases[0]["values"]) * [1]
    return name, cases, statuses
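
A quick standalone check of the driver-kind detection used above (the basenames below are made up for illustration):

import re

# same pattern as in load_from_hdf5: a '<something>_optim' basename selects
# the optimizer driver, anything else defaults to DOE
for name in ("sellar_doe", "sellar_optim", "results"):
    m = re.match(r"\w+_(doe|optim)", name)
    driver_kind = "optimizer" if m and m.group(1) == "optim" else "DOE"
    print(name, "->", f"GEMSEO_{driver_kind}_ALGO")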
Example 4
def err_msg(resp):
    error(
        "{} ({}) : {}".format(
            resp.status_code,
            requests.status_codes._codes[resp.status_code][0],
            resp.json()["message"],
        )
    )
Example 5
def convert(filename):
    if not os.path.exists(filename):
        error(f"File {filename} not found.")
    pathname, extension = os.path.splitext(filename)
    if not extension == ".sqlite":
        error(f"File {filename} should have '.sqlite' extension, got '{extension}'")
    basename = os.path.basename(pathname)
    convert_sqlite_to_csv(filename, basename)
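
For reference, what os.path.splitext and os.path.basename yield for the expected input (the filename is illustrative):

import os

pathname, extension = os.path.splitext("runs/cases.sqlite")
print(pathname)                    # runs/cases
print(extension)                   # .sqlite  (the leading dot is kept)
print(os.path.basename(pathname))  # cases -> used as the CSV base name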
Example 6
    def update_mda(self, analysis_id=None, options={}):
        mda_id = analysis_id or self.get_analysis_id()
        if mda_id is None:
            error(
                "Unknown analysis with id={} (maybe use wop pull <analysis-id>)".format(
                    mda_id
                )
            )
            exit(-1)
        opts = copy.deepcopy(options)
        opts.update({"--base": True, "--update": True})
        self.pull_mda(mda_id, opts, "Analysis %s updated" % mda_id)
Example 7
    def upload_vars_init_cmd(self, py_filename, options):
        def upload_vars_init(prob):
            self.upload_vars_init(prob, options)
            sys.exit()

        d = os.path.dirname(py_filename)
        run_analysis_filename = os.path.join(d, "run_analysis.py")
        if not os.path.exists(run_analysis_filename):
            error(
                f"Can not get analysis init: script {run_analysis_filename} not found."
            )
        hooks.use_hooks = True
        hooks._register_hook("final_setup", "Problem", post=upload_vars_init)
        _load_and_exec(run_analysis_filename, [])
Example 8
def load_state():
    state = {}
    if not os.path.exists(WOP_CONF_FILENAME):
        return state
    with open(WOP_CONF_FILENAME, "r") as f:
        for line in f.readlines():
            line = line.strip()
            if line == "" or line.startswith("#"):
                continue
            m = re.search(r"(\S+): (\S+)", line)
            if m:
                state[m.group(1)] = m.group(2)
            else:
                error(
                    f"Syntax error in {WOP_CONF_FILENAME} file: line '{line}' invalid"
                )
                exit(-1)
    return state
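
The state file parsed above is a plain list of 'key: value' lines with '#' comments; a minimal standalone sketch (keys and values are illustrative, not necessarily the ones wop writes):

import re

sample_lines = [
    "# WhatsOpt client state (illustrative)",
    "whatsopt_url: https://example.org",
    "analysis_id: 42",
]

state = {}
for line in sample_lines:
    line = line.strip()
    if line == "" or line.startswith("#"):
        continue
    m = re.search(r"(\S+): (\S+)", line)
    if m:
        state[m.group(1)] = m.group(2)
print(state)  # {'whatsopt_url': 'https://example.org', 'analysis_id': '42'}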
Example 9
    def show_mda(self, analysis_id, pbfile, name, outfile, batch, depth):
        options = {
            "--xdsm": True,
            "--name": name,
            "--dry-run": False,
            "--depth": depth,
        }
        xdsm = None
        if pbfile:
            start = time.time()
            try:
                info("XDSM info retrieval...")
                self.push_mda_cmd(pbfile, options)
            except AnalysisPushedException as pushed:
                xdsm = pushed.xdsm
            end = time.time()
            log("Retrieved in {:.2f}s".format(end - start))
            source = os.path.basename(pbfile)
        else:
            mda_id = analysis_id or get_analysis_id()
            if mda_id is None:
                error(
                    "Unknown analysis with id={} (maybe use wop pull <analysis-id>)".format(
                        mda_id
                    )
                )
                sys.exit(-1)
            url = self.endpoint("/api/v1/analyses/{}.xdsm".format(mda_id))
            resp = self.session.get(url, headers=self.headers)
            resp.raise_for_status()
            xdsm = resp.json()
            source = f"{mda_id}@{self._url}"

        info("XDSM building...")
        generate_xdsm_html(source, xdsm, outfile)
        if pbfile:
            log("XDSM of analysis from {} generated in {}".format(pbfile, outfile))
        else:
            log("XDSM of analysis {} generated in {}".format(mda_id, outfile))
        if not batch:
            webview(outfile)
Example 10
    def update_mda(self, analysis_id=None, options={}):
        mda_id = analysis_id or get_analysis_id()
        if mda_id is None:
            error(
                f"Unknown analysis with id=#{mda_id} (maybe use wop pull <analysis-id>)"
            )
            sys.exit(-1)
        # keep options unchanged, work on a copy
        opts = copy.deepcopy(options)
        # sanity checks
        if not (is_based_on(FRAMEWORK_OPENMDAO) or is_based_on(FRAMEWORK_GEMSEO)):
            error("No framework detected. Check your *_base.py files.")
            sys.exit(-1)
        if opts["--openmdao"] and opts["--gemseo"]:
            error("Please choose either --openmdao or --gemseo.")
            sys.exit(-1)
        opts.update(
            {
                "--base": True,
                "--update": True,
                "--gemseo": opts["--gemseo"]
                or (not opts["--openmdao"] and is_based_on("gemseo")),
                "--openmdao": opts["--openmdao"]
                or (not opts["--gemseo"] and is_based_on("openmdao")),
                "--package": is_package_mode(),
            }
        )
        self.pull_mda(mda_id, opts, "Analysis #{} updated".format(mda_id))
Example 11
    def _test_connection(self, api_key=None):
        test_api_key = api_key
        if test_api_key is None and os.path.exists(API_KEY_FILENAME):
            test_api_key = self._read_api_key()

        if test_api_key:
            self.headers = {
                "Authorization": "Token token=" + test_api_key,
                "User-Agent": "wop/{}".format(__version__),
            }
            url = self.endpoint("/api/v1/versioning")
            try:
                resp = self.session.get(url, headers=self.headers)
                # special case: bad wop version < minimal required version
                if resp.status_code == requests.codes.forbidden:
                    error(resp.json()["message"])
                    sys.exit(-1)
                return resp.ok
            except requests.exceptions.ConnectionError:
                return False
        else:
            return False
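
The request above can be reproduced with plain requests; a minimal sketch, assuming a WhatsOpt server URL and API key of your own (the values below are placeholders):

import requests

url = "https://example.org" + "/api/v1/versioning"  # illustrative server
headers = {
    "Authorization": "Token token=" + "<your-api-key>",
    "User-Agent": "wop/x.y.z",
}
try:
    resp = requests.get(url, headers=headers)
    print(resp.status_code, resp.ok)
except requests.exceptions.ConnectionError:
    print("server unreachable")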
Example 12
def load_from_sqlite(filename, parallel=False):
    if parallel:
        m = re.match(r"(.*_)(\d+)$", filename)
        if m:
            file_prefix = m.group(1)
            file_count = int(m.group(2)) + 1
            name, cases, statuses = load_sqlite_file(filename)
            next_filename = file_prefix + str(file_count)
            while os.path.exists(next_filename):
                _, tmp_cases, tmp_statuses = load_sqlite_file(next_filename)
                for i, tmp_case in enumerate(tmp_cases):
                    cases[i]["values"].extend(tmp_case["values"])
                statuses.extend(tmp_statuses)
                file_count = file_count + 1
                next_filename = file_prefix + str(file_count)
            return name, cases, statuses
        else:
            error("In parallel mode (-p option), "
                  "filename should end with '_<number>', got {}".format(
                      filename))
            exit(-1)
    else:
        return load_sqlite_file(filename)
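
In parallel mode the loader expects a family of files sharing a prefix and ending in '_<number>'; a standalone illustration of how the sequence is enumerated (filenames are made up):

import re

filename = "cases.sqlite_0"  # illustrative first file of a parallel run
m = re.match(r"(.*_)(\d+)$", filename)
if m:
    file_prefix, start = m.group(1), int(m.group(2))
    # load_from_sqlite probes the following files with os.path.exists
    # until the sequence stops
    for count in range(start + 1, start + 4):
        print("next candidate:", file_prefix + str(count))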
Example 13
    def upload_vars_init(self, problem, options):
        mda_id = get_analysis_id() if get_analysis_id() else options["--analysis-id"]
        if mda_id is None:
            error("Unknown analysis with id={}".format(mda_id))
            sys.exit(-1)
        parameters = []
        headers = ["variable", "init value"]
        data = []
        for s in problem.model._subsystems_myproc:
            if isinstance(s, IndepVarComp):
                for absname in s._var_abs2meta["output"]:
                    name = find_indep_var_name(problem, absname)
                    value = s._outputs._views[absname][:]
                    if isinstance(value, np.ndarray):
                        value = str(value.tolist())
                    parameters.append({"varname": name, "value": value})
        data = [[p["varname"], p["value"]] for p in parameters]
        params = {"parameterization": {"parameters": parameters}}
        log(tabulate(data, headers))
        if not options["--dry-run"]:
            url = self.endpoint(f"/api/v1/analyses/{mda_id}/parameterization")
            resp = self.session.put(url, headers=self.headers, json=params)
            resp.raise_for_status()
            log("Variables init values uploaded")
Example 14
    def upload_parameters(self, problem, options):
        mda_id = self.get_analysis_id()
        if mda_id is None:
            error("Unknown analysis with id={}".format(mda_id))
            exit(-1)
        parameters = []
        headers = ["parameter", "value"]
        data = []
        for s in problem.model._subsystems_myproc:
            if isinstance(s, IndepVarComp):
                for absname in s._var_allprocs_abs_names["output"]:
                    name = s._var_allprocs_abs2prom["output"][absname]
                    value = s._outputs._views[absname][:]
                    if isinstance(value, np.ndarray):
                        value = str(value.tolist())
                    parameters.append({"varname": name, "value": value})
        data = [[p["varname"], p["value"]] for p in parameters]
        params = {"parameterization": {"parameters": parameters}}
        log(tabulate(data, headers))
        if not options["--dry-run"]:
            url = self._endpoint(("/api/v1/analyses/%s/parameterization") % mda_id)
            resp = self.session.put(url, headers=self.headers, json=params)
            resp.raise_for_status()
            log("Parameters uploaded")
Example 15
    def upload(
        self,
        filename,
        driver_kind=None,
        analysis_id=None,
        operation_id=None,
        dry_run=False,
        outvar_count=1,
        only_success=False,
    ):
        from socket import gethostname

        mda_id = self.get_analysis_id() if not analysis_id else analysis_id

        name = cases = statuses = None
        if filename == "run_parameters_init.py":
            if mda_id is None:
                error("Unknown analysis with id={}".format(mda_id))
                exit(-1)
            self.execute(
                "run_analysis.py", self.upload_parameters_cmd, {"--dry-run": dry_run}
            )
            exit()
        elif filename.endswith(".csv"):
            name, cases, statuses = load_from_csv(filename)
        else:
            name, cases, statuses = load_from_sqlite(filename)

        if only_success:
            for c in cases:
                c["values"] = [
                    val for i, val in enumerate(c["values"]) if statuses[i] > 0
                ]
            statuses = [1 for s in statuses if s > 0]

        for c in cases:
            c["values"] = np.nan_to_num(np.array(c["values"])).tolist()

        if dry_run:
            print_cases(cases, statuses)
            exit()

        resp = None
        if operation_id:
            url = self._endpoint(("/api/v1/operations/%s") % operation_id)
            operation_params = {"cases": cases}
            resp = self.session.patch(
                url, headers=self.headers, json={"operation": operation_params}
            )
        else:
            if mda_id:
                url = self._endpoint(("/api/v1/analyses/%s/operations") % mda_id)
            else:
                url = self._endpoint("/api/v1/operations")
            if driver_kind:
                driver = "user_{}_algo".format(driver_kind)
            else:
                if name == "LHS":
                    driver = "smt_doe_lhs"
                elif name == "Morris":
                    driver = "salib_doe_morris"
                elif name == "SLSQP":
                    driver = "scipy_optimizer_slsqp"
                else:
                    # assume name is well-formed as <lib>_<doe|optimizer|screening>_<algoname>
                    # otherwise fall back to the user-defined driver
                    m = re.match(r"(\w+)_(doe|optimizer|screening)_(\w+)", name.lower())
                    if m:
                        driver = name.lower()
                    else:
                        driver = "user_defined_algo"
            operation_params = {
                "name": name,
                "driver": driver,
                "host": gethostname(),
                "cases": cases,
                "success": statuses,
            }
            params = {"operation": operation_params}
            if outvar_count > 0 and outvar_count < len(cases):
                params["outvar_count_hint"] = outvar_count
            resp = self.session.post(url, headers=self.headers, json=params)
        resp.raise_for_status()
        log("Results data from {} uploaded with driver {}".format(filename, driver))
Example 16
    def get_status(self):
        connected = self.is_connected()
        whatsopt_url = get_whatsopt_url() or self.url
        if connected:
            info("You are logged in {}".format(self.url))
        else:
            info("You are not connected.")
        mda_id = None
        try:
            mda_id = get_analysis_id()
        except ValueError as err:
            warn(str(err))
        if mda_id:
            if connected and whatsopt_url == self.url:
                info("Found local analysis code (id=#{})".format(mda_id))
                # connected to the right server from which the analysis was pulled
                url = self.endpoint("/api/v1/analyses/{}".format(mda_id))
                resp = self.session.get(url, headers=self.headers)

                if resp.ok:
                    mda = resp.json()
                    if is_based_on(FRAMEWORK_GEMSEO):
                        mda["framework"] = "GEMSEO"
                    elif is_based_on(FRAMEWORK_OPENMDAO):
                        mda["framework"] = "OpenMDAO"
                    else:  # should not happen
                        raise ValueError(
                            "No framework detected. Check your *_base.py files."
                        )
                    headers = ["id", "name", "created_at", "owner_email", "framework"]
                    data = [[mda[k] for k in headers]]
                    log(tabulate(data, headers))
                else:
                    error("Analysis not found on the server anymore (probably deleted)")
                    log(
                        "  (use 'wop push <analysis.py>' to push from an OpenMDAO code to the server)"
                    )
            else:
                info(
                    "Found local analysis code (id=#{}) "
                    "pulled from {}".format(mda_id, whatsopt_url)
                )
                if connected:
                    # connected to another server with a pulled analysis
                    warn("You are connected to a different server")
                    log(
                        "  (use 'wop push <analysis.py>' to push the local "
                        "analysis in the current server {})".format(self.url)
                    )
                    log(
                        "  (use 'wop logout' and 'wop login {}' "
                        "to log in to the right server)".format(whatsopt_url)
                    )
                else:
                    log("  (use 'wop login {}' command to log in)".format(whatsopt_url))
        else:
            info("No local analysis found")
            if connected:
                log(
                    "  (use 'wop list' and 'wop pull <id>' to retrieve an existing analysis)\n"
                    "  (use 'wop push <analysis.py>' to push from an OpenMDAO code to the server)"
                )
        log("")
Example 17
    def upload(
        self,
        filename,
        driver_kind=None,
        analysis_id=None,
        operation_id=None,
        dry_run=False,
        outvar_count=1,
        only_success=False,
        parallel=False,
    ):
        from socket import gethostname

        mda_id = get_analysis_id() if not analysis_id else analysis_id

        name = cases = statuses = None
        if (
            os.path.basename(filename) == "run_parameters_init.py"
            or os.path.basename(filename) == "mda_init.py"
        ):
            self.upload_vars_init_cmd(
                filename, {"--dry-run": dry_run, "--analysis-id": mda_id}
            )
        elif filename.endswith(".csv"):
            name, cases, statuses = load_from_csv(filename)
        elif filename.endswith(".sqlite"):
            name, cases, statuses = load_from_sqlite(filename, parallel)
        elif filename.endswith(".hdf5"):
            name, cases, statuses = load_from_hdf5(filename)
        else:
            error(
                f"Can not upload file {filename}: extension not recognized"
                " (should be either .csv, .sqlite or .hdf5)"
            )
            exit(-1)

        if only_success:
            for c in cases:
                c["values"] = [
                    val for i, val in enumerate(c["values"]) if statuses[i] > 0
                ]
            statuses = [1 for s in statuses if s > 0]

        for c in cases:
            c["values"] = np.nan_to_num(np.array(c["values"])).tolist()

        if dry_run:
            print_cases(cases, statuses)
            sys.exit()

        resp = None
        if operation_id:
            url = self.endpoint(("/api/v1/operations/%s") % operation_id)
            operation_params = {"cases": cases}
            resp = self.session.patch(
                url, headers=self.headers, json={"operation": operation_params}
            )
        else:
            if mda_id:
                url = self.endpoint(("/api/v1/analyses/%s/operations") % mda_id)
            else:
                url = self.endpoint("/api/v1/operations")
            if driver_kind:
                driver = "user_{}_algo".format(driver_kind)
            else:
                # assume name is well-formed as <lib>_<doe|optimizer|screening>_<algoname>
                # otherwise fall back to the generic data-uploading driver
                m = re.match(r"(\w+)_(doe|optimizer|screening)_(\w+)", name.lower())
                if m:
                    driver = name.lower()
                else:
                    driver = "user_data_uploading"
            operation_params = {
                "name": name,
                "driver": driver,
                "host": gethostname(),
                "cases": cases,
                "success": statuses,
            }
            params = {"operation": operation_params}
            if outvar_count > 0 and outvar_count < len(cases):
                params["outvar_count_hint"] = outvar_count
            resp = self.session.post(url, headers=self.headers, json=params)
        resp.raise_for_status()
        log("Results data from {} uploaded with driver {}".format(filename, driver))