def test_read_with_real_file(self, env_read):
    """Values are read from the sample config file, honouring the ``section`` argument."""
    env_read.return_value = 'tests/fixtures/config.sample'

    # Default section vs. explicit section return different values for the same key.
    assert Config.get('models.default_context_name') == 'pdl'
    assert Config.get('models.default_context_name', section='section') == 'pdl2'

    # This key happens to hold the same value in both sections.
    assert Config.get('models.default_type_name') == 'pdl'
    assert (Config.get('models.default_type_name')
            == Config.get('models.default_type_name', section='section'))
def engine_httpserver(ctx, action, params_file, initial_dataset, dataset, model, metrics, protocol,
                      spark_conf, http_host, http_port, executor_path, max_workers, max_rpc_workers,
                      extra_executor_parameters):
    """Start the engine gRPC server and the Java HTTP executor, then block until Ctrl-C.

    Exits the process with status 1 if either server fails to start, and with
    status 0 after terminating both children on KeyboardInterrupt.
    """
    logger.info("Starting http and grpc servers ...")
    grpcserver = None
    httpserver = None

    def _params(**kwargs):
        # Turn keyword arguments into "-key value" CLI pairs, skipping None values.
        # (The previous `if kwargs is not None` guard was dead code: **kwargs is always a dict.)
        params = []
        for key, value in iteritems(kwargs):
            if value is not None:
                params.append("-{0}".format(str(key)))
                params.append(str(value))
        return params

    try:
        optional_args = _params(id=initial_dataset, d=dataset, m=model, me=metrics,
                                pf=params_file, c=spark_conf)
        grpcserver = subprocess.Popen(
            ['marvin', 'engine-grpcserver',
             '-a', action, '-w', str(max_workers), '-rw', str(max_rpc_workers)] + optional_args)
        # Give the grpc server a moment to come up before launching the executor.
        time.sleep(3)
    except Exception:  # was a bare `except:`; let KeyboardInterrupt/SystemExit propagate
        logger.exception("Could not start grpc server!")
        sys.exit(1)

    try:
        if not (executor_path and os.path.exists(executor_path)):
            # No local executor jar given (or it doesn't exist): download it.
            executor_url = Config.get("executor_url", section="marvin")
            executor_path = MarvinData.download_file(executor_url, force=False)
        command_list = ['java']
        command_list.append('-DmarvinConfig.engineHome={}'.format(ctx.obj['config']['inidir']))
        command_list.append('-DmarvinConfig.ipAddress={}'.format(http_host))
        command_list.append('-DmarvinConfig.port={}'.format(http_port))
        command_list.append('-DmarvinConfig.protocol={}'.format(protocol))
        if extra_executor_parameters:
            # NOTE(review): appended as a single argv entry — several space-separated
            # JVM flags would need splitting; confirm the expected format with callers.
            command_list.append(extra_executor_parameters)
        command_list.append('-jar')
        command_list.append(executor_path)
        httpserver = subprocess.Popen(command_list)
    except Exception:
        logger.exception("Could not start http server!")
        if grpcserver:
            grpcserver.terminate()
        sys.exit(1)

    try:
        # Idle forever; the servers run as child processes.
        while True:
            time.sleep(100)
    except KeyboardInterrupt:
        logger.info("Terminating http and grpc servers...")
        if grpcserver:
            grpcserver.terminate()
        if httpserver:
            httpserver.terminate()
        logger.info("Http and grpc servers terminated!")
        sys.exit(0)
def engine_httpserver(ctx, action, params_file, initial_dataset, dataset, model, metrics, spark_conf,
                      http_host, http_port, executor_path, max_workers, max_rpc_workers):
    """Start the engine gRPC server and the Java HTTP executor, then block until Ctrl-C.

    Exits the process with status 1 if either server fails to start, and with
    status 0 after terminating both children on KeyboardInterrupt.
    """
    logger.info("Starting http and grpc servers ...")
    grpcserver = None
    httpserver = None

    try:
        grpcserver = subprocess.Popen([
            'marvin', 'engine-grpcserver',
            '-a', action,
            '-w', str(max_workers),
            '-rw', str(max_rpc_workers)
        ])
        # Give the grpc server a moment to come up before launching the executor.
        time.sleep(3)
    except Exception:  # was a bare `except:`; let KeyboardInterrupt/SystemExit propagate
        logger.exception("Could not start grpc server!")
        sys.exit(1)

    try:
        if not (executor_path and os.path.exists(executor_path)):
            # No local executor jar given (or it doesn't exist): download it.
            executor_url = Config.get("executor_url", section="marvin")
            executor_path = MarvinData.download_file(executor_url, force=False)
        httpserver = subprocess.Popen([
            'java',
            '-DmarvinConfig.engineHome={}'.format(ctx.obj['config']['inidir']),
            '-DmarvinConfig.ipAddress={}'.format(http_host),
            '-DmarvinConfig.port={}'.format(http_port),
            '-jar', executor_path
        ])
    except Exception:
        logger.exception("Could not start http server!")
        if grpcserver:
            grpcserver.terminate()
        sys.exit(1)

    try:
        # Idle forever; the servers run as child processes.
        while True:
            time.sleep(100)
    except KeyboardInterrupt:
        logger.info("Terminating http and grpc servers...")
        if grpcserver:
            grpcserver.terminate()
        if httpserver:
            httpserver.terminate()
        logger.info("Http and grpc servers terminated!")
        sys.exit(0)
#!/usr/bin/env python
# coding=utf-8
from fabric.api import env
from fabric.api import run
from fabric.api import execute
from fabric.api import cd
from fabric.api import local
from fabric.api import put
from fabric.api import sudo
from fabric.state import output

from marvin_python_toolbox import __version__ as TOOLBOX_VERSION
from marvin_python_toolbox.common.config import Config

# SSH connection settings come from the [ssh_deployment] section of the engine config.
_host = Config.get("host", section="ssh_deployment").split(",")
_port = Config.get("port", section="ssh_deployment")
_user = Config.get("user", section="ssh_deployment")

# Register one "user@host:port" target per configured host.
env.hosts.extend(
    "{user}@{host}:{port}".format(user=_user, host=h, port=_port) for h in _host
)

# Silence fabric output except for the "running" lines.
output["everything"] = False
output["running"] = True

env.package = "marvin_mnist_keras_engine"
# NOTE(review): "margin" looks like a typo for "marvin", but the attribute names are
# kept as-is since other fab tasks may reference them.
env.margin_engine_executor_prefix = "/opt/marvin/engine-executor"
env.margin_engine_executor_jar = "marvin-engine-executor-assembly-{version}.jar".format(
    version=TOOLBOX_VERSION)
env.marvin_engine_executor_path = env.margin_engine_executor_prefix + "/" + env.margin_engine_executor_jar
def test_get_with_invalid_section(self, load_conf_from_file_mocked, config_fixture):
    """Asking for a section that is not in the loaded config raises InvalidConfigException."""
    load_conf_from_file_mocked.return_value = {}
    with pytest.raises(InvalidConfigException):
        Config.get('key', section='invalidsection')
def test_get_invalid_key_with_default(self, load_conf_from_file_mocked, config_fixture):
    """A missing key falls back to the supplied default instead of raising."""
    load_conf_from_file_mocked.return_value = config_fixture
    # Sanity-check the fixture really lacks the key we query.
    assert 'invalidkey' not in config_fixture
    value = Config.get('invalidkey', default='default_value')
    assert value == 'default_value'
def test_get_invalid_key(self, load_conf_from_file_mocked, config_fixture):
    """A missing key without a default raises InvalidConfigException."""
    load_conf_from_file_mocked.return_value = config_fixture
    # Sanity-check the fixture really lacks the key we query.
    assert 'invalidkey' not in config_fixture
    with pytest.raises(InvalidConfigException):
        Config.get('invalidkey')
def test_get(self, load_conf_from_file_mocked, config_fixture):
    """An existing key is returned verbatim from the loaded configuration."""
    load_conf_from_file_mocked.return_value = config_fixture
    expected = config_fixture['key']
    assert Config.get('key') == expected
def marvin_code_export(model, **kwargs):
    """Jupyter save-hook: export marvin-tagged notebook cells into the engine's action classes.

    For every code cell whose metadata carries a ``marvin_cell`` tag, the cell source
    is autopep8-formatted, artifact names are rewritten to ``self.<artifact>``, and the
    result is written into the matching action class file right after its ``execute``
    method signature (everything after the signature is truncated and replaced).
    """
    import autopep8
    import inspect
    import re
    from marvin_python_toolbox.common.config import Config

    print("Executing the marvin export hook script...")

    # Only notebook saves are processed.
    if model['type'] != 'notebook':
        return

    cells = model['content']['cells']

    # Artifact identifiers to rewrite as attribute access on self.
    artifacts = {
        'marvin_initial_dataset': re.compile(r"(\bmarvin_initial_dataset\b)"),
        'marvin_dataset': re.compile(r"(\bmarvin_dataset\b)"),
        'marvin_model': re.compile(r"(\bmarvin_model\b)"),
        'marvin_metrics': re.compile(r"(\bmarvin_metrics\b)")
    }
    # Raw strings: the originals were plain strings with \s escapes (invalid
    # escape sequences — a SyntaxWarning on modern Python); patterns unchanged.
    batch_exec_pattern = re.compile(
        r"(def\s+execute\s*\(\s*self\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
    online_exec_pattern = re.compile(
        r"(def\s+execute\s*\(\s*self\s*,\s*input_message\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")

    # Maps the marvin_cell tag to the engine class the cell belongs to.
    CLAZZES = {
        "acquisitor": "AcquisitorAndCleaner",
        "tpreparator": "TrainingPreparator",
        "trainer": "Trainer",
        "evaluator": "MetricsEvaluator",
        "ppreparator": "PredictionPreparator",
        "predictor": "Predictor",
        "feedback": "Feedback"
    }

    for cell in cells:
        if cell['cell_type'] == 'code' and cell["metadata"].get("marvin_cell", False):
            source = cell["source"]
            new_source = autopep8.fix_code(source, options={'max_line_length': 160})
            marvin_action = cell["metadata"]["marvin_cell"]
            marvin_action_clazz = getattr(__import__(Config.get("package")),
                                          CLAZZES[marvin_action])
            source_path = inspect.getsourcefile(marvin_action_clazz)

            fnew_source_lines = []
            for new_line in new_source.split("\n"):
                # NOTE(review): the literal indent below may have been collapsed by
                # whitespace mangling — the exported lines must be indented to sit
                # inside the target method body; confirm the intended width.
                fnew_line = " " + new_line + "\n" if new_line.strip() else "\n"
                # Leave import/from/print lines untouched; elsewhere rewrite
                # artifact names to self.<artifact>.
                if (not new_line.startswith("import")
                        and not new_line.startswith("from")
                        and not new_line.startswith("print")):
                    for artifact in artifacts.keys():
                        fnew_line = re.sub(artifacts[artifact], 'self.' + artifact, fnew_line)
                fnew_source_lines.append(fnew_line)

            # Online actions need an explicit return appended; batch actions don't.
            if marvin_action == "predictor":
                fnew_source_lines.append(" return final_prediction\n")
                exec_pattern = online_exec_pattern
            elif marvin_action == "ppreparator":
                fnew_source_lines.append(" return input_message\n")
                exec_pattern = online_exec_pattern
            elif marvin_action == "feedback":
                fnew_source_lines.append(" return \"Thanks for the feedback!\"\n")
                exec_pattern = online_exec_pattern
            else:
                exec_pattern = batch_exec_pattern

            fnew_source = "".join(fnew_source_lines)

            # Rewrite the class file: copy lines up to and including the execute
            # signature, then write the new body and truncate the rest.
            with open(source_path, 'r+') as fp:
                lines = fp.readlines()
                fp.seek(0)
                for line in lines:
                    if re.findall(exec_pattern, line):
                        fp.write(line)
                        fp.write(fnew_source)
                        fp.truncate()
                        break
                    else:
                        fp.write(line)

            print("File {} updated!".format(source_path))

    print("Finished the marvin export hook script...")