def _start_function(db_session, data):
    """Resolve a stored function from ``data["functionUrl"]`` and invoke its
    runtime's "start" handler.

    :param db_session: DB session used to look up the stored function record.
    :param data: request body dict; must contain ``functionUrl``.
    :returns: the started function object (saved back to the DB, unversioned).
    :raises: via ``log_and_raise`` with BAD_REQUEST on any validation or
        runtime failure.
    """
    logger.info("start_function:\n{}".format(data))
    url = data.get("functionUrl")
    if not url:
        log_and_raise(
            HTTPStatus.BAD_REQUEST,
            reason="runtime error: functionUrl not specified")
    project, name, tag, hash_key = parse_function_uri(url)
    runtime = get_db().get_function(db_session, name, project, tag, hash_key)
    if not runtime:
        log_and_raise(
            HTTPStatus.BAD_REQUEST,
            reason="runtime error: function {} not found".format(url))
    fn = new_function(runtime=runtime)
    resource = runtime_resources_map.get(fn.kind)
    # BUGFIX: get() returns None for an unknown kind; the original
    # `"start" not in resource` then raised TypeError instead of the
    # intended BAD_REQUEST response.
    if not resource or "start" not in resource:
        log_and_raise(
            HTTPStatus.BAD_REQUEST,
            reason="runtime error: 'start' not supported by this runtime")
    try:
        run_db = get_run_db_instance(db_session)
        fn.set_db_connection(run_db)
        # resp = resource["start"](fn)  # TODO: handle resp?
        resource["start"](fn)
        fn.save(versioned=False)
        logger.info("Fn:\n %s", fn.to_yaml())
    except Exception as err:
        logger.error(traceback.format_exc())
        log_and_raise(
            HTTPStatus.BAD_REQUEST,
            reason="runtime error: {}".format(err))
    return fn
def _submit(data):
    """Run a task against a function (given inline or by URL), or register it
    with the scheduler when a ``schedule`` is supplied.

    Returns a Flask JSON response: ``{ok: True, data: ...}`` on success, or a
    ``json_error`` BAD_REQUEST response on any failure.
    """
    task = data.get('task')
    function = data.get('function')
    url = data.get('functionUrl')
    if task and not url:
        url = get_in(task, 'spec.function')
    if not task or not (function or url):
        return json_error(
            HTTPStatus.BAD_REQUEST,
            reason='bad JSON, need to include function/url and task objects',
        )

    # TODO: reject execution when the function kind is '' or 'local'
    # (must be a remote/container runtime)
    try:
        if function:
            func_obj = new_function(runtime=function)
        elif '://' in url:
            func_obj = import_function(url=url)
        else:
            project, name, tag = parse_function_uri(url)
            function_record = _db.get_function(name, project, tag)
            if not function_record:
                return json_error(
                    HTTPStatus.BAD_REQUEST,
                    reason='runtime error: function {} not found'.format(url),
                )
            func_obj = new_function(runtime=function_record)

        func_obj.set_db_connection(_db, True)
        logger.info('func:\n{}'.format(func_obj.to_yaml()))
        # func_obj.spec.rundb = 'http://mlrun-api:8080'

        schedule = data.get('schedule')
        if schedule:
            job_id = _scheduler.add(schedule, func_obj, (task,))
            _db.save_schedule(data)
            resp = {'schedule': schedule, 'id': job_id}
        else:
            resp = func_obj.run(task, watch=False)
            logger.info('resp: %s', resp.to_yaml())
    except Exception as err:
        logger.error(traceback.format_exc())
        return json_error(
            HTTPStatus.BAD_REQUEST,
            reason='runtime error: {}'.format(err),
        )

    if not isinstance(resp, dict):
        resp = resp.to_dict()
    return jsonify(ok=True, data=resp)
def _parse_start_function_body(db_session, data):
    """Resolve ``data["functionUrl"]`` to a stored function record and build a
    runtime function object from it.

    :raises: via ``log_and_raise`` with BAD_REQUEST when the URL is missing or
        the function is not found in the DB.
    """
    url = data.get("functionUrl")
    if not url:
        log_and_raise(
            HTTPStatus.BAD_REQUEST.value,
            reason="runtime error: functionUrl not specified",
        )

    project, name, tag, hash_key = parse_function_uri(url)
    function_record = get_db().get_function(
        db_session, name, project, tag, hash_key)
    if not function_record:
        log_and_raise(
            HTTPStatus.BAD_REQUEST.value,
            reason="runtime error: function {} not found".format(url),
        )
    return new_function(runtime=function_record)
def start_function():
    """Flask endpoint: start the runtime of the function named in the request
    body's ``functionUrl``.

    Returns ``{ok: True, data: <function dict>}`` on success, or a
    ``json_error`` BAD_REQUEST response on any failure.
    """
    try:
        data = request.get_json(force=True)
    except ValueError:
        return json_error(HTTPStatus.BAD_REQUEST, reason='bad JSON body')

    logger.info('start_function:\n{}'.format(data))
    url = data.get('functionUrl')
    if not url:
        return json_error(
            HTTPStatus.BAD_REQUEST,
            reason='runtime error: functionUrl not specified',
        )
    project, name, tag = parse_function_uri(url)
    runtime = _db.get_function(name, project, tag)
    if not runtime:
        return json_error(
            HTTPStatus.BAD_REQUEST,
            reason='runtime error: function {} not found'.format(url),
        )

    fn = new_function(runtime=runtime)
    resource = runtime_resources_map.get(fn.kind)
    # BUGFIX: get() returns None for an unknown kind; the original
    # `'start' not in resource` then raised TypeError instead of returning
    # the intended BAD_REQUEST response.
    if not resource or 'start' not in resource:
        return json_error(
            HTTPStatus.BAD_REQUEST,
            reason='runtime error: "start" not supported by this runtime',
        )

    try:
        fn.set_db_connection(_db)
        # resp = resource['start'](fn)  # TODO: handle resp?
        resource['start'](fn)
        fn.save(versioned=False)
        logger.info('Fn:\n %s', fn.to_yaml())
    except Exception as err:
        logger.error(traceback.format_exc())
        return json_error(
            HTTPStatus.BAD_REQUEST,
            reason='runtime error: {}'.format(err),
        )
    return jsonify(ok=True, data=fn.to_dict())
def submit(db_session: Session, data):
    """Run a task against a function (inline dict or URL reference), or
    register a scheduled run when ``schedule`` is present in the body.

    When both a function dict and a URL are given, the dict's spec fields
    (volumes, env, resources, ...) override those of the stored function.

    :returns: ``{"data": <run or schedule info dict>}``
    :raises: via ``log_and_raise`` with BAD_REQUEST on any failure.
    """
    task = data.get("task")
    function = data.get("function")
    url = data.get("functionUrl")
    if task and not url:
        url = get_in(task, "spec.function")
    if not task or not (function or url):
        log_and_raise(
            HTTPStatus.BAD_REQUEST,
            reason="bad JSON, need to include function/url and task objects")

    # TODO: reject execution when the function kind is "" or "local"
    # (must be a remote/container runtime)
    resp = None
    try:
        if function and not url:
            function_object = new_function(runtime=function)
        else:
            if "://" in url:
                function_object = import_function(url=url)
            else:
                project, name, tag, hash_key = parse_function_uri(url)
                function_record = get_db().get_function(
                    db_session, name, project, tag, hash_key)
                if not function_record:
                    log_and_raise(
                        HTTPStatus.BAD_REQUEST,
                        reason="runtime error: function {} not found".format(
                            url))
                function_object = new_function(runtime=function_record)

            if function:
                # apply spec overrides from the inline function dict on top
                # of the function resolved from the URL
                override_fn = new_function(runtime=function)
                override_attributes = [
                    "volumes", "volume_mounts", "env", "resources",
                    "image_pull_policy", "replicas",
                ]
                for attribute in override_attributes:
                    value = getattr(override_fn.spec, attribute, None)
                    if value:
                        setattr(function_object.spec, attribute, value)

        run_db = get_run_db_instance(db_session)
        function_object.set_db_connection(run_db, True)
        logger.info("func:\n{}".format(function_object.to_yaml()))
        # function_object.spec.rundb = "http://mlrun-api:8080"

        schedule = data.get("schedule")
        if schedule:
            job_id = get_scheduler().add(schedule, function_object, (task,))
            get_db().store_schedule(db_session, data)
            resp = {"schedule": schedule, "id": job_id}
        else:
            resp = function_object.run(task, watch=False)
            logger.info("resp: %s", resp.to_yaml())
    except Exception as err:
        logger.error(traceback.format_exc())
        log_and_raise(
            HTTPStatus.BAD_REQUEST,
            reason="runtime error: {}".format(err))

    if not isinstance(resp, dict):
        resp = resp.to_dict()
    return {
        "data": resp,
    }
def _parse_submit_run_body(db_session: Session, data):
    """Parse a submit-run request body into a (function, task) pair.

    The function may be given inline (``function`` dict), by reference
    (``functionUrl``), or both — in which case the dict's spec fields are
    merged onto the stored function as overrides.

    :param db_session: DB session used to resolve a stored function record.
    :param data: request body dict with ``task`` and ``function``/``functionUrl``.
    :returns: tuple of (function object, task dict).
    :raises: via ``log_and_raise`` — BAD_REQUEST for a malformed body,
        NOT_FOUND when the referenced function is missing.
    """
    task = data.get("task")
    function_dict = data.get("function")
    function_url = data.get("functionUrl")
    # the task spec may carry the function reference itself
    if not function_url and task:
        function_url = get_in(task, "spec.function")
    if not (function_dict or function_url) or not task:
        log_and_raise(
            HTTPStatus.BAD_REQUEST.value,
            reason="bad JSON, need to include function/url and task objects",
        )

    # TODO: block exec for function["kind"] in ["", "local] (must be a
    # remote/container runtime)

    if function_dict and not function_url:
        # fully inline function definition — nothing to look up
        function = new_function(runtime=function_dict)
    else:
        if "://" in function_url:
            # full URL: import the function definition from the remote source
            function = import_function(url=function_url)
        else:
            # function URI: resolve it from the stored function records
            project, name, tag, hash_key = parse_function_uri(function_url)
            function_record = get_db().get_function(
                db_session, name, project, tag, hash_key
            )
            if not function_record:
                log_and_raise(
                    HTTPStatus.NOT_FOUND.value,
                    reason="runtime error: function {} not found".format(function_url),
                )
            function = new_function(runtime=function_record)

        if function_dict:
            # The purpose of the function dict is to enable the user to override
            # configurations of the existing function without modifying it - to
            # do that we're creating a function object from the request function
            # dict and assign values from it to the main function object
            override_function = new_function(runtime=function_dict, kind=function.kind)
            for attribute in [
                "volumes",
                "volume_mounts",
                "env",
                "resources",
                "image_pull_policy",
                "replicas",
            ]:
                override_value = getattr(override_function.spec, attribute, None)
                if override_value:
                    if attribute == "env":
                        # env vars merge by name rather than wholesale replace
                        for env_dict in override_value:
                            function.set_env(env_dict["name"], env_dict["value"])
                    elif attribute == "volumes":
                        # volumes are identified by name, so they can be merged
                        function.spec.update_vols_and_mounts(override_value, [])
                    elif attribute == "volume_mounts":
                        # volume mounts don't have a well defined identifier
                        # (like name for volume) so we can't merge, only override
                        function.spec.volume_mounts = override_value
                    elif attribute == "resources":
                        # don't override if there are limits and requests
                        # but both are empty
                        if override_value.get("limits", {}) or override_value.get(
                            "requests", {}
                        ):
                            setattr(function.spec, attribute, override_value)
                    else:
                        setattr(function.spec, attribute, override_value)
    return function, task