def post(self, **kwargs):
    """
    API (POST) method to create a new instance.

    It requires authentication to be passed in the form of a token that has
    to be linked to an existing session (login) made by a user.

    :return: an object with the data for the created instance and an integer
      with the HTTP status code
    :rtype: Tuple(dict, integer)
    """
    data_schema = kwargs.get("schema", "solve_model_dag")

    if data_schema is not None:
        if data_schema in ("pulp", "solve_model_dag"):
            # these schemas are stored inside cornflow itself
            validate_and_continue(DataSchema(), kwargs["data"])
        else:
            # for the rest of the schemas: we need to ask airflow for the schema
            config = current_app.config
            marshmallow_obj = get_schema(config, data_schema)
            validate_and_continue(marshmallow_obj(), kwargs["data"])
    # else: no schema provided, no validation to do

    # if we're here, we validated (or skipped validation) and can create.
    # NOTE: the original returned early for the cornflow-stored schemas and
    # thereby skipped this audit log; now every successful create is logged.
    response = self.post_list(data=kwargs)
    log.info(f"User {self.get_user()} creates instance {response[0].id}")
    return response
def post(self, **kwargs):
    """
    API (POST) method to register a manually created execution.

    The execution is stored in ``manual`` state and attached to the
    authenticated user. If solution data is provided together with a known
    ``dag_name``, the data is validated against the matching schema first.

    :return: the created execution object and the HTTP status code
    :rtype: Tuple(dict, integer)
    """
    dag_name = kwargs.pop("dag_name", None)
    # Check data format
    data = kwargs.get("data")
    # TODO: create a function to validate and replace data/ execution_results
    # only check format if executions_results exist
    schema_to_check = dag_name if data is not None else None
    if schema_to_check is not None:
        if schema_to_check == "pulp":
            validator = DataSchema()
        else:
            validator = get_schema(
                current_app.config, schema_to_check, SOLUTION_SCHEMA
            )()
        validate_and_continue(validator, data)

    # we force the state to manual and attach the current user
    payload = dict(kwargs, state=EXEC_STATE_MANUAL, user_id=self.get_user_id())
    if data is not None:
        payload["data"] = data

    item = ExecutionModel(payload)
    item.save()
    log.info(f"User {self.get_user()} manually created the execution {item.id}")
    return item, 201
def put(self, idx, **req_data):
    """
    API method to write the results of the execution.

    It requires authentication to be passed in the form of a token that has
    to be linked to an existing session (login) made by the superuser created
    for the airflow webserver.

    :param str idx: ID of the execution
    :return: A dictionary with a message (body) and an integer with the HTTP
      status code
    :rtype: Tuple(dict, integer)
    """
    solution_schema = req_data.pop("solution_schema", "pulp")
    # TODO: the solution_schema maybe we should get it from the created
    #  execution_id? at least, check they have the same schema-name
    # Check data format
    data = req_data.get("data")
    checks = req_data.get("checks")
    if data is None:
        # only check format if executions_results exist
        solution_schema = None

    if solution_schema == "pulp":
        validate_and_continue(DataSchema(), data)
    elif solution_schema is not None:
        marshmallow_obj = get_schema(
            current_app.config, solution_schema, SOLUTION_SCHEMA
        )
        validate_and_continue(marshmallow_obj(), data)
        # marshmallow_obj().fields['jobs'].nested().fields['successors']

    execution = ExecutionModel.get_one_object(user=self.get_user(), idx=idx)
    if execution is None:
        raise ObjectDoesNotExist()

    state = req_data.get("state", EXEC_STATE_CORRECT)
    req_data.update(
        state=state,
        state_message=EXECUTION_STATE_MESSAGE_DICT[state],
        # because we do not want to store airflow's user:
        user_id=execution.user_id,
    )
    # newly validated data from marshmallow
    if data is not None:
        req_data["data"] = data
    if checks is not None:
        req_data["checks"] = checks

    execution.update(req_data)
    # TODO: is this save necessary?
    execution.save()
    return {"message": "results successfully saved"}, 200
def post(self, idx):
    """
    API method to copy the information stored in a case to a new instance.

    It requires authentication to be passed in the form of a token that has
    to be linked to an existing session (login) made by a user.

    :param int idx: ID of the case that has to be copied to an instance or
      instance and execution
    :return: an object with the instance or instance and execution ID that
      have been created and the status code
    :rtype: Tuple (dict, integer)
    """
    case = CaseModel.get_one_object(user=self.get_user(), idx=idx)
    if case is None:
        raise ObjectDoesNotExist()

    schema = case.schema
    payload = {
        # f-strings render a None description as "None" instead of raising
        # TypeError like string concatenation did
        # (assumes case.description is nullable — TODO confirm in the model)
        "name": f"instance_from_{case.name}",
        "description": f"Instance created from {case.description}",
        "data": case.data,
        "schema": schema,
    }

    if schema is not None:
        if schema in ("pulp", "solve_model_dag"):
            # these schemas are stored inside cornflow itself
            validate_and_continue(DataSchema(), payload["data"])
        else:
            # for the rest of the schemas: ask airflow for the schema
            marshmallow_obj = get_schema(current_app.config, schema)
            validate_and_continue(marshmallow_obj(), payload["data"])
    # else: no schema on the case, nothing to validate against

    # NOTE: the original returned early for the cornflow-stored schemas and
    # thereby skipped this audit log; now every successful create is logged.
    response = self.post_list(payload)
    log.info(
        f"User {self.get_user()} creates instance {response[0].id} from case {idx}"
    )
    return response
def post(self, **kwargs):
    """
    API method to create a new execution linked to an already existing instance.

    It requires authentication to be passed in the form of a token that has
    to be linked to an existing session (login) made by a user.

    :return: A dictionary with a message (error if authentication failed,
      error if data is not validated or the reference_id for the newly created
      execution if successful) and a integer wit the HTTP status code
    :rtype: Tuple(dict, integer)
    """

    def _abort_launch(execution, state, message):
        # Shared failure path (previously duplicated three times): log the
        # problem, record the failed state on the execution and raise.
        log.error(message)
        execution.update_state(state)
        raise AirflowError(
            error=message,
            payload=dict(
                message=EXECUTION_STATE_MESSAGE_DICT[state],
                state=state,
            ),
        )

    # TODO: should validation should be done even if the execution is not going to be run?
    # TODO: should the schema field be cross valdiated with the instance schema field?
    config = current_app.config
    if "schema" not in kwargs:
        kwargs["schema"] = "solve_model_dag"
    # TODO: review the order of these two operations
    # Get dag config schema and validate it
    marshmallow_obj = get_schema(config, kwargs["schema"], "config")
    validate_and_continue(marshmallow_obj(), kwargs["config"])

    execution, status_code = self.post_list(data=kwargs)
    instance = InstanceModel.get_one_object(
        user=self.get_user(), idx=execution.instance_id
    )
    if instance is None:
        raise ObjectDoesNotExist(error="The instance to solve does not exist")

    # this allows testing without airflow interaction:
    if request.args.get("run", "1") == "0":
        execution.update_state(EXEC_STATE_NOT_RUN)
        return execution, 201

    # We now try to launch the task in airflow
    af_client = Airflow.from_config(config)
    if not af_client.is_alive():
        _abort_launch(execution, EXEC_STATE_ERROR_START, "Airflow is not accessible")

    # ask airflow if dag_name exists
    schema = execution.schema
    schema_info = af_client.get_dag_info(schema)

    # Validate that instance and dag_name are compatible
    marshmallow_obj = get_schema(config, schema, INSTANCE_SCHEMA)
    validate_and_continue(marshmallow_obj(), instance.data)

    info = schema_info.json()
    if info["is_paused"]:
        _abort_launch(
            execution,
            EXEC_STATE_ERROR_START,
            "The dag exists but it is paused in airflow",
        )

    try:
        response = af_client.run_dag(execution.id, dag_name=schema)
    except AirflowError as err:
        _abort_launch(
            execution,
            EXEC_STATE_ERROR,
            "Airflow responded with an error: {}".format(err),
        )

    # if we succeed, we register the dag_run_id in the execution table:
    af_data = response.json()
    execution.dag_run_id = af_data["dag_run_id"]
    execution.update_state(EXEC_STATE_RUNNING)
    # f-string for consistency with the sibling endpoints' logging style
    log.info(f"User {self.get_user_id()} creates execution {execution.id}")
    return execution, 201