def main(self):
    """Entry point of the jinjamator CLI.

    Creates the per-user directory layout, extends ``sys.path`` with the
    user's python resources, then either starts the daemon (``daemonize``)
    or runs a single task interactively.

    BUG FIX: on a failed ``task.load()`` the error handler logged
    "… -> exiting" but fell through and called ``task.run()`` anyway;
    it now returns after logging.
    """
    # Make sure the expected user directory skeleton exists.
    for d in [
        "environments",
        "logs",
        "tasks",
        "uploads",
        "aaa",
        "conf.d",
        "resources/python",
    ]:
        os.makedirs(
            os.path.sep.join([self._configuration["jinjamator_user_directory"], d]),
            exist_ok=True,
        )
    # User-provided python modules win over installed ones.
    sys.path.insert(
        0,
        os.path.join(
            self._configuration["jinjamator_user_directory"], "resources/python"
        ),
    )
    if self._configuration["daemonize"]:
        from jinjamator.daemon import run as app_run

        app_run(self._configuration)
    else:
        # legacy cli task
        from jinjamator.task import JinjamatorTask

        task = JinjamatorTask("interactive")
        if self._configuration["global_defaults"]:
            task.configuration.merge_yaml(self._configuration["global_defaults"])
        # public configuration first, then private (internal) configuration
        task.configuration.merge_dict(self.configuration)
        task._configuration.merge_dict(self._configuration)
        task.load_output_plugin(
            self.configuration["output_plugin"],
            self._configuration["global_output_plugins_base_dirs"],
        )
        try:
            task.load(self._configuration["taskdir"])
        except ValueError:
            if os.path.isdir(self._configuration["taskdir"]):
                self._log.error(
                    f'No Tasklets found in {self._configuration["taskdir"]} -> exiting'
                )
            else:
                self._log.error(
                    f'Task directory {self._configuration["taskdir"]} not found -> exiting'
                )
            # BUG FIX: do not run the task when loading failed
            return
        task.run()
def get(self): """ Returns the json-schema or the whole alpacajs configuration data for the task """ args = task_arguments.parse_args(request) schema_type = args.get("schema-type", "full") environment_site = args.get( "preload-defaults-from-site") relative_task_path = request.endpoint.replace( "api.", "") inner_task = JinjamatorTask() inner_task._configuration.merge_dict( app.config["JINJAMATOR_FULL_CONFIGURATION"] ) inner_task.load(relative_task_path) if environment_site not in [None, "None"]: inner_task._configuration[ "jinjamator_site_path"] = site_path_by_name.get( environment_site) inner_task._configuration[ "jinjamator_site_name"] = environment_site inner_task.configuration.merge_yaml( "{}/defaults.yaml".format( site_path_by_name.get( environment_site))) full_schema = inner_task.get_jsonform_schema() if schema_type in ["", "full"]: response = jsonify(full_schema) elif schema_type in ["schema"]: response = jsonify( full_schema.get("schema", {})) elif schema_type in ["data"]: response = jsonify( full_schema.get("data", {})) elif schema_type in ["options"]: response = jsonify( full_schema.get("options", {})) elif schema_type in ["view"]: response = jsonify( full_schema.get("view", {})) del inner_task return response
def discover_tasks(app): """ Discovers all tasks in JINJAMATOR_TASKS_BASE_DIRECTORIES and registers a model and a corresponding REST endpoint with get and post below /tasks. """ task_arguments.add_argument( "preload-defaults-from-site", type=str, required=False, default="", choices=preload_defaults_from_site_choices, help= "Select site within environment to load defaults from, argument format is <environment_name>/<site_name>", ) for tasks_base_dir in app.config["JINJAMATOR_TASKS_BASE_DIRECTORIES"]: for file_ext in ["py", "j2"]: for tasklet_dir in glob.glob(os.path.join(tasks_base_dir, "**", f"*.{file_ext}"), recursive=True): task_dir = os.path.dirname(tasklet_dir) append = True for dir_chunk in task_dir.replace(tasks_base_dir, "").split( os.path.sep): # filter out hidden directories if dir_chunk.startswith(".") or dir_chunk in [ "__pycache__", "plugins", ]: append = False break dir_name = task_dir.replace(tasks_base_dir, "")[1:] gui = True if os.path.isfile(tasks_base_dir + os.path.sep + dir_name + os.path.sep + ".no_gui"): gui = False if append and dir_name not in available_tasks_by_path: task_id = xxhash.xxh64(task_dir).hexdigest() task_info = { "id": task_id, "path": dir_name, "base_dir": tasks_base_dir, "description": get_section_from_task_doc(task_dir) or "no description", "gui": gui, } available_tasks_by_path[dir_name] = task_info try: task = JinjamatorTask() log.debug(app.config["JINJAMATOR_FULL_CONFIGURATION"]) task._configuration.merge_dict( app.config["JINJAMATOR_FULL_CONFIGURATION"]) task.load( os.path.join(task_info["base_dir"], task_info["path"])) with app.app_context(): data = json.loads( jsonify(task.get_jsonform_schema() ["schema"]).data.decode("utf-8")) task_models[task_info["path"]] = api.schema_model( task_id, data) del task log.info(f"registered model for task {task_dir}") dynamic_role_name = f"task_{dir_name}" new_role = JinjamatorRole(name=dynamic_role_name) with app.app_context(): db.session.add(new_role) try: db.session.commit() except 
Exception: pass @ns.route(f"/{task_info['path']}", endpoint=task_info["path"]) class APIJinjamatorTask(Resource): @api.doc( f"get_task_{task_info['path'].replace(os.path.sep,'_')}_schema" ) @api.expect(task_arguments) @api.doc( params={ "Authorization": { "in": "header", "description": "A valid access token", } }) @require_role(role=or_( User.roles.any( JinjamatorRole.name == dynamic_role_name), User.roles.any( JinjamatorRole.name == "tasks_all"), )) def get(self): """ Returns the json-schema or the whole alpacajs configuration data for the task """ args = task_arguments.parse_args(request) schema_type = args.get("schema-type", "full") try: preload_data = json.loads( args.get("preload-data", "{}")) except TypeError: preload_data = {} preload_data = remove_redacted(preload_data)[1] environment_site = args.get( "preload-defaults-from-site") relative_task_path = request.endpoint.replace( "api.", "") inner_task = JinjamatorTask() inner_task._configuration.merge_dict( app.config["JINJAMATOR_FULL_CONFIGURATION"] ) inner_task.configuration.merge_dict( preload_data) inner_task.load(relative_task_path) if environment_site not in [None, "None", ""]: inner_task._configuration[ "jinjamator_site_path"] = site_path_by_name.get( environment_site) inner_task._configuration[ "jinjamator_site_name"] = environment_site env_name, site_name = environment_site.split( "/") roles = [ role["name"] for role in g._user.get("roles", []) ] if (f"environment_{env_name}|site_{site_name}" in roles or f"environments_all" in roles or f"administrator" in roles): inner_task.configuration.merge_yaml( "{}/defaults.yaml".format( site_path_by_name.get( environment_site))) else: abort( 403, f"User neither has no role environment_{env_name}|site_{site_name} nor environments_all nor administrator. 
Access denied.", ) full_schema = inner_task.get_jsonform_schema() if schema_type in ["", "full"]: response = jsonify(full_schema) elif schema_type in ["schema"]: response = jsonify( full_schema.get("schema", {})) elif schema_type in ["data"]: response = jsonify( full_schema.get("data", {})) elif schema_type in ["options"]: response = jsonify( full_schema.get("options", {})) elif schema_type in ["view"]: response = jsonify( full_schema.get("view", {})) del inner_task return response @api.doc( f"create_task_instance_for_{task_info['path'].replace(os.path.sep,'_')}" ) @api.expect(task_models[task_info["path"]], validate=False) @api.doc( params={ "Authorization": { "in": "header", "description": "A valid access token", } }) @require_role(role=or_( User.roles.any( JinjamatorRole.name == dynamic_role_name), User.roles.any( JinjamatorRole.name == "tasks_all"), )) def post(self): """ Creates an instance of the task and returns the job_id """ from jinjamator.task.celery import run_jinjamator_task from jinjamator.daemon.database import db relative_task_path = request.endpoint.replace( "api.", "") data = request.get_json() job_id = str(uuid.uuid4()) user_id = g._user["id"] job = run_jinjamator_task.apply_async( [ relative_task_path, data, data.get("output_plugin", "console"), user_id, ], task_id=job_id, created_by_user_id=user_id, ) db_job = list( db.session.query(DB_Job).filter( DB_Job.task_id == job.id)) db_job = db_job and db_job[0] if not db_job: db_job = DB_Job(job.id) db_job.status = "SCHEDULED" db_job.configuration = data db_job.jinjamator_task = relative_task_path db_job.created_by_user_id = user_id db.session.add(db_job) db.session.flush() db.session.commit() if data.get("output_plugin", "") == "json": timeout = app.config[ "JINJAMATOR_JSON_OUTPUT_PLUGIN_TIMEOUT"] while timeout > 0: db_job = list( db.session.query(DB_Job).filter( DB_Job.task_id == job.id)) db.session.flush() db.session.commit() if db_job[0].status not in [ "SCHEDULED", "PROGRESS", ]: 
log.debug(db_job[0].to_dict()) resp = Response( db_job[0].to_dict().get( "result").get("stdout")) resp.headers[ "Content-Type"] = "application/json" return resp sleep(0.2) timeout = timeout - 200 else: log.error( "Sync Task run failed -> Timeout") return jsonify({"job_id": job.id}) if task_info["description"]: post.__doc__ += task_info["description"] get.__doc__ += task_info["description"] except Exception as e: import traceback log.error( f"unable to register {task_dir}: {e} {traceback.format_exc()}" )
def get(self): """ Returns the json-schema or the whole alpacajs configuration data for the task """ args = task_arguments.parse_args(request) schema_type = args.get("schema-type", "full") try: preload_data = json.loads( args.get("preload-data", "{}")) except TypeError: preload_data = {} preload_data = remove_redacted(preload_data)[1] environment_site = args.get( "preload-defaults-from-site") relative_task_path = request.endpoint.replace( "api.", "") inner_task = JinjamatorTask() inner_task._configuration.merge_dict( app.config["JINJAMATOR_FULL_CONFIGURATION"] ) inner_task.configuration.merge_dict( preload_data) inner_task.load(relative_task_path) if environment_site not in [None, "None", ""]: inner_task._configuration[ "jinjamator_site_path"] = site_path_by_name.get( environment_site) inner_task._configuration[ "jinjamator_site_name"] = environment_site env_name, site_name = environment_site.split( "/") roles = [ role["name"] for role in g._user.get("roles", []) ] if (f"environment_{env_name}|site_{site_name}" in roles or f"environments_all" in roles or f"administrator" in roles): inner_task.configuration.merge_yaml( "{}/defaults.yaml".format( site_path_by_name.get( environment_site))) else: abort( 403, f"User neither has no role environment_{env_name}|site_{site_name} nor environments_all nor administrator. Access denied.", ) full_schema = inner_task.get_jsonform_schema() if schema_type in ["", "full"]: response = jsonify(full_schema) elif schema_type in ["schema"]: response = jsonify( full_schema.get("schema", {})) elif schema_type in ["data"]: response = jsonify( full_schema.get("data", {})) elif schema_type in ["options"]: response = jsonify( full_schema.get("options", {})) elif schema_type in ["view"]: response = jsonify( full_schema.get("view", {})) del inner_task return response
def run(path, task_data=False, **kwargs): """calls another jinjamator task""" if path == "../": tmp = _jinjamator.task_base_dir.split(os.path.sep) if tmp[0] == "": tmp[0] = os.path.sep path = os.path.join(*tmp[:-1]) parent_data = copy.deepcopy(_jinjamator.configuration._data) parent_private_data = copy.deepcopy(_jinjamator._configuration._data) output_plugin = ( kwargs.get("output_plugin", False) or parent_data.get("output_plugin", False) or "console" ) task = JinjamatorTask(parent_private_data.get("task_run_mode")) task._configuration.merge_dict(parent_private_data) task._parent_tasklet = _jinjamator._current_tasklet if parent_private_data.get("task_run_mode") == "background": backup = task._log.handlers[1].formatter._task task._parent_tasklet = backup._current_tasklet task._parent_task_id = id(backup) task._log.handlers[1].formatter._task = task if task_data: task.configuration.merge_dict( task_data, dict_strategy="merge", list_strategy="override", other_types_strategy="override", type_conflict_strategy="override", ) else: task.configuration.merge_dict(parent_data) task.configuration["output_plugin"] = output_plugin task._configuration["global_tasks_base_dirs"].insert(0, _jinjamator.task_base_dir) task.load(path) task.load_output_plugin( output_plugin, task._configuration.get("global_output_plugins_base_dirs") ) retval = task.run() if parent_private_data.get("task_run_mode") == "background": task._log.handlers[1].formatter._task = backup task._parent_tasklet = backup._parent_tasklet del task return retval
def run_jinjamator_task(self, path, data, output_plugin): """ Jinjamator Celery Task runner. """ self.update_state( state="PROGRESS", meta={ "status": "setting up jinjamator task run", "configuration": { "root_task_path": path }, }, ) formatter = CeleryLogFormatter() log_handler = CeleryLogHandler() log_handler.setLevel(logging.DEBUG) log_handler.setFormatter(formatter) log_handler.set_celery_task(self) log_handler.formatter.set_root_task_path(path) if "jinjamator_pre_run_tasks" in data: for pre_run_task in data["jinjamator_pre_run_tasks"]: task = JinjamatorTask() task._configuration._data["jinjamator_job_id"] = self.request.id log_handler.formatter.set_jinjamator_task(task) task._scheduler = self task._log.addHandler(log_handler) task._log.setLevel(logging.DEBUG) if "output_plugin" in pre_run_task["task"]: task.load_output_plugin(pre_run_task["task"]["output_plugin"]) else: task.load_output_plugin("console") task.configuration.merge_dict(pre_run_task["task"]["data"]) task._configuration.merge_dict( celery.conf["jinjamator_private_configuration"]) task.configuration.merge_dict(deepcopy(data)) task.load(pre_run_task["task"]["path"]) task._log.info("running pre run task {}".format( pre_run_task["task"]["path"])) if not task.run(): raise Exception("task failed") task._log.handlers.remove(log_handler) log_handler._task = None del task self.update_state( state="PROGRESS", meta={ "status": "running main task", "configuration": { "root_task_path": path } }, ) task = JinjamatorTask() task._configuration._data["jinjamator_job_id"] = self.request.id task._scheduler = self log_handler.formatter.set_jinjamator_task(task) task._log.setLevel(logging.DEBUG) task._log.addHandler(log_handler) task.load_output_plugin( output_plugin, celery.conf["jinjamator_private_configuration"] ["global_output_plugins_base_dirs"], ) task._configuration.merge_dict( celery.conf["jinjamator_private_configuration"]) task.configuration.merge_dict(data) task.load(path) if not task.run(): raise 
Exception("task failed") return { "status": "finished task", "stdout": task._stdout.getvalue(), "log": log_handler.contents, }
def discover_tasks(app): """ Discovers all tasks in JINJAMATOR_TASKS_BASE_DIRECTORIES and registers a model and a corresponding REST endpoint with get and post below /tasks. """ task_arguments.add_argument( "preload-defaults-from-site", type=str, required=False, default="", choices=preload_defaults_from_site_choices, help= "Select site within environment to load defaults from, argument format is <environment_name>/<site_name>", ) for tasks_base_dir in app.config["JINJAMATOR_TASKS_BASE_DIRECTORIES"]: for file_ext in ["py", "j2"]: for tasklet_dir in glob.glob(os.path.join(tasks_base_dir, "**", f"*.{file_ext}"), recursive=True): task_dir = os.path.dirname(tasklet_dir) append = True for dir_chunk in task_dir.replace(tasks_base_dir, "").split( os.path.sep): # filter out hidden directories if dir_chunk.startswith(".") or dir_chunk in [ "__pycache__" ]: append = False break dir_name = task_dir.replace(tasks_base_dir, "")[1:] if append and dir_name not in available_tasks_by_path: task_id = xxhash.xxh64(task_dir).hexdigest() task_info = { "id": task_id, "path": dir_name, "base_dir": tasks_base_dir, "description": get_section_from_task_doc(task_dir) or "no description", } available_tasks_by_path[dir_name] = task_info try: task = JinjamatorTask() log.debug(app.config["JINJAMATOR_FULL_CONFIGURATION"]) task._configuration.merge_dict( app.config["JINJAMATOR_FULL_CONFIGURATION"]) task.load( os.path.join(task_info["base_dir"], task_info["path"])) with app.app_context(): data = json.loads( jsonify(task.get_jsonform_schema() ["schema"]).data.decode("utf-8")) task_models[task_info["path"]] = api.schema_model( task_id, data) del task log.info(f"registred model for task {task_dir}") @ns.route(f"/{task_info['path']}", endpoint=task_info["path"]) class APIJinjamatorTask(Resource): @api.doc( f"get_task_{task_info['path'].replace(os.path.sep,'_')}_schema" ) @api.expect(task_arguments) def get(self): """ Returns the json-schema or the whole alpacajs configuration data for the task """ args 
= task_arguments.parse_args(request) schema_type = args.get("schema-type", "full") environment_site = args.get( "preload-defaults-from-site") relative_task_path = request.endpoint.replace( "api.", "") inner_task = JinjamatorTask() inner_task._configuration.merge_dict( app.config["JINJAMATOR_FULL_CONFIGURATION"] ) inner_task.load(relative_task_path) if environment_site not in [None, "None"]: inner_task._configuration[ "jinjamator_site_path"] = site_path_by_name.get( environment_site) inner_task._configuration[ "jinjamator_site_name"] = environment_site inner_task.configuration.merge_yaml( "{}/defaults.yaml".format( site_path_by_name.get( environment_site))) full_schema = inner_task.get_jsonform_schema() if schema_type in ["", "full"]: response = jsonify(full_schema) elif schema_type in ["schema"]: response = jsonify( full_schema.get("schema", {})) elif schema_type in ["data"]: response = jsonify( full_schema.get("data", {})) elif schema_type in ["options"]: response = jsonify( full_schema.get("options", {})) elif schema_type in ["view"]: response = jsonify( full_schema.get("view", {})) del inner_task return response @api.doc( f"create_task_instance_for_{task_info['path'].replace(os.path.sep,'_')}" ) @api.expect(task_models[task_info["path"]], validate=False) def post(self): """ Creates an instance of the task and returns the job_id """ from jinjamator.task.celery import ( run_jinjamator_task, ) from jinjamator.daemon.database import db relative_task_path = request.endpoint.replace( "api.", "") data = request.get_json() job_id = str(uuid.uuid4()) job = run_jinjamator_task.apply_async( [ relative_task_path, data, data.get("output_plugin", "console"), ], task_id=job_id, ) db_job = list( db.session.query(DB_Job).filter( DB_Job.task_id == job.id)) db_job = db_job and db_job[0] if not db_job: db_job = DB_Job(job.id) db_job.status = "SCHEDULED" db_job.configuration = data db_job.jinjamator_task = relative_task_path db.session.add(db_job) db.session.flush() 
db.session.commit() return jsonify({"job_id": job.id}) except Exception as e: import traceback log.error( f"unable to register {task_dir}: {e} {traceback.format_exc()}" )