def test_render_jinja_mapping(fx_mk_temp_dir):

    mock_jinja_data = {
        "functions": [{"x_api_name": "fn_mock_function_1"}, {"x_api_name": "fn_mock_function_2"}],
        "export_data": {"server_version": {"version": "35.0.0"}}
    }

    jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template")

    jinja_mapping_dict = {
        "MANIFEST.in": ("MANIFEST.in.jinja2", mock_jinja_data),
        "README.md": ("README.md.jinja2", mock_jinja_data),
        "setup.py": ("setup.py.jinja2", mock_jinja_data),
        "tox.ini": ("tox.ini.jinja2", mock_jinja_data),
        "Dockerfile": ("Dockerfile.jinja2", mock_jinja_data),
        "entrypoint.sh": ("entrypoint.sh.jinja2", mock_jinja_data),
        "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", mock_jinja_data),
        "data": {},
        "icons": {
            "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
            "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
        },
        "doc": {
            "screenshots": {
                "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
            }
        },
        "test_package": {
            "__init__.py": ("package/__init__.py.jinja2", mock_jinja_data),
            "LICENSE": ("package/LICENSE.jinja2", mock_jinja_data),
            "components": {
                "__init__.py": ("package/components/__init__.py.jinja2", mock_jinja_data),
            },
            "util": {
                "data": {
                    "export.res": ("package/util/data/export.res.jinja2", mock_jinja_data)
                },
                "__init__.py": ("package/util/__init__.py.jinja2", mock_jinja_data),
                "config.py": ("package/util/config.py.jinja2", mock_jinja_data),
                "customize.py": ("package/util/customize.py.jinja2", mock_jinja_data),
                "selftest.py": ("package/util/selftest.py.jinja2", mock_jinja_data),
            }
        }
    }

    CmdCodegen.render_jinja_mapping(jinja_mapping_dict, jinja_env, mock_paths.TEST_TEMP_DIR, mock_paths.TEST_TEMP_DIR)

    files_in_dir = sorted(os.listdir(mock_paths.TEST_TEMP_DIR))
    assert files_in_dir == ['Dockerfile', 'MANIFEST.in', 'README.md', 'apikey_permissions.txt', 'data', 'doc',
                            'entrypoint.sh', 'icons', 'setup.py', 'test_package', 'tox.ini']

    files_in_icons_dir = sorted(os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "icons")))
    assert files_in_icons_dir == ['app_logo.png', 'company_logo.png']

    files_in_test_package = sorted(os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "test_package")))
    assert files_in_test_package == ['LICENSE', '__init__.py', 'components', 'util']

    files_in_util = sorted(os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "test_package", "util")))
    assert files_in_util == ['__init__.py', 'config.py', 'customize.py', 'data', 'selftest.py']

    files_in_util_data = sorted(os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "test_package", package_helpers.PATH_UTIL_DATA_DIR)))
    assert files_in_util_data == ['export.res']

    files_in_components = sorted(os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "test_package", "components")))
    assert files_in_components == ['__init__.py']

    customize_py = sdk_helpers.read_file(os.path.join(mock_paths.TEST_TEMP_DIR, "test_package", "util", "customize.py"))
    assert ' "functions": [u"fn_mock_function_1", u"fn_mock_function_2"],\n' in customize_py

def _gen_package(args, setup_py_attributes={}):

    LOG.info("Generating codegen package...")

    if not sdk_helpers.is_valid_package_name(args.package):
        raise SDKException(u"'{0}' is not a valid package name".format(args.package))

    # Strip off version information, if present in package base folder, to get the package name.
    package_name = re.split(VERSION_REGEX, args.package, 1)[0]

    # Get base version if we are running against a package base folder with version.
    base_version = ''.join(re.split(package_name, args.package))

    # Get output_base, use args.output if defined, else current directory
    output_base = args.output if args.output else os.curdir
    output_base = os.path.abspath(output_base)

    # If --exportfile is specified, read org_export from that file
    if args.exportfile:
        LOG.info("Using local export file: %s", args.exportfile)
        org_export = sdk_helpers.read_local_exportfile(args.exportfile)

    else:
        # Instantiate connection to the Resilient Appliance
        res_client = sdk_helpers.get_resilient_client()

        # Generate + get latest export from Resilient Server
        org_export = sdk_helpers.get_latest_org_export(res_client)

    # Get data required for Jinja2 templates from export
    jinja_data = sdk_helpers.get_from_export(org_export,
                                             message_destinations=args.messagedestination,
                                             functions=args.function,
                                             workflows=args.workflow,
                                             rules=args.rule,
                                             fields=args.field,
                                             artifact_types=args.artifacttype,
                                             datatables=args.datatable,
                                             tasks=args.task,
                                             scripts=args.script)

    # Get 'minified' version of the export. This is used in customize.py
    jinja_data["export_data"] = sdk_helpers.minify_export(
        org_export,
        message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
        functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
        workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
        rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
        fields=jinja_data.get("all_fields"),
        artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
        datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
        tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
        phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
        scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

    # Add package_name to jinja_data
    jinja_data["package_name"] = package_name

    # Add version
    jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

    # Validate we have write permissions
    sdk_helpers.validate_dir_paths(os.W_OK, output_base)

    # Join package_name to output base (add base version if running against a folder which includes a version).
    output_base = os.path.join(output_base, package_name + base_version)

    # If the output_base directory does not exist, create it
    if not os.path.exists(output_base):
        os.makedirs(output_base)

    # Instantiate Jinja2 Environment with path to Jinja2 templates
    jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template")

    # This dict maps our package file structure to Jinja2 templates
    package_mapping_dict = {
        "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
        "README.md": ("README.md.jinja2", jinja_data),
        "setup.py": ("setup.py.jinja2", jinja_data),
        "tox.ini": ("tox.ini.jinja2", jinja_data),
        "Dockerfile": ("Dockerfile.jinja2", jinja_data),
        "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
        "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
        "data": {},
        "icons": {
            "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
            "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
        },
        "doc": {
            "screenshots": {
                "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
            }
        },
        package_name: {
            "__init__.py": ("package/__init__.py.jinja2", jinja_data),
            "LICENSE": ("package/LICENSE.jinja2", jinja_data),
            "components": {
                "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
            },
            "util": {
                "data": {
                    "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                },
                "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                "config.py": ("package/util/config.py.jinja2", jinja_data),
                "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
            }
        }
    }

    # If there are Functions, add a 'tests' directory
    if jinja_data.get("functions"):
        package_mapping_dict["tests"] = {}

    # Loop each Function
    for f in jinja_data.get("functions"):
        # Add package_name to function data
        f["package_name"] = package_name

        # Generate the function component file name
        file_name = u"funct_{0}.py".format(f.get("export_key"))

        # Add to 'components' directory
        package_mapping_dict[package_name]["components"][file_name] = ("package/components/function.py.jinja2", f)

        # Add to 'tests' directory
        package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

    for w in jinja_data.get("workflows"):
        # Generate wf_xx.md file name
        file_name = u"wf_{0}.md".format(w.get(ResilientObjMap.WORKFLOWS))

        # Add workflow to data directory
        package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

    newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
        jinja_mapping_dict=package_mapping_dict,
        jinja_env=jinja_env,
        target_dir=output_base,
        package_dir=output_base)

    # Log new and skipped files
    if newly_generated_files:
        LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

    if skipped_files:
        LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

    LOG.info("'codegen' complete for '%s'", package_name)

def _set_version(args):

    new_version = args.set_version

    if not sdk_helpers.is_valid_version_syntax(new_version):
        raise SDKException("{0} is not a valid version".format(new_version))

    new_version_int = list(map(int, re.findall(r"\d+", new_version)))

    # Get absolute path_to_src
    path_to_src = os.path.abspath(args.package)

    # Get path to setup.py file
    path_setup_py_file = os.path.join(path_to_src, package_helpers.BASE_NAME_SETUP_PY)

    # Parse the setup.py file
    setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    package_name = setup_py_attributes.get("name", "")

    LOG.info("Setting Resilient Platform version for %s to %s", package_name, new_version)

    # Get the customize file location.
    path_customize_py = package_helpers.get_configuration_py_file_path("customize", setup_py_attributes)

    # Get customize.py ImportDefinition
    customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(path_customize_py)

    old_version = customize_py_import_definition["server_version"]["version"]

    LOG.info("Old Version: %s", old_version)
    LOG.info("New Version: %s", new_version)

    # Set the new version
    customize_py_import_definition["server_version"]["version"] = new_version
    customize_py_import_definition["server_version"]["major"] = new_version_int[0]
    customize_py_import_definition["server_version"]["minor"] = new_version_int[1]
    customize_py_import_definition["server_version"]["build_number"] = new_version_int[2]

    LOG.info("Loading old customize.py file")

    # Load the customize.py module
    customize_py_module = package_helpers.load_customize_py_module(path_customize_py, warn=False)

    # Get the 'old_params' from customize.py
    old_params = customize_py_module.codegen_reload_data()

    # Rename the old customize.py with .bak
    path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

    # If a local export file exists, save it to a .bak file.
    # (Older packages may not have the /util/data/export.res file)
    # Figure out the path of the files first
    dir_customize_py = os.path.dirname(path_customize_py)
    path_local_export_res = os.path.join(dir_customize_py, package_helpers.PATH_LOCAL_EXPORT_RES)

    path_local_export_res_bak = None
    if os.path.isfile(path_local_export_res):
        path_local_export_res_bak = sdk_helpers.rename_to_bak_file(path_local_export_res)

    try:
        jinja_data = sdk_helpers.get_from_export(customize_py_import_definition,
                                                 message_destinations=old_params.get("message_destinations"),
                                                 functions=old_params.get("functions"),
                                                 workflows=old_params.get("workflows"),
                                                 rules=old_params.get("actions"),
                                                 fields=old_params.get("incident_fields"),
                                                 artifact_types=old_params.get("incident_artifact_types"),
                                                 datatables=old_params.get("datatables"),
                                                 tasks=old_params.get("automatic_tasks"),
                                                 scripts=old_params.get("scripts"))

        jinja_data["export_data"] = sdk_helpers.minify_export(
            customize_py_import_definition,
            message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
            functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
            workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
            rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
            fields=jinja_data.get("all_fields"),
            artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
            datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
            tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
            phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
            scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        # Instantiate Jinja2 Environment with path to Jinja2 templates for customize.py
        jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template/package/util")
        jinja_template = jinja_env.get_template("customize.py.jinja2")

        LOG.info("Writing new customize.py file")

        # Render & write jinja2 template
        jinja_rendered_text = jinja_template.render(jinja_data)
        sdk_helpers.write_file(path_customize_py, jinja_rendered_text)

        # Instantiate Jinja2 Environment with path to Jinja2 templates for /util/data/export.res
        # jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template/package/util/data")
        jinja_template = jinja_env.get_template("/data/export.res.jinja2")

        LOG.debug("Writing new /util/data/export.res file")

        # Render jinja2 template
        jinja_rendered_text = jinja_template.render(jinja_data)

        # Make sure the /util/data directory exists; create it if it does not
        dir_local_export_res = os.path.dirname(path_local_export_res)
        if not os.path.exists(dir_local_export_res):
            os.makedirs(dir_local_export_res)

        # Write the file
        sdk_helpers.write_file(path_local_export_res, jinja_rendered_text)

        LOG.info("'dev --set-version' complete for '%s'", package_name)

    except Exception as err:
        LOG.error(u"Error running resilient-sdk dev --set-version\n\nERROR:%s", err)

    # A finally block is required as the user may kill the process with a keyboard interrupt
    finally:
        # If an error occurred and customize.py does not exist, rename the backup file back to the original
        if not os.path.isfile(path_customize_py):
            LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
            sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)

        if path_local_export_res_bak and not os.path.isfile(path_local_export_res):
            LOG.info(u"An error occurred. Renaming /util/data/export.res.bak to export.res")
            sdk_helpers.rename_file(path_local_export_res_bak, package_helpers.BASE_NAME_LOCAL_EXPORT_RES)

def _gen_package(args, setup_py_attributes={}):

    LOG.info("Generating codegen package...")

    sdk_helpers.is_python_min_supported_version()

    if os.path.exists(args.package) and not args.reload:
        raise SDKException(u"'{0}' already exists. Add --reload flag to regenerate it".format(args.package))

    if not sdk_helpers.is_valid_package_name(args.package):
        raise SDKException(u"'{0}' is not a valid package name".format(args.package))

    # The package_name will be specified in the args
    package_name = args.package

    # Get output_base, use args.output if defined, else current directory
    output_base = args.output if args.output else os.curdir
    output_base = os.path.abspath(output_base)

    # If --exportfile is specified, read org_export from that file
    if args.exportfile:
        LOG.info("Using local export file: %s", args.exportfile)
        org_export = sdk_helpers.read_local_exportfile(args.exportfile)

    else:
        # Instantiate connection to the Resilient Appliance
        res_client = sdk_helpers.get_resilient_client(path_config_file=args.config)

        # Generate + get latest export from Resilient Server
        org_export = sdk_helpers.get_latest_org_export(res_client)

    # Get data required for Jinja2 templates from export
    jinja_data = sdk_helpers.get_from_export(org_export,
                                             message_destinations=args.messagedestination,
                                             functions=args.function,
                                             workflows=args.workflow,
                                             rules=args.rule,
                                             fields=args.field,
                                             artifact_types=args.artifacttype,
                                             datatables=args.datatable,
                                             tasks=args.task,
                                             scripts=args.script,
                                             incident_types=args.incidenttype,
                                             playbooks=args.playbook)

    # Get 'minified' version of the export. This is used in customize.py
    jinja_data["export_data"] = sdk_helpers.minify_export(
        org_export,
        message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
        functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
        workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
        rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
        fields=jinja_data.get("all_fields"),
        artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
        datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
        tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
        phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
        scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")),
        incident_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_TYPES, jinja_data.get("incident_types")),
        playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, jinja_data.get("playbooks")))

    # Add package_name to jinja_data
    jinja_data["package_name"] = package_name

    # Add version
    jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

    jinja_data["resilient_libraries_version"] = sdk_helpers.get_resilient_libraries_version_to_use()

    # Validate we have write permissions
    sdk_helpers.validate_dir_paths(os.W_OK, output_base)

    if not args.reload:
        # If this is not a reload, join package_name to output base
        output_base = os.path.join(output_base, package_name)

    # If the output_base directory does not exist, create it
    if not os.path.exists(output_base):
        os.makedirs(output_base)

    # Instantiate Jinja2 Environment with path to Jinja2 templates
    jinja_env = sdk_helpers.setup_jinja_env(constants.PACKAGE_TEMPLATE_PATH)

    # This dict maps our package file structure to Jinja2 templates
    package_mapping_dict = {
        "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
        "README.md": ("README.md.jinja2", jinja_data),
        "setup.py": ("setup.py.jinja2", jinja_data),
        "tox.ini": ("tox.ini.jinja2", jinja_data),
        "Dockerfile": ("Dockerfile.jinja2", jinja_data),
        "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
        "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
        "data": {},
        "icons": {
            "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
            "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
        },
        "doc": {
            "screenshots": {
                "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
            }
        },
        package_name: {
            "__init__.py": ("package/__init__.py.jinja2", jinja_data),
            "LICENSE": ("package/LICENSE.jinja2", jinja_data),
            "components": {
                "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
            },
            "util": {
                "data": {
                    "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                },
                "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                "config.py": ("package/util/config.py.jinja2", jinja_data),
                "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
            }
        }
    }

    # If there are Functions, add a 'tests' and a 'payload_samples' directory (if in dev mode)
    if jinja_data.get("functions"):
        package_mapping_dict["tests"] = {}
        package_mapping_dict[package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR] = {}

    # Get a list of function names in export.
    fn_names = [f.get(ResilientObjMap.FUNCTIONS) for f in jinja_data.get("functions")]

    # Loop each Function
    for f in jinja_data.get("functions"):
        # Add package_name to function data
        f["package_name"] = package_name

        # Get function name
        fn_name = f.get(ResilientObjMap.FUNCTIONS)

        # Generate the function component file name.
        # Don't add the prefix if the function name already begins with "func_" or "funct_".
        if re.search(r"^(func|funct)_", fn_name):
            file_name = u"{0}.py".format(fn_name)
        else:
            file_name = u"funct_{0}.py".format(fn_name)

            # Check if file_name without extension already exists in the function names list.
            if os.path.splitext(file_name)[0] in fn_names:
                raise SDKException(u"File name '{0}' already in use, please rename the function '{1}'."
                                   .format(file_name, fn_name))

        # Add the 'atomic function' template to the 'components' directory
        package_mapping_dict[package_name]["components"][file_name] = ("package/components/atomic_function.py.jinja2", f)

        # Add to 'tests' directory
        package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

        # Add a 'payload_samples/fn_name' directory and the files to it
        CmdCodegen.add_payload_samples(package_mapping_dict, fn_name, f)

    # Get a list of workflow names in export.
    wf_names = [w.get(ResilientObjMap.WORKFLOWS) for w in jinja_data.get("workflows")]

    for w in jinja_data.get("workflows"):
        # Get workflow name
        wf_name = w.get(ResilientObjMap.WORKFLOWS)

        # Generate the wf_xx.md file name.
        # Don't add the prefix if the workflow name already begins with "wf_".
        if re.search(r"^wf_", wf_name):
            file_name = u"{0}.md".format(wf_name)
        else:
            file_name = u"wf_{0}.md".format(wf_name)

            # Check if file_name without extension already exists in the workflow names list.
            if os.path.splitext(file_name)[0] in wf_names:
                raise SDKException(u"File name '{0}' already in use, please recreate the workflow '{1}'."
                                   .format(file_name, wf_name))

        # Add workflow to data directory
        package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

    newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
        jinja_mapping_dict=package_mapping_dict,
        jinja_env=jinja_env,
        target_dir=output_base,
        package_dir=output_base)

    # Log new and skipped files
    if newly_generated_files:
        LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

    if skipped_files:
        LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

    LOG.info("'codegen' complete for '%s'", package_name)

    return output_base

def execute_command(self, args):
    LOG.debug("docgen called with %s", args)

    # Set docgen name for SDKException
    SDKException.command_ran = self.CMD_NAME

    # Get absolute path_to_src
    path_to_src = os.path.abspath(args.p)

    LOG.debug("Path to project: %s", path_to_src)

    # Instantiate Jinja2 Environment with path to Jinja2 templates
    jinja_env = sdk_helpers.setup_jinja_env("data/docgen/templates")

    # Load the Jinja2 Templates
    readme_template = jinja_env.get_template(README_TEMPLATE_NAME)

    # Generate path to setup.py file
    path_setup_py_file = os.path.join(path_to_src, package_helpers.BASE_NAME_SETUP_PY)

    try:
        # Ensure we have read permissions for setup.py
        sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file)
    except SDKException as err:
        err.message += "\nEnsure you are in the directory of the package you want to run docgen for"
        raise err

    # Parse the setup.py file
    setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    package_name = setup_py_attributes.get("name", "")

    # Generate paths to other required directories + files
    path_customize_py_file = os.path.join(path_to_src, package_name, package_helpers.PATH_CUSTOMIZE_PY)
    path_config_py_file = os.path.join(path_to_src, package_name, package_helpers.PATH_CONFIG_PY)
    path_readme = os.path.join(path_to_src, package_helpers.BASE_NAME_README)
    path_screenshots_dir = os.path.join(path_to_src, package_helpers.PATH_SCREENSHOTS)
    path_payload_samples_dir = os.path.join(path_to_src, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

    # Ensure each required file exists and we have read permissions for it
    sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file, path_customize_py_file, path_config_py_file)

    # Check doc/screenshots directory exists, if not, create it + copy default screenshot
    if not os.path.isdir(path_screenshots_dir):
        os.makedirs(path_screenshots_dir)
        shutil.copy(package_helpers.PATH_DEFAULT_SCREENSHOT, path_screenshots_dir)

    # Get the resilient_circuits dependency string from setup.py file
    res_circuits_dep_str = package_helpers.get_dependency_from_install_requires(setup_py_attributes.get("install_requires"), "resilient_circuits")

    if not res_circuits_dep_str:
        res_circuits_dep_str = package_helpers.get_dependency_from_install_requires(setup_py_attributes.get("install_requires"), "resilient-circuits")

    # Get ImportDefinition from customize.py
    customize_py_import_def = package_helpers.get_import_definition_from_customize_py(path_customize_py_file)

    # Parse the app.configs from the config.py file
    jinja_app_configs = package_helpers.get_configs_from_config_py(path_config_py_file)

    # Get field names from ImportDefinition
    field_names = []
    for f in customize_py_import_def.get("fields", []):
        f_export_key = f.get("export_key")

        if "incident/" in f_export_key and f_export_key not in IGNORED_INCIDENT_FIELDS:
            field_names.append(f.get(ResilientObjMap.FIELDS, ""))

    # Get data from ImportDefinition
    import_def_data = sdk_helpers.get_from_export(
        customize_py_import_def,
        message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, customize_py_import_def.get("message_destinations")),
        functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, customize_py_import_def.get("functions")),
        workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, customize_py_import_def.get("workflows")),
        rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, customize_py_import_def.get("actions")),
        fields=field_names,
        artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, customize_py_import_def.get("incident_artifact_types")),
        datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, customize_py_import_def.get("types")),
        tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, customize_py_import_def.get("automatic_tasks")),
        scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, customize_py_import_def.get("scripts")),
        playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, customize_py_import_def.get("playbooks", [])))

    # Lists we use in Jinja Templates
    jinja_functions = self._get_function_details(import_def_data.get("functions", []), import_def_data.get("workflows", []))
    jinja_scripts = self._get_script_details(import_def_data.get("scripts", []))
    jinja_rules = self._get_rule_details(import_def_data.get("rules", []))
    jinja_datatables = self._get_datatable_details(import_def_data.get("datatables", []))
    jinja_custom_fields = self._get_custom_fields_details(import_def_data.get("fields", []))
    jinja_custom_artifact_types = self._get_custom_artifact_details(import_def_data.get("artifact_types", []))
    jinja_playbooks = self._get_playbook_details(import_def_data.get("playbooks", []))

    # Other variables for Jinja Templates
    package_name_dash = package_name.replace("_", "-")
    server_version = customize_py_import_def.get("server_version", {})
    supported_app = sdk_helpers.does_url_contain(setup_py_attributes.get("url", ""), "ibm.com/mysupport")

    # See if a payload_samples dir exists and use the contents for function results
    try:
        sdk_helpers.validate_dir_paths(os.R_OK, path_payload_samples_dir)

        for f in jinja_functions:
            fn_name = f.get("x_api_name")
            path_payload_samples_fn_name = os.path.join(path_payload_samples_dir, fn_name)
            path_output_json_example = os.path.join(path_payload_samples_fn_name, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)

            try:
                sdk_helpers.validate_file_paths(os.R_OK, path_output_json_example)
                f["results"] = sdk_helpers.read_json_file(path_output_json_example)
            except SDKException as e:
                sdk_helpers.handle_file_not_found_error(
                    e, u"Error getting results. No '{0}' file found for '{1}'.".format(
                        package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE, fn_name))

    except SDKException as e:
        sdk_helpers.handle_file_not_found_error(
            e, u"Error getting results. No '{0}' directory found.".format(
                package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE))

    LOG.info("Rendering README for %s", package_name_dash)

    # Render the README Jinja2 Template with parameters
    rendered_readme = readme_template.render({
        "name_underscore": package_name,
        "name_dash": package_name_dash,
        "display_name": setup_py_attributes.get("display_name", package_name),
        "short_description": setup_py_attributes.get("description"),
        "long_description": setup_py_attributes.get("long_description"),
        "version": setup_py_attributes.get("version"),
        "server_version": server_version.get("version"),
        "all_dependencies": setup_py_attributes.get("install_requires", []),
        "res_circuits_dependency_str": res_circuits_dep_str,
        "author": setup_py_attributes.get("author"),
        "support_url": setup_py_attributes.get("url"),
        "supported_app": supported_app,
        "app_configs": jinja_app_configs[1],
        "functions": jinja_functions,
        "scripts": jinja_scripts,
        "rules": jinja_rules,
        "datatables": jinja_datatables,
        "custom_fields": jinja_custom_fields,
        "custom_artifact_types": jinja_custom_artifact_types,
        "playbooks": jinja_playbooks,
        "placeholder_string": constants.DOCGEN_PLACEHOLDER_STRING
    })

    # Create a backup of the README if needed
    sdk_helpers.rename_to_bak_file(path_readme, package_helpers.PATH_DEFAULT_README)

    LOG.info("Writing README to: %s", path_readme)

    # Write the new README
    sdk_helpers.write_file(path_readme, rendered_readme)

def test_setup_jinja_env():
    jinja_env = sdk_helpers.setup_jinja_env(mock_paths.TEST_TEMP_DIR)
    assert isinstance(jinja_env, jinja2.Environment)
    assert jinja_env.loader.package_path == mock_paths.TEST_TEMP_DIR