def test_set_version(fx_copy_fn_main_mock_integration, fx_get_sub_parser, fx_cmd_line_args_dev_set_version):
    """Verify CmdDev._set_version rewrites the server_version in customize.py.

    The mock integration ships with version 41.0.6783; after running
    _set_version the ImportDefinition should report 35.0.0 (the version
    presumably supplied by the fx_cmd_line_args_dev_set_version fixture
    via sys.argv — TODO confirm against the fixture).
    """
    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    path_fn_main_mock_integration = fx_copy_fn_main_mock_integration[1]

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(mock_integration_name)] = path_fn_main_mock_integration

    # Parse the setup.py file
    path_setup_py_file = os.path.join(path_fn_main_mock_integration, package_helpers.BASE_NAME_SETUP_PY)
    setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    # Get customize.py ImportDefinition
    path_customize_py = package_helpers.get_configuration_py_file_path("customize", setup_py_attributes)
    customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(path_customize_py)

    # Get the old_version
    old_version = customize_py_import_definition["server_version"]["version"]
    assert old_version == "41.0.6783"

    # Run _set_version
    cmd_dev = CmdDev(fx_get_sub_parser)
    args = cmd_dev.parser.parse_known_args()[0]
    cmd_dev._set_version(args)

    # Get the new_version by re-reading customize.py from disk
    customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(path_customize_py)
    new_version = customize_py_import_definition["server_version"]["version"]
    assert new_version == "35.0.0"
def test_add_tag_to_import_definition():
    """Smoke test: applying a tag to an ImportDefinition should not raise.

    TODO: add better mock data to test these
    """
    definition = package_helpers.get_import_definition_from_customize_py(
        mock_paths.MOCK_CUSTOMIZE_PY)

    # Result is intentionally unused — this only exercises the call path
    _ = package_helpers.add_tag_to_import_definition(
        "mock_tag_name", package_helpers.SUPPORTED_RES_OBJ_NAMES, definition)
def test_get_custom_fields_details():
    """CmdDocgen._get_custom_fields_details builds one details dict per custom incident field."""
    import_definition = package_helpers.get_import_definition_from_customize_py(
        mock_paths.MOCK_CUSTOMIZE_PY)
    import_def_data = sdk_helpers.get_from_export(
        import_definition, fields=["mock_field_number", "mock_field_text_area"])

    field_details = CmdDocgen._get_custom_fields_details(
        import_def_data.get("fields"))

    # Locate each mock field by its api_name
    field_one = next(x for x in field_details if x["api_name"] == "mock_field_number")
    field_two = next(x for x in field_details if x["api_name"] == "mock_field_text_area")

    # Expected values contain Thai unicode characters to verify unicode handling
    mock_field_one = {
        'api_name': u'mock_field_number',
        'label': u'Mock: ล ฦ ว ศ ษ ส ห ฬ อ field number',
        'type': u'number',
        'prefix': u'properties',
        'placeholder': u'-',
        'tooltip': u'a mock tooltip ล ฦ ว ศ ษ ส ห ฬ อ'
    }

    mock_field_two = {
        'api_name': u'mock_field_text_area',
        'label': u'Mock: Field Text Area ล ฦ ว ศ ษ ส ห ฬ อ',
        'type': u'textarea',
        'prefix': u'properties',
        'placeholder': u'-',
        'tooltip': u'a tooltip ล ฦ ว ศ ษ ส ห ฬ อ'
    }

    assert field_one == mock_field_one
    assert field_two == mock_field_two
def test_get_datatable_details():
    """CmdDocgen._get_datatable_details extracts name, anchor, api_name and columns of a datatable."""
    import_definition = package_helpers.get_import_definition_from_customize_py(
        mock_paths.MOCK_CUSTOMIZE_PY)
    import_def_data = sdk_helpers.get_from_export(
        import_definition, datatables=["mock_data_table"])

    datatable_details = CmdDocgen._get_datatable_details(
        import_def_data.get("datatables"))
    the_datatable = datatable_details[0]

    # NOTE(review): the expected 'simple_name' shows the non-ASCII
    # characters mapped to dashes, while 'anchor' keeps them
    mock_datatable = {
        'name': u'Mock: Data Table ล ฦ ว ศ ษ ส ห ฬ อ',
        'anchor': u'mock-data-table--ล-ฦ-ว-ศ-ษ-ส-ห-ฬ-อ',
        'api_name': u'mock_data_table',
        'simple_name': u'mock-data-table----------',
        'columns': [{
            'name': u'mock col one',
            'api_name': u'mock_col_one',
            'type': u'text',
            'tooltip': u'a tooltip ล ฦ ว ศ ษ ส ห ฬ อ'
        }, {
            'name': u'mock ล ฦ ว ศ ษ ส ห ฬ อ col two',
            'api_name': u'mok_col_two',
            'type': u'number',
            'tooltip': u'tooltip ล ฦ ว ศ ษ ส ห ฬ อ'
        }]
    }

    assert the_datatable == mock_datatable
def test_get_import_definition_from_customize_py():
    """The parsed ImportDefinition is a dict and preserves unicode function descriptions."""
    definition = package_helpers.get_import_definition_from_customize_py(mock_paths.MOCK_CUSTOMIZE_PY)
    assert isinstance(definition, dict)

    # Pick out the mock function by its export_key and check its description content
    fn_match = next(f for f in definition.get("functions") if f["export_key"] == "mock_function_one")
    description = fn_match.get("description")["content"]
    assert description == u"A mock description of mock_function_one with unicode: ล ฦ ว ศ ษ ส ห ฬ อ"
def test_get_rule_details():
    """CmdDocgen._get_rule_details extracts name, object type, triggered workflow and simple name."""
    definition = package_helpers.get_import_definition_from_customize_py(mock_paths.MOCK_CUSTOMIZE_PY)
    export_data = sdk_helpers.get_from_export(definition, rules=["Mock: Auto Rule"])

    details = CmdDocgen._get_rule_details(export_data.get("rules"))

    expected = {
        'name': u'Mock: Auto Rule',
        'object_type': u'incident',
        'workflow_triggered': u'mock_workflow_one',
        'simple_name': u'mock-auto-rule'
    }
    assert details[0] == expected
def test_get_fn_input_details():
    """CmdDocgen._get_fn_input_details returns the expected details dict for a function input."""
    definition = package_helpers.get_import_definition_from_customize_py(mock_paths.MOCK_CUSTOMIZE_PY)
    export_data = sdk_helpers.get_from_export(definition, functions=["mock_function_two"])

    the_function = export_data.get("functions")[0]
    input_details = CmdDocgen._get_fn_input_details(the_function)

    # Find the numeric input among all of the function's inputs
    number_input = next(i for i in input_details if i["api_name"] == "mock_input_number")

    expected = {
        'api_name': u'mock_input_number',
        'name': u'mock_input_number',
        'type': 'number',
        'required': 'Yes',
        'placeholder': u'-',
        'tooltip': u'a mock tooltip ล ฦ ว ศ ษ ส ห ฬ อ'
    }
    assert number_input == expected
def test_get_script_details():
    """CmdDocgen._get_script_details extracts every documented attribute of a script."""
    definition = package_helpers.get_import_definition_from_customize_py(mock_paths.MOCK_CUSTOMIZE_PY)
    export_data = sdk_helpers.get_from_export(definition, scripts=["Mock Script One"])

    details = CmdDocgen._get_script_details(export_data.get("scripts"))
    the_script = details[0]

    # Compare each attribute against its expected value
    expected_attrs = {
        "name": "Mock Script One",
        "simple_name": "mock-script-one",
        "anchor": "mock-script-one",
        "description": "a sample Artifact script",
        "object_type": "artifact",
        "script_text": """log.info("Print this message")""",
    }
    for attr, expected in expected_attrs.items():
        assert the_script.get(attr) == expected
def test_get_custom_artifact_details():
    """CmdDocgen._get_custom_artifact_details extracts details of a custom artifact type."""
    import_definition = package_helpers.get_import_definition_from_customize_py(mock_paths.MOCK_CUSTOMIZE_PY)
    import_def_data = sdk_helpers.get_from_export(import_definition, artifact_types=["mock_artifact_2"])

    artifact_details = CmdDocgen._get_custom_artifact_details(import_def_data.get("artifact_types"))
    the_artifact = artifact_details[0]

    # Expected values contain CJK-compatibility unicode characters to verify unicode handling
    mock_artifact = {
        'api_name': u'mock_artifact_2',
        'display_name': u'Mock Artifact 2 ㌎ ㌏ ㌐ ㌑ ㌒ ㌓ ㌔ ㌕ ㌖',
        'description': u'㌎ ㌏ ㌐ ㌑ ㌒ ㌓ ㌔ ㌕ ㌖ ㌎ ㌏ ㌐ ㌑ ㌒ ㌓ ㌔ ㌕ ㌖asdf ㌎ ㌏ ㌐ ㌑ ㌒ ㌓ ㌔ ㌕ ㌖'
    }

    assert the_artifact == mock_artifact
def test_get_function_details():
    """CmdDocgen._get_function_details combines function and workflow export data for docgen."""
    import_definition = package_helpers.get_import_definition_from_customize_py(mock_paths.MOCK_CUSTOMIZE_PY)
    import_def_data = sdk_helpers.get_from_export(
        import_definition, functions=["mock_function_two"], workflows=["mock_workflow_two"])

    functions = import_def_data.get("functions")
    workflows = import_def_data.get("workflows")

    function_details = CmdDocgen._get_function_details(functions, workflows)
    the_function = function_details[0]

    assert the_function.get("name") == u"mock function ล ฦ ว ศ ษ ส ห ฬ อ two"
    # NOTE: 'in' here is a substring containment check (str in str), not
    # membership in a collection — the pre-processing script is asserted
    # to be contained within this full expected script text
    assert the_function.get("pre_processing_script") in u"""# mock pre script of function ล ฦ ว ศ ษ ส ห ฬ อ ล ฦ ว ศ ษ ส ห ฬ อ ล ฦ ว ศ ษ ส ห ฬ อ two:\n\ninputs.mock_input_boolean = False\ninputs.mock_input_number = 1001\ninputs.mock_input_text = u" ล ฦ ว ศ ษ ส ห ฬ อ ล ฦ ว ศ ษ ส ห ฬ อ ramdom text" """
    assert the_function.get("post_processing_script") is None
def test_reload_package_w_playbook(
        fx_copy_fn_main_mock_integration_w_playbooks, fx_get_sub_parser,
        fx_cmd_line_args_codegen_reload):
    """Verify codegen --reload preserves rules and the 'main_mock_playbook' playbook in the package."""
    output_path = os.path.join(mock_paths.TEST_TEMP_DIR, "mock_path",
                               "fn_main_mock_integration-1.1.0")
    mock_integration_name = fx_copy_fn_main_mock_integration_w_playbooks[0]
    shutil.move(fx_copy_fn_main_mock_integration_w_playbooks[1], output_path)

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(mock_integration_name)] = output_path

    # Add path to a mock export.res file
    sys.argv.extend(["-e", mock_paths.MOCK_RELOAD_EXPORT_RES_W_PLAYBOOK])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    path_package_reloaded = cmd_codegen._reload_package(args)

    # Read the ImportDefinition back from the reloaded package
    import_definition = package_helpers.get_import_definition_from_customize_py(
        os.path.join(path_package_reloaded, mock_integration_name,
                     package_helpers.PATH_CUSTOMIZE_PY))

    res_objs = sdk_helpers.get_from_export(
        import_definition,
        rules=["Additional Mock Rule", "Mock Manual Rule"],
        playbooks=["main_mock_playbook"])

    # Assert the general structure of the reloaded package
    general_test_package_structure(mock_integration_name, path_package_reloaded)

    # Assert the rules and the playbook survived the reload
    assert helpers.verify_expected_list(
        ["Additional Mock Rule", "Mock Manual Rule"],
        [o.get("x_api_name") for o in res_objs.get("rules")])
    assert helpers.verify_expected_list(
        ["main_mock_playbook"],
        [o.get("x_api_name") for o in res_objs.get("playbooks")])
def _set_version(args):
    """Set the Resilient Platform version in a package's customize.py.

    Validates ``args.set_version``, backs up customize.py (and the local
    /util/data/export.res if present) to .bak files, then re-renders both
    from Jinja2 templates with the updated ``server_version``. On failure
    the .bak files are restored in the ``finally`` block.

    :param args: parsed command-line args; uses ``set_version`` and ``package``
    :raises SDKException: if ``set_version`` is not a valid version string
    """
    new_version = args.set_version

    if not sdk_helpers.is_valid_version_syntax(new_version):
        raise SDKException("{0} is not a valid version".format(new_version))

    # Split "35.0.0" into [35, 0, 0] for major/minor/build_number
    new_version_int = list(map(int, (re.findall(r"\d+", new_version))))

    # Get absolute path_to_src
    path_to_src = os.path.abspath(args.package)

    # Get path to setup.py file
    path_setup_py_file = os.path.join(path_to_src, package_helpers.BASE_NAME_SETUP_PY)

    # Parse the setup.py file
    setup_py_attributes = package_helpers.parse_setup_py(
        path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    package_name = setup_py_attributes.get("name", "")

    LOG.info("Setting Resilient Platform version for %s to %s", package_name, new_version)

    # Get the customize file location.
    path_customize_py = package_helpers.get_configuration_py_file_path(
        "customize", setup_py_attributes)

    # Get customize.py ImportDefinition
    customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(
        path_customize_py)

    old_version = customize_py_import_definition["server_version"]["version"]

    LOG.info("Old Version: %s", old_version)
    LOG.info("New Version: %s", new_version)

    # Set the new version
    customize_py_import_definition["server_version"]["version"] = new_version
    customize_py_import_definition["server_version"]["major"] = new_version_int[0]
    customize_py_import_definition["server_version"]["minor"] = new_version_int[1]
    customize_py_import_definition["server_version"]["build_number"] = new_version_int[2]

    LOG.info("Loading old customize.py file")

    # Load the customize.py module
    customize_py_module = package_helpers.load_customize_py_module(
        path_customize_py, warn=False)

    # Get the 'old_params' from customize.py
    old_params = customize_py_module.codegen_reload_data()

    # Rename the old customize.py with .bak
    path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

    # If local export file exists then save it to a .bak file.
    # (Older packages may not have the /util/data/export.res file)
    # Figure out the path of the files first
    dir_customize_py = os.path.dirname(path_customize_py)
    path_local_export_res = os.path.join(
        dir_customize_py, package_helpers.PATH_LOCAL_EXPORT_RES)

    path_local_export_res_bak = None
    if os.path.isfile(path_local_export_res):
        path_local_export_res_bak = sdk_helpers.rename_to_bak_file(path_local_export_res)

    try:
        # Extract the objects listed in old_params from the (now updated) ImportDefinition
        jinja_data = sdk_helpers.get_from_export(
            customize_py_import_definition,
            message_destinations=old_params.get("message_destinations"),
            functions=old_params.get("functions"),
            workflows=old_params.get("workflows"),
            rules=old_params.get("actions"),
            fields=old_params.get("incident_fields"),
            artifact_types=old_params.get("incident_artifact_types"),
            datatables=old_params.get("datatables"),
            tasks=old_params.get("automatic_tasks"),
            scripts=old_params.get("scripts"))

        # Build the minified export data that the template embeds
        jinja_data["export_data"] = sdk_helpers.minify_export(
            customize_py_import_definition,
            message_destinations=sdk_helpers.get_object_api_names(
                ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
            functions=sdk_helpers.get_object_api_names(
                ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
            workflows=sdk_helpers.get_object_api_names(
                ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
            rules=sdk_helpers.get_object_api_names(
                ResilientObjMap.RULES, jinja_data.get("rules")),
            fields=jinja_data.get("all_fields"),
            artifact_types=sdk_helpers.get_object_api_names(
                ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
            datatables=sdk_helpers.get_object_api_names(
                ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
            tasks=sdk_helpers.get_object_api_names(
                ResilientObjMap.TASKS, jinja_data.get("tasks")),
            # NOTE(review): 'phases' was not requested in get_from_export above —
            # presumably jinja_data still carries a "phases" key; verify
            phases=sdk_helpers.get_object_api_names(
                ResilientObjMap.PHASES, jinja_data.get("phases")),
            scripts=sdk_helpers.get_object_api_names(
                ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get(
            "version", package_helpers.MIN_SETUP_PY_VERSION)

        # Instansiate Jinja2 Environment with path to Jinja2 templates for customize.py
        jinja_env = sdk_helpers.setup_jinja_env(
            "data/codegen/templates/package_template/package/util")
        jinja_template = jinja_env.get_template("customize.py.jinja2")

        LOG.info("Writing new customize.py file")

        # Render & write jinja2 template
        jinja_rendered_text = jinja_template.render(jinja_data)
        sdk_helpers.write_file(path_customize_py, jinja_rendered_text)

        # Instansiate Jinja2 Environment with path to Jinja2 templates for /util/dat/export.res
        #jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template/package/util/data")
        jinja_template = jinja_env.get_template("/data/export.res.jinja2")

        LOG.debug("Writing new /util/data/export.res file")

        # Render jinja2 template
        jinja_rendered_text = jinja_template.render(jinja_data)

        # Make sure the /util/data directory is there if it is not
        dir_local_export_res = os.path.dirname(path_local_export_res)
        if not os.path.exists(dir_local_export_res):
            os.makedirs(dir_local_export_res)

        # Write the file
        sdk_helpers.write_file(path_local_export_res, jinja_rendered_text)

        LOG.info("'dev --set-version' complete for '%s'", package_name)

    except Exception as err:
        # NOTE(review): error is logged but not re-raised, so the finally
        # block can restore the .bak files — confirm callers expect this
        LOG.error(u"Error running resilient-sdk dev --set-version\n\nERROR:%s", err)

    # This is required in finally block as user may kill using keyboard interrupt
    finally:
        # If an error occurred, customize.py does not exist, rename the backup file to original
        if not os.path.isfile(path_customize_py):
            LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
            sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)

        if path_local_export_res_bak and not os.path.isfile(path_local_export_res):
            LOG.info(u"An error occurred. Renaming /util/data/export.res.bak to export.res")
            sdk_helpers.rename_file(path_local_export_res_bak,
                                    package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
def test_reload_package(fx_copy_fn_main_mock_integration, fx_get_sub_parser, fx_cmd_line_args_codegen_reload):
    """
    This tests that when a package is reloaded with codegen --reload
    that each of the EXPECTED_FILES exist and also the additional
    'Additional Mock Rule' and its related Workflow which has a Function
    is also added to the package
    """
    output_path = os.path.join(mock_paths.TEST_TEMP_DIR, "mock_path",
                               "fn_main_mock_integration-1.1.0")
    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    shutil.move(fx_copy_fn_main_mock_integration[1], output_path)

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(mock_integration_name)] = output_path

    # Add path to a mock export.res file
    sys.argv.extend(["-e", mock_paths.MOCK_RELOAD_EXPORT_RES])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    path_package_reloaded = cmd_codegen._reload_package(args)

    # This is really getting the import definition from the new data/export.res file, so tests that as well
    import_definition = package_helpers.get_import_definition_from_customize_py(
        os.path.join(path_package_reloaded, mock_integration_name,
                     package_helpers.PATH_CUSTOMIZE_PY))

    res_objs = sdk_helpers.get_from_export(
        import_definition,
        rules=["Additional Mock Rule", "Mock Manual Rule"],
        functions=[
            "additional_mock_function", "mock_function_one",
            "funct_new_mock_function", "func_new_mock_function"
        ],
        workflows=[
            "additional_mock_workflow", "mock_workflow_one",
            "wf_new_mock_workflow"
        ])

    # Assert the general structure of the reloaded package
    general_test_package_structure(mock_integration_name, path_package_reloaded)

    # Assert the additional rule, function and workflow were added
    assert helpers.verify_expected_list(
        ["Additional Mock Rule", "Mock Manual Rule"],
        [o.get("x_api_name") for o in res_objs.get("rules")])
    assert helpers.verify_expected_list([
        "additional_mock_function", "mock_function_one",
        "funct_new_mock_function", "func_new_mock_function"
    ], [o.get("x_api_name") for o in res_objs.get("functions")])
    assert helpers.verify_expected_list([
        "additional_mock_workflow", "mock_workflow_one", "wf_new_mock_workflow"
    ], [o.get("x_api_name") for o in res_objs.get("workflows")])

    # Assert a new components file is created
    expected_component_files = EXPECTED_FILES_PACKAGE_COMPONENTS_DIR + [
        "funct_additional_mock_function.py"
    ]
    assert helpers.verify_expected_list(
        expected_component_files,
        os.listdir(os.path.join(path_package_reloaded, mock_integration_name,
                                "components")))

    # Assert a new components file with prefix 'funct_' is created
    expected_component_files = ["funct_new_mock_function.py"]
    assert helpers.verify_expected_list(
        expected_component_files,
        os.listdir(os.path.join(path_package_reloaded, mock_integration_name,
                                "components")))

    # Assert a new components file with prefix 'func_' is created
    expected_component_files = ["func_new_mock_function.py"]
    assert helpers.verify_expected_list(
        expected_component_files,
        os.listdir(os.path.join(path_package_reloaded, mock_integration_name,
                                "components")))

    # Assert a new tests file is created
    expected_test_files = EXPECTED_FILES_TESTS_DIR + [
        "test_funct_additional_mock_function.py"
    ]
    assert helpers.verify_expected_list(
        expected_test_files,
        os.listdir(os.path.join(path_package_reloaded, "tests")))

    # Assert a new tests file including 'func_' is created.
    expected_test_files = ["test_func_new_mock_function.py"]
    assert helpers.verify_expected_list(
        expected_test_files,
        os.listdir(os.path.join(path_package_reloaded, "tests")))

    # Assert a new md file is created in data dir
    expected_workflow_files = EXPECTED_FILES_DATA_DIR + [
        "wf_additional_mock_workflow.md"
    ]
    assert helpers.verify_expected_list(
        expected_workflow_files,
        os.listdir(os.path.join(path_package_reloaded, "data")))

    # Assert a new md file with 'wf_' is created in data dir
    expected_workflow_files = ["wf_new_mock_workflow.md"]
    assert helpers.verify_expected_list(
        expected_workflow_files,
        os.listdir(os.path.join(path_package_reloaded, "data")))

    # Remove files from generated package path and recreate without prefix or substring of 'funct_' or 'wf_'.
    os.remove(
        os.path.join(path_package_reloaded, mock_integration_name,
                     "components", "funct_additional_mock_function.py"))
    Path(
        os.path.join(path_package_reloaded, mock_integration_name,
                     "components", "additional_mock_function.py")).touch()
    os.remove(
        os.path.join(path_package_reloaded, "tests",
                     "test_funct_additional_mock_function.py"))
    Path(
        os.path.join(path_package_reloaded, "tests",
                     "test_additional_mock_function.py")).touch()
    os.remove(
        os.path.join(path_package_reloaded, "data",
                     "wf_additional_mock_workflow.md"))
    Path(
        os.path.join(path_package_reloaded, "data",
                     "additional_mock_workflow.md")).touch()

    # Get modification time for workflow file "wf_mock_workflow_one.md" in seconds since the epoch.'
    wf_modified_time = os.path.getmtime(
        os.path.join(path_package_reloaded, "data", "wf_mock_workflow_one.md"))

    # Perform another test reload.
    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    path_package_reloaded = cmd_codegen._reload_package(args)

    # This is really getting the import definition from the new data/export.res file, so tests that as well
    import_definition = package_helpers.get_import_definition_from_customize_py(
        os.path.join(path_package_reloaded, mock_integration_name,
                     package_helpers.PATH_CUSTOMIZE_PY))

    res_objs = sdk_helpers.get_from_export(
        import_definition,
        rules=["Additional Mock Rule", "Mock Manual Rule"],
        functions=[
            "additional_mock_function", "mock_function_one",
            "funct_new_mock_function", "func_new_mock_function"
        ],
        workflows=[
            "additional_mock_workflow", "mock_workflow_one",
            "wf_new_mock_workflow"
        ])

    # Assert the general structure of the reloaded package
    general_test_package_structure(mock_integration_name, path_package_reloaded)

    # Assert a new components file with 'funct_' prefix is not created
    expected_component_files = ["funct_additional_mock_function.py"]
    assert not helpers.verify_expected_list(
        expected_component_files,
        os.listdir(os.path.join(path_package_reloaded, mock_integration_name,
                                "components")))

    # Assert a new workflow file with 'md_' prefix is not created in data dir
    expected_workflow_files = ["wf_additional_mock_workflow.md"]
    assert not helpers.verify_expected_list(
        expected_workflow_files,
        os.listdir(os.path.join(path_package_reloaded, "data")))

    # Assert a new tests file with "funct_" substring is not created
    expected_test_files = ["test_func_additional_mock_function.py"]
    assert not helpers.verify_expected_list(
        expected_test_files,
        os.listdir(os.path.join(path_package_reloaded, "tests")))

    # Get new modification time for test workflow file.
    new_wf_modified_time = os.path.getmtime(
        os.path.join(path_package_reloaded, "data", "wf_mock_workflow_one.md"))

    # Assert modification time of workflow has been updated.
    assert new_wf_modified_time > wf_modified_time
def execute_command(self, args):
    """Run docgen: render the package's README.md from its setup.py,
    customize.py ImportDefinition and config.py app configs.

    :param args: parsed command-line args; uses ``args.p`` as the package path
    :raises SDKException: if setup.py (or other required files) cannot be read
    """
    LOG.debug("docgen called with %s", args)

    # Set docgen name for SDKException
    SDKException.command_ran = self.CMD_NAME

    # Get absolute path_to_src
    path_to_src = os.path.abspath(args.p)

    LOG.debug("Path to project: %s", path_to_src)

    # Instansiate Jinja2 Environment with path to Jinja2 templates
    jinja_env = sdk_helpers.setup_jinja_env("data/docgen/templates")

    # Load the Jinja2 Templates
    readme_template = jinja_env.get_template(README_TEMPLATE_NAME)

    # Generate path to setup.py file
    path_setup_py_file = os.path.join(path_to_src, package_helpers.BASE_NAME_SETUP_PY)

    try:
        # Ensure we have read permissions for setup.py
        sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file)
    except SDKException as err:
        err.message += "\nEnsure you are in the directory of the package you want to run docgen for"
        raise err

    # Parse the setup.py file
    setup_py_attributes = package_helpers.parse_setup_py(
        path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    package_name = setup_py_attributes.get("name", "")

    # Generate paths to other required directories + files
    path_customize_py_file = os.path.join(path_to_src, package_name,
                                          package_helpers.PATH_CUSTOMIZE_PY)
    path_config_py_file = os.path.join(path_to_src, package_name,
                                       package_helpers.PATH_CONFIG_PY)
    path_readme = os.path.join(path_to_src, package_helpers.BASE_NAME_README)
    path_screenshots_dir = os.path.join(path_to_src, package_helpers.PATH_SCREENSHOTS)
    path_payload_samples_dir = os.path.join(path_to_src,
                                            package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

    # Ensure we have read permissions for each required file and the file exists
    sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file,
                                    path_customize_py_file, path_config_py_file)

    # Check doc/screenshots directory exists, if not, create it + copy default screenshot
    if not os.path.isdir(path_screenshots_dir):
        os.makedirs(path_screenshots_dir)
        shutil.copy(package_helpers.PATH_DEFAULT_SCREENSHOT, path_screenshots_dir)

    # Get the resilient_circuits dependency string from setup.py file
    # (try the underscore spelling first, then fall back to the dash spelling)
    res_circuits_dep_str = package_helpers.get_dependency_from_install_requires(
        setup_py_attributes.get("install_requires"), "resilient_circuits")

    if not res_circuits_dep_str:
        res_circuits_dep_str = package_helpers.get_dependency_from_install_requires(
            setup_py_attributes.get("install_requires"), "resilient-circuits")

    # Get ImportDefinition from customize.py
    customize_py_import_def = package_helpers.get_import_definition_from_customize_py(
        path_customize_py_file)

    # Parse the app.configs from the config.py file
    jinja_app_configs = package_helpers.get_configs_from_config_py(path_config_py_file)

    # Get field names from ImportDefinition (only custom incident fields,
    # skipping the built-ins listed in IGNORED_INCIDENT_FIELDS)
    field_names = []
    for f in customize_py_import_def.get("fields", []):
        f_export_key = f.get("export_key")

        if "incident/" in f_export_key and f_export_key not in IGNORED_INCIDENT_FIELDS:
            field_names.append(f.get(ResilientObjMap.FIELDS, ""))

    # Get data from ImportDefinition
    import_def_data = sdk_helpers.get_from_export(
        customize_py_import_def,
        message_destinations=sdk_helpers.get_object_api_names(
            ResilientObjMap.MESSAGE_DESTINATIONS,
            customize_py_import_def.get("message_destinations")),
        functions=sdk_helpers.get_object_api_names(
            ResilientObjMap.FUNCTIONS, customize_py_import_def.get("functions")),
        workflows=sdk_helpers.get_object_api_names(
            ResilientObjMap.WORKFLOWS, customize_py_import_def.get("workflows")),
        rules=sdk_helpers.get_object_api_names(
            ResilientObjMap.RULES, customize_py_import_def.get("actions")),
        fields=field_names,
        artifact_types=sdk_helpers.get_object_api_names(
            ResilientObjMap.INCIDENT_ARTIFACT_TYPES,
            customize_py_import_def.get("incident_artifact_types")),
        datatables=sdk_helpers.get_object_api_names(
            ResilientObjMap.DATATABLES, customize_py_import_def.get("types")),
        tasks=sdk_helpers.get_object_api_names(
            ResilientObjMap.TASKS, customize_py_import_def.get("automatic_tasks")),
        scripts=sdk_helpers.get_object_api_names(
            ResilientObjMap.SCRIPTS, customize_py_import_def.get("scripts")),
        playbooks=sdk_helpers.get_object_api_names(
            ResilientObjMap.PLAYBOOKS, customize_py_import_def.get("playbooks", [])))

    # Lists we use in Jinja Templates
    jinja_functions = self._get_function_details(
        import_def_data.get("functions", []), import_def_data.get("workflows", []))
    jinja_scripts = self._get_script_details(import_def_data.get("scripts", []))
    jinja_rules = self._get_rule_details(import_def_data.get("rules", []))
    jinja_datatables = self._get_datatable_details(import_def_data.get("datatables", []))
    jinja_custom_fields = self._get_custom_fields_details(import_def_data.get("fields", []))
    jinja_custom_artifact_types = self._get_custom_artifact_details(
        import_def_data.get("artifact_types", []))
    jinja_playbooks = self._get_playbook_details(import_def_data.get("playbooks", []))

    # Other variables for Jinja Templates
    package_name_dash = package_name.replace("_", "-")
    server_version = customize_py_import_def.get("server_version", {})
    supported_app = sdk_helpers.does_url_contain(
        setup_py_attributes.get("url", ""), "ibm.com/mysupport")

    # See if a payload_samples dir exists and use the contents for function results
    try:
        sdk_helpers.validate_dir_paths(os.R_OK, path_payload_samples_dir)

        for f in jinja_functions:
            fn_name = f.get("x_api_name")
            path_payload_samples_fn_name = os.path.join(path_payload_samples_dir, fn_name)
            path_output_json_example = os.path.join(
                path_payload_samples_fn_name,
                package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)

            try:
                sdk_helpers.validate_file_paths(os.R_OK, path_output_json_example)
                f["results"] = sdk_helpers.read_json_file(path_output_json_example)
            except SDKException as e:
                # Missing per-function example file is non-fatal
                sdk_helpers.handle_file_not_found_error(
                    e, u"Error getting results. No '{0}' file found for '{1}'.".format(
                        package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE, fn_name))

    except SDKException as e:
        # Missing payload_samples directory entirely is also non-fatal
        sdk_helpers.handle_file_not_found_error(
            e, u"Error getting results. No '{0}' directory found.".format(
                package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE))

    LOG.info("Rendering README for %s", package_name_dash)

    # Render the README Jinja2 Templeate with parameters
    rendered_readme = readme_template.render({
        "name_underscore": package_name,
        "name_dash": package_name_dash,
        "display_name": setup_py_attributes.get("display_name", package_name),
        "short_description": setup_py_attributes.get("description"),
        "long_description": setup_py_attributes.get("long_description"),
        "version": setup_py_attributes.get("version"),
        "server_version": server_version.get("version"),
        "all_dependencies": setup_py_attributes.get("install_requires", []),
        "res_circuits_dependency_str": res_circuits_dep_str,
        "author": setup_py_attributes.get("author"),
        "support_url": setup_py_attributes.get("url"),
        "supported_app": supported_app,
        "app_configs": jinja_app_configs[1],
        "functions": jinja_functions,
        "scripts": jinja_scripts,
        "rules": jinja_rules,
        "datatables": jinja_datatables,
        "custom_fields": jinja_custom_fields,
        "custom_artifact_types": jinja_custom_artifact_types,
        "playbooks": jinja_playbooks,
        "placeholder_string": constants.DOCGEN_PLACEHOLDER_STRING
    })

    # Create a backup if needed of README
    sdk_helpers.rename_to_bak_file(path_readme, package_helpers.PATH_DEFAULT_README)

    LOG.info("Writing README to: %s", path_readme)

    # Write the new README
    sdk_helpers.write_file(path_readme, rendered_readme)