Example #1
def test_cmd_codegen_args_parser(fx_get_sub_parser,
                                 fx_cmd_line_args_codegen_package):
    cmd_codegen = CmdCodegen(fx_get_sub_parser)

    assert cmd_codegen.parser._optionals.title == "options"

    args = cmd_codegen.parser.parse_known_args()[0]
    assert args.package == "fn_main_mock_integration"
    assert args.messagedestination == ["fn_main_mock_integration"]
    assert args.function == ["mock_function_one"]
    assert args.rule == [
        "Mock Manual Rule", "Mock: Auto Rule", "Mock Task Rule",
        "Mock Script Rule", "Mock Manual Rule Message Destination"
    ]
    assert args.workflow == ["mock_workflow_one", "mock_workflow_two"]
    assert args.field == [
        "mock_field_number", "mock_field_number", "mock_field_text_area"
    ]
    assert args.artifacttype == ["mock_artifact_2", "mock_artifact_type_one"]
    assert args.datatable == ["mock_data_table"]
    assert args.task == [
        "mock_custom_task_one", "mock_cusom_task__________two"
    ]
    assert args.script == ["Mock Script One"]
    assert args.incidenttype == [
        u"mock_incidenttype_Āā", u"mock incident type one"
    ]
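
All of the examples below rely on pytest fixtures (fx_get_sub_parser, fx_cmd_line_args_codegen_package, etc.) to pre-populate sys.argv before the parser runs, which is why parse_known_args() returns the mock package, rule and function names without any explicit command line. A minimal sketch of such a fixture, assuming it simply patches sys.argv (the real fixtures likely set many more arguments), could look like this:

import sys
import pytest

@pytest.fixture
def fx_cmd_line_args_codegen_package_sketch(monkeypatch):
    # Hypothetical fixture: patch sys.argv as if the user ran
    # "resilient-sdk codegen -p fn_main_mock_integration -f mock_function_one"
    monkeypatch.setattr(sys, "argv", [
        "resilient-sdk", "codegen",
        "-p", "fn_main_mock_integration",
        "-f", "mock_function_one",
    ])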
Example #2
def test_get_results_from_log_file_specific_function(
        fx_copy_fn_main_mock_integration, fx_cmd_line_args_codegen_base,
        fx_get_sub_parser, caplog):
    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    path_fn_main_mock_integration = fx_copy_fn_main_mock_integration[1]

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(
        mock_integration_name)] = path_fn_main_mock_integration

    # Add args for --gather-results and the specific functions to gather results for
    sys.argv.extend(["--gather-results", mock_paths.MOCK_APP_LOG_PATH])
    sys.argv.extend(["-f", "mock_function_one", "mock_function_not_exist"])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    cmd_codegen.execute_command(args)

    path_payload_samples = os.path.join(
        path_fn_main_mock_integration,
        package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

    # Test output_json_example.json file generated
    output_json_example_contents = sdk_helpers.read_json_file(
        os.path.join(path_payload_samples, "mock_function_one",
                     package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE))
    assert output_json_example_contents.get("version") == 2.1

    # Test output_json_schema.json file generated
    output_json_example_schema = sdk_helpers.read_json_file(
        os.path.join(path_payload_samples, "mock_function_one",
                     package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA))
    output_json_example_schema_props = output_json_example_schema.get(
        "properties")

    assert output_json_example_schema_props.get("version") == {
        "type": "number"
    }
    assert output_json_example_schema_props.get("success") == {
        "type": "boolean"
    }
    assert output_json_example_schema_props.get("reason") == {}
    assert not output_json_example_schema.get("required")

    # Test WARNING log appears
    assert "WARNING: No results could be found for 'mock_function_not_exist'" in caplog.text
Example #3
def test_add_payload_samples():

    mock_fn_name = "Mock Function Name"
    mock_jinja_data = {"mock": "data"}
    mock_mapping_dict = {package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR: {}}
    mock_mapping_dict[
        package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR][mock_fn_name] = {}
    CmdCodegen.add_payload_samples(mock_mapping_dict, mock_fn_name,
                                   mock_jinja_data)

    for f in EXPECTED_FILES_PAYLOAD_SAMPLES_FN_NAME_DIR:
        assert isinstance(
            mock_mapping_dict[package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR]
            [mock_fn_name][f], tuple)
        assert mock_mapping_dict[
            package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR][mock_fn_name][f][
                1] == mock_jinja_data
Example #4
def test_get_results_from_log_file_no_payload_samples_dir(
        fx_copy_fn_main_mock_integration, fx_cmd_line_args_codegen_base,
        fx_get_sub_parser, caplog):

    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    path_fn_main_mock_integration = fx_copy_fn_main_mock_integration[1]

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(
        mock_integration_name)] = path_fn_main_mock_integration

    # Add arg to gather-results and a path to a mock export.res file for --reload
    sys.argv.extend(["--gather-results", mock_paths.MOCK_APP_LOG_PATH])
    sys.argv.extend(["-e", mock_paths.MOCK_RELOAD_EXPORT_RES])

    path_payload_samples = os.path.join(
        path_fn_main_mock_integration,
        package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

    # Remove path_payload_samples
    shutil.rmtree(path_payload_samples)

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    cmd_codegen.execute_command(args)

    # Test output_json_example.json file generated
    output_json_example_contents = sdk_helpers.read_json_file(
        os.path.join(path_payload_samples, "mock_function_one",
                     package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE))
    assert output_json_example_contents.get("version") == 2.1

    # Test output_json_schema.json file generated
    output_json_example_schema = sdk_helpers.read_json_file(
        os.path.join(path_payload_samples, "mock_function_one",
                     package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA))
    output_json_example_schema_props = output_json_example_schema.get(
        "properties")
    assert output_json_example_schema_props.get("version") == {
        "type": "number"
    }
    assert output_json_example_schema_props.get("reason") == {}

    # Test --reload was run
    assert "Running 'codegen --reload' to create the default missing files" in caplog.text
Example #5
def test_forget_reload_flag(fx_copy_fn_main_mock_integration,
                            fx_get_sub_parser,
                            fx_cmd_line_args_codegen_package):
    """
    This tests that if you forget the --reload flag you get an error
    """
    output_path = os.path.join(mock_paths.TEST_TEMP_DIR, "mock_path",
                               "fn_main_mock_integration-1.1.0")
    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    shutil.move(fx_copy_fn_main_mock_integration[1], output_path)

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(mock_integration_name)] = output_path

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]

    with pytest.raises(
            SDKException,
            match=r"already exists. Add --reload flag to regenerate it"):
        cmd_codegen._gen_package(args)
Example #6
def test_gen_package(fx_get_sub_parser, fx_cmd_line_args_codegen_package,
                     fx_mk_temp_dir, fx_add_dev_env_var):
    """
    This tests that when a package is generated with codegen,
    each of the EXPECTED_FILES exists in each directory.
    This test is NOT concerned with the contents of each file,
    just that it exists
    """
    output_path = mock_paths.TEST_TEMP_DIR

    # Add paths to an output base and an export.res file
    sys.argv.extend(["-o", output_path])
    sys.argv.extend(["-e", mock_paths.MOCK_EXPORT_RES])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    cmd_codegen._gen_package(args)

    package_name = args.package
    package_path = os.path.join(output_path, args.package)
    general_test_package_structure(package_name, package_path)
Example #7
def test_gather_results_on_py27(fx_copy_fn_main_mock_integration,
                                fx_cmd_line_args_codegen_base,
                                fx_get_sub_parser):

    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    path_fn_main_mock_integration = fx_copy_fn_main_mock_integration[1]

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(
        mock_integration_name)] = path_fn_main_mock_integration

    # Add arg to gather-results
    sys.argv.extend(["--gather-results", mock_paths.MOCK_APP_LOG_PATH])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]

    if not sdk_helpers.is_python_min_supported_version():

        with pytest.raises(SDKException,
                           match=constants.ERROR_WRONG_PYTHON_VERSION):
            cmd_codegen.execute_command(args)

    else:
        cmd_codegen.execute_command(args)
Example #8
def test_cmd_codegen(fx_get_sub_parser, fx_cmd_line_args_codegen_package):
    cmd_codegen = CmdCodegen(fx_get_sub_parser)

    assert isinstance(cmd_codegen, base_cmd.BaseCmd)
    assert cmd_codegen.CMD_NAME == "codegen"
    assert cmd_codegen.CMD_HELP == "Generate boilerplate code to start developing an app"
    assert cmd_codegen.CMD_USAGE == """
    $ resilient-sdk codegen -p <name_of_package> -m 'fn_custom_md' --rule 'Rule One' 'Rule Two'
    $ resilient-sdk codegen -p <path_current_package> --reload --workflow 'new_wf_to_add'"""
    assert cmd_codegen.CMD_DESCRIPTION == "Generate boilerplate code to start developing an app"
    assert cmd_codegen.CMD_ADD_PARSERS == ["res_obj_parser", "io_parser"]

    args = cmd_codegen.parser.parse_known_args()[0]
    assert args.package == "fn_main_mock_integration"
Example #9
def test_reload_package_w_playbook(
        fx_copy_fn_main_mock_integration_w_playbooks, fx_get_sub_parser,
        fx_cmd_line_args_codegen_reload):

    output_path = os.path.join(mock_paths.TEST_TEMP_DIR, "mock_path",
                               "fn_main_mock_integration-1.1.0")
    mock_integration_name = fx_copy_fn_main_mock_integration_w_playbooks[0]
    shutil.move(fx_copy_fn_main_mock_integration_w_playbooks[1], output_path)

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(mock_integration_name)] = output_path

    # Add path to a mock export.res file
    sys.argv.extend(["-e", mock_paths.MOCK_RELOAD_EXPORT_RES_W_PLAYBOOK])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    path_package_reloaded = cmd_codegen._reload_package(args)

    import_definition = package_helpers.get_import_definition_from_customize_py(
        os.path.join(path_package_reloaded, mock_integration_name,
                     package_helpers.PATH_CUSTOMIZE_PY))

    res_objs = sdk_helpers.get_from_export(
        import_definition,
        rules=["Additional Mock Rule", "Mock Manual Rule"],
        playbooks=["main_mock_playbook"])

    general_test_package_structure(mock_integration_name,
                                   path_package_reloaded)

    assert helpers.verify_expected_list(
        ["Additional Mock Rule", "Mock Manual Rule"],
        [o.get("x_api_name") for o in res_objs.get("rules")])
    assert helpers.verify_expected_list(
        ["main_mock_playbook"],
        [o.get("x_api_name") for o in res_objs.get("playbooks")])
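
helpers.verify_expected_list is used throughout these tests to check that every expected name appears in the actual list; the later `assert not` variants rely on it returning False when something is missing. A minimal sketch of that behaviour, assuming a simple containment check rather than the SDK's actual test helper:

def verify_expected_list_sketch(expected_list, actual_list):
    # True only if every expected item is present in the actual list
    return all(item in actual_list for item in expected_list)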
Example #10
def test_cmd_codegen(fx_get_sub_parser, fx_cmd_line_args_codegen_package):
    cmd_codegen = CmdCodegen(fx_get_sub_parser)

    assert isinstance(cmd_codegen, base_cmd.BaseCmd)
    assert cmd_codegen.CMD_NAME == "codegen"
    assert cmd_codegen.CMD_HELP == "Generates boilerplate code used to begin developing an app."
    assert cmd_codegen.CMD_USAGE == """
    $ resilient-sdk codegen -p <name_of_package> -m 'fn_custom_md' --rule 'Rule One' 'Rule Two' -i 'custom incident type'
    $ resilient-sdk codegen -p <name_of_package> -m 'fn_custom_md' -c '/usr/custom_app.config'
    $ resilient-sdk codegen -p <path_current_package> --reload --workflow 'new_wf_to_add'
    $ resilient-sdk codegen -p <path_current_package> --gather-results
    $ resilient-sdk codegen -p <path_current_package> --gather-results '/usr/custom_app.log' -f 'func_one' 'func_two'"""
    assert cmd_codegen.CMD_DESCRIPTION == cmd_codegen.CMD_HELP
    assert cmd_codegen.CMD_ADD_PARSERS == [
        "app_config_parser", "res_obj_parser", "io_parser"
    ]

    args = cmd_codegen.parser.parse_known_args()[0]
    assert args.package == "fn_main_mock_integration"
Example #11
def test_merge_codegen_params():
    old_params = {
        "actions": ["rule 1", "rule 2"],
        "scripts": ["script 1"],
        "functions": []
    }

    class args(object):
        function = ["new_fn_1", "new_fn_2"]
        rule = ["rule 3"]
        script = None

    mapping_tuples = [("function", "functions"), ("rule", "actions"),
                      ("script", "scripts")]

    merged_args = CmdCodegen.merge_codegen_params(old_params, args,
                                                  mapping_tuples)

    assert len(merged_args.function) == 2
    assert "new_fn_1" in merged_args.function
    assert "new_fn_2" in merged_args.function
    assert "rule 3" in merged_args.rule
    assert "script 1" in merged_args.script
Example #12
def test_render_jinja_mapping(fx_mk_temp_dir):

    mock_jinja_data = {
        "functions": [{
            "x_api_name": "fn_mock_function_1"
        }, {
            "x_api_name": "fn_mock_function_2"
        }],
        "export_data": {
            "server_version": {
                "version": "35.0.0"
            }
        }
    }

    jinja_env = sdk_helpers.setup_jinja_env(
        "data/codegen/templates/package_template")

    jinja_mapping_dict = {
        "MANIFEST.in": ("MANIFEST.in.jinja2", mock_jinja_data),
        "README.md": ("README.md.jinja2", mock_jinja_data),
        "setup.py": ("setup.py.jinja2", mock_jinja_data),
        "tox.ini": ("tox.ini.jinja2", mock_jinja_data),
        "Dockerfile": ("Dockerfile.jinja2", mock_jinja_data),
        "entrypoint.sh": ("entrypoint.sh.jinja2", mock_jinja_data),
        "apikey_permissions.txt":
        ("apikey_permissions.txt.jinja2", mock_jinja_data),
        "data": {},
        "icons": {
            "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
            "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
        },
        "doc": {
            "screenshots": {
                "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
            }
        },
        "test_package": {
            "__init__.py": ("package/__init__.py.jinja2", mock_jinja_data),
            "LICENSE": ("package/LICENSE.jinja2", mock_jinja_data),
            "components": {
                "__init__.py":
                ("package/components/__init__.py.jinja2", mock_jinja_data),
            },
            "util": {
                "data": {
                    "export.res":
                    ("package/util/data/export.res.jinja2", mock_jinja_data)
                },
                "__init__.py":
                ("package/util/__init__.py.jinja2", mock_jinja_data),
                "config.py":
                ("package/util/config.py.jinja2", mock_jinja_data),
                "customize.py":
                ("package/util/customize.py.jinja2", mock_jinja_data),
                "selftest.py":
                ("package/util/selftest.py.jinja2", mock_jinja_data),
            }
        }
    }

    CmdCodegen.render_jinja_mapping(jinja_mapping_dict, jinja_env,
                                    mock_paths.TEST_TEMP_DIR,
                                    mock_paths.TEST_TEMP_DIR)

    files_in_dir = sorted(os.listdir(mock_paths.TEST_TEMP_DIR))
    assert files_in_dir == [
        'Dockerfile', 'MANIFEST.in', 'README.md', 'apikey_permissions.txt',
        'data', 'doc', 'entrypoint.sh', 'icons', 'setup.py', 'test_package',
        'tox.ini'
    ]

    files_in_icons_dir = sorted(
        os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "icons")))
    assert files_in_icons_dir == ['app_logo.png', 'company_logo.png']

    files_in_test_package = sorted(
        os.listdir(os.path.join(mock_paths.TEST_TEMP_DIR, "test_package")))
    assert files_in_test_package == [
        'LICENSE', '__init__.py', 'components', 'util'
    ]

    files_in_util = sorted(
        os.listdir(
            os.path.join(mock_paths.TEST_TEMP_DIR, "test_package", "util")))
    assert files_in_util == [
        '__init__.py', 'config.py', 'customize.py', 'data', 'selftest.py'
    ]

    files_in_util_data = sorted(
        os.listdir(
            os.path.join(mock_paths.TEST_TEMP_DIR, "test_package",
                         package_helpers.PATH_UTIL_DATA_DIR)))
    assert files_in_util_data == ['export.res']

    files_in_components = sorted(
        os.listdir(
            os.path.join(mock_paths.TEST_TEMP_DIR, "test_package",
                         "components")))
    assert files_in_components == ['__init__.py']

    customize_py = sdk_helpers.read_file(
        os.path.join(mock_paths.TEST_TEMP_DIR, "test_package", "util",
                     "customize.py"))
    assert '        "functions": [u"fn_mock_function_1", u"fn_mock_function_2"],\n' in customize_py
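
The jinja_mapping_dict above follows a convention that render_jinja_mapping appears to walk recursively: a nested dict becomes a directory, a (template_name, data) tuple is rendered through the Jinja environment, and a plain path string is copied as-is (as with the default icon and screenshot paths). A minimal sketch of that idea, assuming this convention and not reproducing the SDK's actual code:

import os
import shutil

def render_mapping_sketch(mapping, jinja_env, target_dir):
    for name, value in mapping.items():
        path = os.path.join(target_dir, name)
        if isinstance(value, dict):
            # Nested dict: create the directory and recurse into it
            os.makedirs(path, exist_ok=True)
            render_mapping_sketch(value, jinja_env, path)
        elif isinstance(value, tuple):
            # (template_name, data): render the Jinja template into the file
            template_name, jinja_data = value
            with open(path, "w") as the_file:
                the_file.write(jinja_env.get_template(template_name).render(jinja_data))
        else:
            # Plain path string: copy the static file (e.g. default icons)
            shutil.copy(value, path)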
Example #13
html_show_sourcelink = False
html_domain_indices = False

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# autodoc configs
add_module_names = False

# resilient-sdk parser
sdk_parser = sdk_app.get_main_app_parser()
sdk_sub_parser = sdk_app.get_main_app_sub_parser(sdk_parser)

cmd_codegen = CmdCodegen(sdk_sub_parser)
cmd_docgen = CmdDocgen(sdk_sub_parser)
cmd_ext_package = CmdExtPackage(sdk_sub_parser)
cmd_clone = CmdClone(sdk_sub_parser)
cmd_extract = CmdExtract(sdk_sub_parser)
cmd_validate = CmdValidate(sdk_sub_parser)

# parse the setup.py files
the_globals = {"__file__": "", "long_description": ""}
resilient_setup_attributes = parse_setup_py(
    os.path.abspath("../resilient/setup.py"),
    SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES,
    the_globals=the_globals)
circuits_setup_attributes = parse_setup_py(
    os.path.abspath("../resilient-circuits/setup.py"),
    SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES,
    the_globals=the_globals)
Example #14
def test_reload_package(fx_copy_fn_main_mock_integration, fx_get_sub_parser,
                        fx_cmd_line_args_codegen_reload):
    """
    This tests that when a package is reloaded with codegen --reload
    that each of the EXPECTED_FILES exists and that the additional 'Additional Mock Rule',
    together with its related Workflow and Function, is also added to the package
    """

    output_path = os.path.join(mock_paths.TEST_TEMP_DIR, "mock_path",
                               "fn_main_mock_integration-1.1.0")
    mock_integration_name = fx_copy_fn_main_mock_integration[0]
    shutil.move(fx_copy_fn_main_mock_integration[1], output_path)

    # Replace cmd line arg "fn_main_mock_integration" with path to temp dir location
    sys.argv[sys.argv.index(mock_integration_name)] = output_path

    # Add path to a mock export.res file
    sys.argv.extend(["-e", mock_paths.MOCK_RELOAD_EXPORT_RES])

    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    path_package_reloaded = cmd_codegen._reload_package(args)

    # This is really getting the import definition from the new data/export.res file, so tests that as well
    import_definition = package_helpers.get_import_definition_from_customize_py(
        os.path.join(path_package_reloaded, mock_integration_name,
                     package_helpers.PATH_CUSTOMIZE_PY))

    res_objs = sdk_helpers.get_from_export(
        import_definition,
        rules=["Additional Mock Rule", "Mock Manual Rule"],
        functions=[
            "additional_mock_function", "mock_function_one",
            "funct_new_mock_function", "func_new_mock_function"
        ],
        workflows=[
            "additional_mock_workflow", "mock_workflow_one",
            "wf_new_mock_workflow"
        ])

    # Assert the general structure of the reloaded package
    general_test_package_structure(mock_integration_name,
                                   path_package_reloaded)

    # Assert the additional rule, function and workflow were added
    assert helpers.verify_expected_list(
        ["Additional Mock Rule", "Mock Manual Rule"],
        [o.get("x_api_name") for o in res_objs.get("rules")])
    assert helpers.verify_expected_list([
        "additional_mock_function", "mock_function_one",
        "funct_new_mock_function", "func_new_mock_function"
    ], [o.get("x_api_name") for o in res_objs.get("functions")])
    assert helpers.verify_expected_list([
        "additional_mock_workflow", "mock_workflow_one", "wf_new_mock_workflow"
    ], [o.get("x_api_name") for o in res_objs.get("workflows")])

    # Assert a new components file is created
    expected_component_files = EXPECTED_FILES_PACKAGE_COMPONENTS_DIR + [
        "funct_additional_mock_function.py"
    ]
    assert helpers.verify_expected_list(
        expected_component_files,
        os.listdir(
            os.path.join(path_package_reloaded, mock_integration_name,
                         "components")))

    # Assert a new components file with prefix 'funct_' is created
    expected_component_files = ["funct_new_mock_function.py"]
    assert helpers.verify_expected_list(
        expected_component_files,
        os.listdir(
            os.path.join(path_package_reloaded, mock_integration_name,
                         "components")))

    # Assert a new components file with prefix 'func_' is created
    expected_component_files = ["func_new_mock_function.py"]
    assert helpers.verify_expected_list(
        expected_component_files,
        os.listdir(
            os.path.join(path_package_reloaded, mock_integration_name,
                         "components")))

    # Assert a new tests file is created
    expected_test_files = EXPECTED_FILES_TESTS_DIR + [
        "test_funct_additional_mock_function.py"
    ]
    assert helpers.verify_expected_list(
        expected_test_files,
        os.listdir(os.path.join(path_package_reloaded, "tests")))

    # Assert a new tests file including 'func_' is created.
    expected_test_files = ["test_func_new_mock_function.py"]
    assert helpers.verify_expected_list(
        expected_test_files,
        os.listdir(os.path.join(path_package_reloaded, "tests")))

    # Assert a new md file is created in data dir
    expected_workflow_files = EXPECTED_FILES_DATA_DIR + [
        "wf_additional_mock_workflow.md"
    ]
    assert helpers.verify_expected_list(
        expected_workflow_files,
        os.listdir(os.path.join(path_package_reloaded, "data")))

    # Assert a new md file with 'wf_' is created in data dir
    expected_workflow_files = ["wf_new_mock_workflow.md"]
    assert helpers.verify_expected_list(
        expected_workflow_files,
        os.listdir(os.path.join(path_package_reloaded, "data")))

    # Remove files from generated package path and recreate without prefix or substring of 'funct_' or 'wf_'.
    os.remove(
        os.path.join(path_package_reloaded, mock_integration_name,
                     "components", "funct_additional_mock_function.py"))
    Path(
        os.path.join(path_package_reloaded, mock_integration_name,
                     "components", "additional_mock_function.py")).touch()
    os.remove(
        os.path.join(path_package_reloaded, "tests",
                     "test_funct_additional_mock_function.py"))
    Path(
        os.path.join(path_package_reloaded, "tests",
                     "test_additional_mock_function.py")).touch()
    os.remove(
        os.path.join(path_package_reloaded, "data",
                     "wf_additional_mock_workflow.md"))
    Path(
        os.path.join(path_package_reloaded, "data",
                     "additional_mock_workflow.md")).touch()

    # Get modification time for workflow file "wf_mock_workflow_one.md" in seconds since the epoch.
    wf_modified_time = os.path.getmtime(
        os.path.join(path_package_reloaded, "data", "wf_mock_workflow_one.md"))

    # Perform another test reload.
    cmd_codegen = CmdCodegen(fx_get_sub_parser)
    args = cmd_codegen.parser.parse_known_args()[0]
    path_package_reloaded = cmd_codegen._reload_package(args)

    # This is really getting the import definition from the new data/export.res file, so tests that as well
    import_definition = package_helpers.get_import_definition_from_customize_py(
        os.path.join(path_package_reloaded, mock_integration_name,
                     package_helpers.PATH_CUSTOMIZE_PY))

    res_objs = sdk_helpers.get_from_export(
        import_definition,
        rules=["Additional Mock Rule", "Mock Manual Rule"],
        functions=[
            "additional_mock_function", "mock_function_one",
            "funct_new_mock_function", "func_new_mock_function"
        ],
        workflows=[
            "additional_mock_workflow", "mock_workflow_one",
            "wf_new_mock_workflow"
        ])

    # Assert the general structure of the reloaded package
    general_test_package_structure(mock_integration_name,
                                   path_package_reloaded)

    # Assert a new components file with 'funct_' prefix is not created
    expected_component_files = ["funct_additional_mock_function.py"]
    assert not helpers.verify_expected_list(
        expected_component_files,
        os.listdir(
            os.path.join(path_package_reloaded, mock_integration_name,
                         "components")))
    # Assert a new workflow file with 'wf_' prefix is not created in data dir
    expected_workflow_files = ["wf_additional_mock_workflow.md"]
    assert not helpers.verify_expected_list(
        expected_workflow_files,
        os.listdir(os.path.join(path_package_reloaded, "data")))
    # Assert a new tests file with 'func_' substring is not created
    expected_test_files = ["test_func_additional_mock_function.py"]
    assert not helpers.verify_expected_list(
        expected_test_files,
        os.listdir(os.path.join(path_package_reloaded, "tests")))
    # Get new modification time for test workflow file.
    new_wf_modified_time = os.path.getmtime(
        os.path.join(path_package_reloaded, "data", "wf_mock_workflow_one.md"))
    # Assert modification time of workflow has been updated.
    assert new_wf_modified_time > wf_modified_time
Example #15
def main():
    """
    Main entry point for resilient-sdk
    """

    # See if RES_SDK_DEV environment var is set
    sdk_dev = sdk_helpers.str_to_bool(os.getenv("RES_SDK_DEV"))

    # Get main parser object
    parser = get_main_app_parser()

    # Get sub_parser object, its dest is cmd
    sub_parser = get_main_app_sub_parser(parser)

    # Add any subcommands to main app parser here
    cmd_codegen = CmdCodegen(sub_parser)
    cmd_clone = CmdClone(sub_parser)
    cmd_docgen = CmdDocgen(sub_parser)
    cmd_extract = CmdExtract(sub_parser)
    cmd_ext_package = CmdExtPackage(sub_parser)

    if sdk_dev:
        # Add 'dev' command if environment var set
        cmd_dev = CmdDev(sub_parser)

    try:
        # Parse the arguments
        args = parser.parse_args()

        if args.cmd is None:
            parser.print_help()
            sys.exit()

    except SDKException as err:
        # Get main_cmd (codegen, docgen etc.)
        main_cmd = sdk_helpers.get_main_cmd()

        LOG.error(err)
        LOG.info("\n-----------------\n")

        # Print specific usage for that cmd for these errors
        if "too few arguments" in err.message or "no subcommand provided" in err.message:
            if main_cmd == cmd_codegen.CMD_NAME:
                cmd_codegen.parser.print_usage()

            elif main_cmd == cmd_clone.CMD_NAME:
                cmd_clone.parser.print_usage()

            elif main_cmd == cmd_docgen.CMD_NAME:
                cmd_docgen.parser.print_usage()

            elif main_cmd == cmd_extract.CMD_NAME:
                cmd_extract.parser.print_usage()

            elif main_cmd == cmd_ext_package.CMD_NAME:
                cmd_ext_package.parser.print_usage()

            elif sdk_dev and main_cmd == cmd_dev.CMD_NAME:
                cmd_dev.parser.print_usage()

            else:
                parser.print_help()

        # Exit
        sys.exit()

    # If -v was specified, set the log level to DEBUG
    if args.verbose:
        LOG.setLevel(logging.DEBUG)
        LOG.debug("Logging set to DEBUG mode")

    # Handle what subcommand was called
    if args.cmd == cmd_docgen.CMD_NAME:
        cmd_docgen.execute_command(args)

    elif args.cmd == cmd_codegen.CMD_NAME:
        cmd_codegen.execute_command(args)

    elif args.cmd == cmd_clone.CMD_NAME:
        cmd_clone.execute_command(args)

    elif args.cmd == cmd_extract.CMD_NAME:
        cmd_extract.execute_command(args)

    elif args.cmd == cmd_ext_package.CMD_NAME:
        cmd_ext_package.execute_command(args)

    elif sdk_dev and args.cmd == cmd_dev.CMD_NAME:
        cmd_dev.execute_command(args)
Example #16
def main():
    """
    Main entry point for resilient-sdk
    """

    # add color support for WINDOWS
    os.system("")

    # See if RES_SDK_DEV environment var is set
    sdk_dev = sdk_helpers.is_env_var_set(constants.ENV_VAR_DEV)

    # Get main parser object
    parser = get_main_app_parser()

    # Get sub_parser object, its dest is cmd
    sub_parser = get_main_app_sub_parser(parser)

    if sdk_dev:
        # Add 'dev' command if environment var set
        cmd_dev = CmdDev(sub_parser)
        LOG.info("{0}Running SDK in Developer Mode{0}".format(constants.LOG_DIVIDER))

    # Add any subcommands to main app parser here
    cmd_validate = CmdValidate(sub_parser)
    cmd_codegen = CmdCodegen(sub_parser)
    cmd_clone = CmdClone(sub_parser)
    cmd_docgen = CmdDocgen(sub_parser)
    cmd_extract = CmdExtract(sub_parser)
    cmd_ext_package = CmdExtPackage(sub_parser, cmd_validate=cmd_validate)

    try:
        # Parse the arguments
        args = parser.parse_args()

        if args.cmd is None:
            parser.print_help()
            sys.exit()

    except SDKException as err:
        # Get main_cmd (codegen, docgen etc.)
        main_cmd = sdk_helpers.get_main_cmd()

        LOG.error(err)
        LOG.info("{0}".format(constants.LOG_DIVIDER))

        # Print specific usage for that cmd for these errors
        if "too few arguments" in err.message or "no subcommand provided" in err.message:
            if main_cmd == cmd_codegen.CMD_NAME:
                cmd_codegen.parser.print_usage()

            elif main_cmd == cmd_clone.CMD_NAME:
                cmd_clone.parser.print_usage()

            elif main_cmd == cmd_docgen.CMD_NAME:
                cmd_docgen.parser.print_usage()

            elif main_cmd == cmd_extract.CMD_NAME:
                cmd_extract.parser.print_usage()

            elif main_cmd == cmd_ext_package.CMD_NAME:
                cmd_ext_package.parser.print_usage()

            elif main_cmd == cmd_validate.CMD_NAME:
                cmd_validate.parser.print_usage()

            elif sdk_dev and main_cmd == cmd_dev.CMD_NAME:
                cmd_dev.parser.print_usage()

            else:
                parser.print_help()

        # Exit
        sys.exit()

    # If -v was specified, set the log level to DEBUG
    if args.verbose:
        LOG.setLevel(logging.DEBUG)
        LOG.debug("Logging set to DEBUG mode")

    # Handle what subcommand was called
    if args.cmd == cmd_docgen.CMD_NAME:
        cmd_docgen.execute_command(args)

    elif args.cmd == cmd_codegen.CMD_NAME:
        cmd_codegen.execute_command(args)

    elif args.cmd == cmd_clone.CMD_NAME:
        cmd_clone.execute_command(args)

    elif args.cmd == cmd_extract.CMD_NAME:
        cmd_extract.execute_command(args)

    elif args.cmd == cmd_ext_package.CMD_NAME:
        cmd_ext_package.execute_command(args)

    elif args.cmd == cmd_validate.CMD_NAME:
        cmd_validate.execute_command(args)

    elif sdk_dev and args.cmd == cmd_dev.CMD_NAME:
        cmd_dev.execute_command(args)