Example 1
def prepare_single_content_item_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name, base_dir=tmp_directory, pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({'description': 'Temporary Pack', 'author': 'xsoar'})
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    data_as_string = data.decode()
    loaded_data = yaml.load(data_as_string)
    buff = io.StringIO()
    yaml.dump(loaded_data, buff)
    data_as_string = buff.getvalue()
    # write yaml integration file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    extractor = Extractor(
        input=str(file_path), file_type=file_type, output=containing_dir, no_logging=True, no_pipenv=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    return extractor.get_output_path()
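
A minimal usage sketch of the function above, assuming it is imported from its defining module; the input file name and the temporary-directory handling are hypothetical:

# Hypothetical usage sketch: prepare a single unified integration YAML for validation.
import tempfile

with tempfile.TemporaryDirectory() as tmp_dir:
    with open('integration-MyIntegration.yml', 'rb') as f:  # hypothetical input file
        raw_bytes = f.read()
    package_path = prepare_single_content_item_for_validation(
        filename='integration-MyIntegration.yml',
        data=raw_bytes,
        tmp_directory=tmp_dir,
    )
    print(f'Package directory to validate: {package_path}')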
Example 2
def prepare_single_content_item_for_validation(
        filename: str, data: bytes, tmp_directory: str) -> Tuple[str, Dict]:
    content = Content(tmp_directory)
    pack_name = 'TmpPack'
    pack_dir = content.path / 'Packs' / pack_name
    # create pack_metadata.json file in TmpPack
    contrib_converter = ContributionConverter(name=pack_name,
                                              base_dir=tmp_directory,
                                              pack_dir_name=pack_name)
    contrib_converter.create_metadata_file({
        'description': 'Temporary Pack',
        'author': 'xsoar'
    })
    prefix = '-'.join(filename.split('-')[:-1])
    containing_dir = pack_dir / ENTITY_TYPE_TO_DIR.get(prefix, 'Integrations')
    containing_dir.mkdir(exist_ok=True)
    is_json = filename.casefold().endswith('.json')
    data_as_string = data.decode()
    loaded_data = json.loads(data_as_string) if is_json else yaml.load(
        data_as_string)
    if is_json:
        data_as_string = json.dumps(loaded_data)
    else:
        buff = io.StringIO()
        yaml.dump(loaded_data, buff)
        data_as_string = buff.getvalue()
    # write content item file to file system
    file_path = containing_dir / filename
    file_path.write_text(data_as_string)
    file_type = find_type(str(file_path))
    file_type = file_type.value if file_type else file_type
    if is_json or file_type in (FileType.PLAYBOOK.value,
                                FileType.TEST_PLAYBOOK.value):
        return str(file_path), {}
    extractor = Extractor(input=str(file_path),
                          file_type=file_type,
                          output=containing_dir,
                          no_logging=True,
                          no_pipenv=True,
                          no_basic_fmt=True)
    # validate the resulting package files, ergo set path_to_validate to the package directory that results
    # from extracting the unified yaml to a package format
    extractor.extract_to_package_format()
    code_fp_to_row_offset = {
        get_extracted_code_filepath(extractor): extractor.lines_inserted_at_code_start
    }
    return extractor.get_output_path(), code_fp_to_row_offset
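
A small helper sketch showing how the returned row-offset mapping could be consumed; the helper name is hypothetical, and it assumes the offset counts the lines inserted at the top of the extracted code file:

from typing import Dict


# Hypothetical helper: map a line number reported against an extracted code file
# back to the corresponding line in the contributed script body.
def to_original_line(code_fp_to_row_offset: Dict[str, int], code_path: str, reported_line: int) -> int:
    offset = code_fp_to_row_offset.get(code_path, 0)
    return reported_line - offset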
Example 3
def prepare_content_pack_for_validation(filename: str, data: bytes, tmp_directory: str) -> str:
    # write zip file data to file system
    zip_path = os.path.abspath(os.path.join(tmp_directory, filename))
    with open(zip_path, 'wb') as fp:
        fp.write(data)

    pack_name = get_pack_name(zip_path)
    contrib_converter = ContributionConverter(name=pack_name, contribution=zip_path, base_dir=tmp_directory)
    # call the standalone conversion function to build the pack from the zip
    convert_contribution_to_pack(contrib_converter)
    os.remove(zip_path)
    return contrib_converter.pack_dir_path
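
A usage sketch for the pack-level counterpart above, assuming the contribution zip has already been read as bytes; the zip file name is hypothetical:

# Hypothetical usage sketch: prepare a whole contribution zip for validation.
import tempfile

with tempfile.TemporaryDirectory() as tmp_dir:
    with open('contribution_TestPack.zip', 'rb') as f:  # hypothetical zip file
        zip_bytes = f.read()
    pack_dir = prepare_content_pack_for_validation(
        filename='contribution_TestPack.zip',
        data=zip_bytes,
        tmp_directory=tmp_dir,
    )
    print(f'Converted pack directory to validate: {pack_dir}')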
Example 4
def test_convert_contribution_dir_to_pack_contents(tmp_path):
    """
    Scenario: convert a directory that was unarchived from a contribution zip into the content
        pack directory that the contribution is intended to update, where the contribution
        includes a file that already exists in the pack

    Given
    - The pack's original content contains incident field files and appears like so

        ├── IncidentFields
        │   └── incidentfield-SomeIncidentField.json

    When
    - After the contribution zip files have been unarchived to the destination pack, the pack
        directory tree appears like so

        ├── IncidentFields
        │   └── incidentfield-SomeIncidentField.json
        ├── incidentfield
        │   └── incidentfield-SomeIncidentField.json

    Then
    - Ensure the file '.../incidentfield/incidentfield-SomeIncidentField.json' is moved to
        '.../IncidentFields/incidentfield-SomeIncidentField.json' and overwrites the existing file
    """
    fake_pack_subdir = tmp_path / 'IncidentFields'
    fake_pack_subdir.mkdir()
    extant_file = fake_pack_subdir / 'incidentfield-SomeIncidentField.json'
    old_json = {"field": "old_value"}
    extant_file.write_text(json.dumps(old_json))
    fake_pack_extracted_dir = tmp_path / 'incidentfield'
    fake_pack_extracted_dir.mkdir()
    update_file = fake_pack_extracted_dir / 'incidentfield-SomeIncidentField.json'
    new_json = {"field": "new_value"}
    update_file.write_text(json.dumps(new_json))
    cc = ContributionConverter()
    cc.pack_dir_path = tmp_path
    cc.convert_contribution_dir_to_pack_contents(fake_pack_extracted_dir)
    assert json.loads(extant_file.read_text()) == new_json
    assert not fake_pack_extracted_dir.exists()
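
The test above exercises convert_contribution_dir_to_pack_contents, which merges a server-style extracted directory (e.g. 'incidentfield') into the matching pack directory (e.g. 'IncidentFields'). The following is only a minimal sketch of that move-and-overwrite behavior using the standard library, not the SDK's actual implementation:

import shutil
from pathlib import Path


# Sketch: move every file from an extracted server-style directory into the
# corresponding pack directory, overwriting files that already exist, then
# remove the now-empty extracted directory. Mirrors the behavior asserted by
# the test above, not the real ContributionConverter code.
def merge_extracted_dir_into_pack(extracted_dir: Path, pack_subdir: Path) -> None:
    pack_subdir.mkdir(parents=True, exist_ok=True)
    for item in extracted_dir.iterdir():
        target = pack_subdir / item.name
        if target.exists():
            target.unlink()  # overwrite the existing pack file
        shutil.move(str(item), str(target))
    extracted_dir.rmdir()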
Example 5
def convert_contribution_to_pack(
        contrib_converter: ContributionConverter) -> Dict:
    """Create or updates a pack in the content repo from the contents of a contribution zipfile

    Args:
        contrib_converter (ContributionConverter): Contribution contributor object
    """
    # only create pack_metadata.json and base pack files if creating a new pack
    if contrib_converter.create_new:
        if contrib_converter.contribution:
            # create pack metadata file
            with zipfile.ZipFile(
                    contrib_converter.contribution) as zipped_contrib:
                with zipped_contrib.open('metadata.json') as metadata_file:
                    metadata = json.loads(metadata_file.read())
                    contrib_converter.create_metadata_file(metadata)
        # create base files
        contrib_converter.create_pack_base_files = types.MethodType(
            _create_pack_base_files, contrib_converter)
        contrib_converter.create_pack_base_files()
    # unpack
    contrib_converter.unpack_contribution_to_dst_pack_directory()
    # convert
    unpacked_contribution_dirs = get_child_directories(
        contrib_converter.pack_dir_path)
    for unpacked_contribution_dir in unpacked_contribution_dirs:
        contrib_converter.convert_contribution_dir_to_pack_contents(
            unpacked_contribution_dir)
    # extract to package format
    code_fp_to_row_offset: Dict[str, int] = {}
    for pack_subdir in get_child_directories(contrib_converter.pack_dir_path):
        basename = os.path.basename(pack_subdir)
        if basename in {SCRIPTS_DIR, INTEGRATIONS_DIR}:
            contrib_converter.content_item_to_package_format = types.MethodType(
                content_item_to_package_format, contrib_converter)
            contrib_converter.content_item_to_package_format(
                pack_subdir,
                del_unified=True,
                source_mapping=None,
                code_fp_to_row_offset=code_fp_to_row_offset)
    return code_fp_to_row_offset
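
convert_contribution_to_pack binds standalone functions onto the converter instance with types.MethodType so they execute as if they were methods of ContributionConverter. A self-contained illustration of that binding pattern, using toy names unrelated to the SDK:

import types


class Greeter:
    def __init__(self, name: str) -> None:
        self.name = name


def _say_hello(self) -> str:
    # 'self' is the Greeter instance once the function is bound below.
    return f'Hello from {self.name}'


greeter = Greeter('TmpPack')
# Bind the standalone function to this specific instance, mirroring how
# create_pack_base_files and content_item_to_package_format are rebound above.
greeter.say_hello = types.MethodType(_say_hello, greeter)
print(greeter.say_hello())  # prints 'Hello from TmpPack'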
Example 6
def test_convert_contribution_zip_updated_pack(get_content_path_mock, get_python_version_mock, tmp_path, mocker):
    """
    Create a fake contribution zip file and test that it is converted to a Pack correctly.
    The pack already exists, checking the update flow.

    Args:
        get_content_path_mock (MagicMock): Patch of the 'get_content_path' function to return the fake repo directory
            used in the test
        get_python_version_mock (MagicMock): Patch of the 'get_python_version' function to return 3.7
        tmp_path (fixture): Temporary Path used for the unit test and cleaned up afterwards

    Scenario: Simulate converting a contribution zip file.

    Given
    - A contribution zip file
    - The zipfile contains a unified integration file
    When
    - Converting the zipfile to a valid Pack structure
    - The contribution is an update to an existing pack
    Then
    - Ensure the integration is componentized and in a valid directory structure
    - Ensure that the readme file has not been changed.

    """
    mocker.patch.object(GitUtil, '__init__', return_value=None)
    mocker.patch.object(GitUtil, 'added_files', return_value=set())
    mocker.patch.object(GitUtil, 'modified_files', return_value=set())
    # Create all Necessary Temporary directories
    # create temp directory for the repo
    repo_dir = tmp_path / 'content_repo'
    repo_dir.mkdir()
    get_content_path_mock.return_value = repo_dir
    get_python_version_mock.return_value = 3.7
    # create a temp target dir in which to create all the TestSuite content items used in the contribution zip;
    # it will be deleted after the zip file is created
    target_dir = repo_dir / 'target_dir'
    target_dir.mkdir()
    # create temp directory in which the contribution zip will reside
    contribution_zip_dir = tmp_path / 'contrib_zip'
    contribution_zip_dir.mkdir()
    # Create fake content repo and contribution zip
    repo = Repo(repo_dir)
    pack = repo.create_pack('TestPack')
    integration = pack.create_integration('integration0')
    integration.create_default_integration()
    contrib_zip = Contribution(target_dir, 'ContribTestPack', repo)
    contrib_zip.create_zip(contribution_zip_dir)
    # target_dir should have been deleted after creation of the zip file
    assert not target_dir.exists()
    name = 'Test Pack'
    contribution_path = contrib_zip.created_zip_filepath
    description = 'test pack description here'
    author = 'Octocat Smith'
    contrib_converter_inst = ContributionConverter(
        name=name, contribution=contribution_path, description=description, author=author, create_new=False,
        no_pipenv=True)
    contrib_converter_inst.convert_contribution_to_pack()
    converted_pack_path = repo_dir / 'Packs' / 'TestPack'
    assert converted_pack_path.exists()
    integrations_path = converted_pack_path / 'Integrations'
    sample_integration_path = integrations_path / 'integration0'
    integration_yml = sample_integration_path / 'integration0.yml'
    integration_py = sample_integration_path / 'integration0.py'
    integration_description = sample_integration_path / 'integration0_description.md'
    integration_image = sample_integration_path / 'integration0_image.png'
    integration_readme_md = sample_integration_path / 'README.md'
    unified_yml = integrations_path / 'integration-integration0.yml'
    unified_yml_in_sample = sample_integration_path / 'integration-integration0.yml'
    integration_files = [integration_yml, integration_py, integration_description, integration_image,
                         integration_readme_md]
    for integration_file in integration_files:
        assert integration_file.exists()
    # In a new pack that part will exist.

    assert not unified_yml.exists()
    assert not unified_yml_in_sample.exists()
Example 7
def create_contribution_converter(request: FixtureRequest, tmp_path_factory: TempPathFactory) -> ContributionConverter:
    tmp_dir = _mk_tmp(request, tmp_path_factory)
    return ContributionConverter(name=request.param, base_dir=str(tmp_dir))
Example 8
def contrib_converter():
    return ContributionConverter('')
Example 9
def test_convert_contribution_zip_with_args(get_content_path_mock, get_python_version_mock, tmp_path, mocker):
    """Convert a contribution zip to a pack and test that the converted pack's 'pack_metadata.json' is correct

    Args:
        get_content_path_mock (MagicMock): Patch of the 'get_content_path' function to return the fake repo directory
            used in the test
        get_python_version_mock (MagicMock): Patch of the 'get_python_version' function to return 3.7
        tmp_path (fixture): Temporary Path used for the unit test and cleaned up afterwards

    Scenario: Simulate converting a contribution zip file

    Given
    - A contribution zip file
    When
    - The contrib_converter class instance is instantiated with the 'name' argument of 'Test Pack'
    - The contrib_converter class instance is instantiated with the 'description' argument
      of 'test pack description here'
    - The contrib_converter class instance is instantiated with the 'author' argument of 'Octocat Smith'
    - The contrib_converter class instance is instantiated with the 'gh_user' argument of 'octocat'
    Then
    - Ensure pack with directory name of 'TestPack' is created
    - Ensure that the pack's 'pack_metadata.json' file's 'name' field is 'Test Pack'
    - Ensure that the pack's 'pack_metadata.json' file's 'description' field is 'test pack description here'
    - Ensure that the pack's 'pack_metadata.json' file's 'author' field is 'Octocat Smith'
    - Ensure that the pack's 'pack_metadata.json' file's 'githubUser' field is a list containing only 'octocat'
    - Ensure that the pack's 'pack_metadata.json' file's 'email' field is the empty string
    """
    mocker.patch.object(GitUtil, '__init__', return_value=None)
    mocker.patch.object(GitUtil, 'added_files', return_value=set())
    mocker.patch.object(GitUtil, 'modified_files', return_value=set())

    # Create all Necessary Temporary directories
    # create temp directory for the repo
    repo_dir = tmp_path / 'content_repo'
    repo_dir.mkdir()
    get_content_path_mock.return_value = repo_dir
    get_python_version_mock.return_value = 3.7
    # create a temp target dir in which to create all the TestSuite content items used in the contribution zip;
    # it will be deleted after the zip file is created
    target_dir = repo_dir / 'target_dir'
    target_dir.mkdir()
    # create temp directory in which the contribution zip will reside
    contribution_zip_dir = tmp_path / 'contrib_zip'
    contribution_zip_dir.mkdir()
    # Create fake content repo and contribution zip
    repo = Repo(repo_dir)
    contrib_zip = Contribution(target_dir, 'ContribTestPack', repo)
    contrib_zip.create_zip(contribution_zip_dir)

    # target_dir should have been deleted after creation of the zip file
    assert not target_dir.exists()

    name = 'Test Pack'
    contribution_path = contrib_zip.created_zip_filepath
    description = 'test pack description here'
    author = 'Octocat Smith'
    gh_user = 'octocat'
    contrib_converter_inst = ContributionConverter(
        name=name, contribution=contribution_path, description=description, author=author, gh_user=gh_user,
        no_pipenv=True)
    contrib_converter_inst.convert_contribution_to_pack()

    converted_pack_path = repo_dir / 'Packs' / 'TestPack'
    assert converted_pack_path.exists()

    pack_metadata_path = converted_pack_path / 'pack_metadata.json'
    assert pack_metadata_path.exists()
    with open(pack_metadata_path, 'r') as pack_metadata:
        metadata = json.load(pack_metadata)
        assert metadata.get('name', '') == name
        assert metadata.get('description', '') == description
        assert metadata.get('author', '') == author
        assert metadata.get('githubUser', []) == [gh_user]
        assert metadata.get('marketplaces', []) == ['xsoar', 'marketplacev2']
        assert not metadata.get('email')
Example 10
def test_convert_contribution_zip(get_content_path_mock, get_python_version_mock, tmp_path, mocker):
    """Create a fake contribution zip file and test that it is converted to a Pack correctly

    Args:
        get_content_path_mock (MagicMock): Patch of the 'get_content_path' function to return the fake repo directory
            used in the test
        get_python_version_mock (MagicMock): Patch of the 'get_python_version' function to return 3.7
        tmp_path (fixture): Temporary Path used for the unit test and cleaned up afterwards

    Scenario: Simulate converting a contribution zip file

    Given
    - A contribution zip file
    - The zipfile contains a unified script file
    - The zipfile contains a unified integration file
    When
    - Converting the zipfile to a valid Pack structure
    Then
    - Ensure the script and integration are componentized and in a valid directory structure
    - Ensure readme_files is not empty and the generated docs exist.
    """
    mocker.patch.object(GitUtil, '__init__', return_value=None)
    mocker.patch.object(GitUtil, 'added_files', return_value=set())
    mocker.patch.object(GitUtil, 'modified_files', return_value=set())
    # Create all Necessary Temporary directories
    # create temp directory for the repo
    repo_dir = tmp_path / 'content_repo'
    repo_dir.mkdir()
    get_content_path_mock.return_value = repo_dir
    get_python_version_mock.return_value = 3.7
    # create a temp target dir in which to create all the TestSuite content items used in the contribution zip;
    # it will be deleted after the zip file is created
    target_dir = repo_dir / 'target_dir'
    target_dir.mkdir()
    # create temp directory in which the contribution zip will reside
    contribution_zip_dir = tmp_path / 'contrib_zip'
    contribution_zip_dir.mkdir()
    # Create fake content repo and contribution zip
    repo = Repo(repo_dir)
    contrib_zip = Contribution(target_dir, 'ContribTestPack', repo)
    contrib_zip.create_zip(contribution_zip_dir)
    # target_dir should have been deleted after creation of the zip file
    assert not target_dir.exists()

    # rename script-script0.yml unified to automation-script0.yml
    # this naming is aligned to how the server exports scripts in contribution zips
    rename_file_in_zip(
        contrib_zip.created_zip_filepath, 'automation/script-script0.yml', 'automation/automation-script0.yml'
    )

    name = 'Contrib Test Pack'
    contribution_path = contrib_zip.created_zip_filepath
    description = 'test pack description here'
    author = 'Octocat Smith'
    contrib_converter_inst = ContributionConverter(
        name=name, contribution=contribution_path, description=description, author=author, no_pipenv=True)
    contrib_converter_inst.convert_contribution_to_pack()

    converted_pack_path = repo_dir / 'Packs' / 'ContribTestPack'
    assert converted_pack_path.exists()

    scripts_path = converted_pack_path / 'Scripts'
    sample_script_path = scripts_path / 'SampleScript'
    script_yml = sample_script_path / 'SampleScript.yml'
    script_py = sample_script_path / 'SampleScript.py'
    script_readme_md = sample_script_path / 'README.md'
    unified_script_in_sample = sample_script_path / 'automation-script0.yml'
    unified_script = scripts_path / 'automation-script0.yml'

    assert scripts_path.exists()
    assert sample_script_path.exists()
    assert script_yml.exists()
    assert script_py.exists()
    assert script_readme_md.exists()
    assert not unified_script_in_sample.exists()
    assert not unified_script.exists()

    integrations_path = converted_pack_path / 'Integrations'
    sample_integration_path = integrations_path / 'Sample'
    integration_yml = sample_integration_path / 'Sample.yml'
    integration_py = sample_integration_path / 'Sample.py'
    integration_description = sample_integration_path / 'Sample_description.md'
    integration_image = sample_integration_path / 'Sample_image.png'
    integration_readme_md = sample_integration_path / 'README.md'
    unified_yml = integrations_path / 'integration-integration0.yml'
    unified_yml_in_sample = sample_integration_path / 'integration-integration0.yml'
    integration_files = [integration_yml, integration_py, integration_description, integration_image,
                         integration_readme_md]
    for integration_file in integration_files:
        assert integration_file.exists()
    assert not unified_yml.exists()
    assert not unified_yml_in_sample.exists()

    playbooks_path = converted_pack_path / 'Playbooks'
    playbook_yml = playbooks_path / 'playbook-SamplePlaybook.yml'
    playbook_readme_md = playbooks_path / 'playbook-SamplePlaybook_README.md'

    assert playbooks_path.exists()
    assert playbook_yml.exists()
    assert playbook_readme_md.exists()

    layouts_path = converted_pack_path / 'Layouts'
    sample_layoutscontainer = layouts_path / f'{LAYOUTS_CONTAINER}-fakelayoutscontainer.json'
    sample_layout = layouts_path / f'{LAYOUT}-fakelayout.json'

    assert layouts_path.exists()
    assert sample_layoutscontainer.exists()
    assert sample_layout.exists()

    assert set(contrib_converter_inst.readme_files) == {str(playbook_readme_md), str(integration_readme_md),
                                                        str(script_readme_md)}
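
The test above (and the next one) calls a rename_file_in_zip helper whose implementation is not shown here. A hedged sketch of how such a helper could be written with the standard zipfile module, assuming it rewrites the archive in place; the name rename_member_in_zip is illustrative:

import os
import zipfile


# Sketch: zipfile cannot rename members in place, so rewrite the archive,
# copying every member and changing the name of the one that matches.
def rename_member_in_zip(zip_path: str, old_name: str, new_name: str) -> None:
    tmp_path = zip_path + '.tmp'
    with zipfile.ZipFile(zip_path, 'r') as src, \
            zipfile.ZipFile(tmp_path, 'w', zipfile.ZIP_DEFLATED) as dst:
        for member in src.infolist():
            data = src.read(member.filename)
            target_name = new_name if member.filename == old_name else member.filename
            dst.writestr(target_name, data)
    os.replace(tmp_path, zip_path)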
Example 11
def test_convert_contribution_zip_outputs_structure(get_content_path_mock, get_python_version_mock, tmp_path, mocker):
    """Create a fake contribution zip file and test that it is converted to a Pack correctly

    Args:
        get_content_path_mock (MagicMock): Patch of the 'get_content_path' function to return the fake repo directory
            used in the test
        get_python_version_mock (MagicMock): Patch of the 'get_python_version' function to return 3.7
        tmp_path (fixture): Temporary Path used for the unit test and cleaned up afterwards

    Scenario: Simulate converting a contribution zip file

    Given
    - A contribution zip file
    - The zipfile contains a unified script file
    - The zipfile contains a unified integration file
    When
    - Converting the zipfile to a valid Pack structure
    Then
    - Ensure the unified yaml files of the integration and script have been removed from the output created by
      converting the contribution zip file
    """
    mocker.patch.object(GitUtil, '__init__', return_value=None)
    mocker.patch.object(GitUtil, 'added_files', return_value=set())
    mocker.patch.object(GitUtil, 'modified_files', return_value=set())
    # ### SETUP ### #
    # Create all Necessary Temporary directories
    # create temp directory for the repo
    repo_dir = tmp_path / 'content_repo'
    repo_dir.mkdir()
    get_content_path_mock.return_value = repo_dir
    get_python_version_mock.return_value = 3.7
    # create a temp target dir in which to create all the TestSuite content items used in the contribution zip;
    # it will be deleted after the zip file is created
    target_dir = repo_dir / 'target_dir'
    target_dir.mkdir()
    # create temp directory in which the contribution zip will reside
    contribution_zip_dir = tmp_path / 'contrib_zip'
    contribution_zip_dir.mkdir()
    # Create fake content repo and contribution zip
    repo = Repo(repo_dir)
    contrib_zip = Contribution(target_dir, 'ContribTestPack', repo)
    contrib_zip.create_zip(contribution_zip_dir)
    # rename script-script0.yml unified to automation-script0.yml
    # this naming is aligned to how the server exports scripts in contribution zips
    rename_file_in_zip(
        contrib_zip.created_zip_filepath, 'automation/script-script0.yml', 'automation/automation-script0.yml'
    )

    # Convert Zip
    name = 'Contrib Test Pack'
    contribution_path = contrib_zip.created_zip_filepath
    description = 'test pack description here'
    author = 'Octocat Smith'
    contrib_converter_inst = ContributionConverter(
        name=name, contribution=contribution_path, description=description, author=author, no_pipenv=True)
    contrib_converter_inst.convert_contribution_to_pack()

    # Ensure directory/file structure output by conversion meets expectations

    # target_dir should have been deleted after creation of the zip file
    assert not target_dir.exists()

    converted_pack_path = repo_dir / 'Packs' / 'ContribTestPack'
    assert converted_pack_path.exists()

    scripts_path = converted_pack_path / 'Scripts'
    sample_script_path = scripts_path / 'SampleScript'
    script_yml = sample_script_path / 'SampleScript.yml'
    script_py = sample_script_path / 'SampleScript.py'
    script_readme_md = sample_script_path / 'README.md'
    unified_script_in_sample = sample_script_path / 'automation-script0.yml'
    unified_script = scripts_path / 'automation-script0.yml'

    assert scripts_path.exists()
    assert sample_script_path.exists()
    assert script_yml.exists()
    assert script_py.exists()
    assert script_readme_md.exists()

    # generated script readme should not be empty
    script_statinfo = os.stat(script_readme_md)
    assert script_statinfo and script_statinfo.st_size > 0
    # unified yaml of the script should have been deleted
    assert not unified_script_in_sample.exists()
    assert not unified_script.exists()

    integrations_path = converted_pack_path / 'Integrations'
    sample_integration_path = integrations_path / 'Sample'
    integration_yml = sample_integration_path / 'Sample.yml'
    integration_py = sample_integration_path / 'Sample.py'
    integration_description = sample_integration_path / 'Sample_description.md'
    integration_image = sample_integration_path / 'Sample_image.png'
    integration_readme_md = sample_integration_path / 'README.md'
    unified_yml = integrations_path / 'integration-integration0.yml'
    unified_yml_in_sample = sample_integration_path / 'integration-integration0.yml'
    integration_files = [integration_yml, integration_py, integration_description, integration_image,
                         integration_readme_md]
    for integration_file in integration_files:
        assert integration_file.exists()
    # generated integration readme should not be empty
    statinfo = os.stat(integration_readme_md)
    assert statinfo and statinfo.st_size > 0

    # unified yaml of the integration should have been deleted
    assert not unified_yml.exists()
    assert not unified_yml_in_sample.exists()