Beispiel #1
0
def main():
    """Mark pending ("unreleased") release-notes files as released.

    Collects the files changed since ``git_sha1`` (via ``git diff``) and, for
    every changed content entity that is relevant for ``server_version``,
    rewrites its release-notes file in place so the unreleased header is
    replaced with the versioned change-log header (version + release date).
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1', help='commit sha1 to compare changes with')
    arg_parser.add_argument('server_version', help='Server version')
    # argparse applies %-formatting to help strings, so a literal percent sign
    # must be escaped as '%%' -- a bare '%Y' makes `--help` raise ValueError.
    arg_parser.add_argument('-d', '--date', help='release date in the format %%Y-%%m-%%d', required=False)
    args = arg_parser.parse_args()

    # default the release date to today when not supplied on the command line
    date = args.date if args.date else datetime.now().strftime('%Y-%m-%d')

    # get changed yaml/json files (filter only relevant changed files)
    fv = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(args.git_sha1))
    modified_files, added_files, _, _ = fv.get_modified_files(change_log)

    for file_path in get_changed_content_entities(modified_files, added_files):
        if not should_clear(file_path, args.server_version):
            continue
        rn_path = get_release_notes_file_path(file_path)
        if os.path.isfile(rn_path):
            # if file exists, mark the current notes as release relevant
            with open(rn_path, 'r+') as rn_file:
                text = rn_file.read()
                rn_file.seek(0)
                text = text.replace(UNRELEASE_HEADER, CHANGE_LOG_FORMAT.format(version=args.version, date=date))
                rn_file.write(text)
                # drop any leftover tail bytes when the rewritten text is
                # shorter than the original contents
                rn_file.truncate()
def get_new_and_modified_integrations(git_sha1):
    '''Return 2 lists - list of new integrations and list of modified integrations since the commit of the git_sha1.

    Args:
        git_sha1 (str): The git sha of the commit against which we will run the 'git diff' command.

    Returns:
        (tuple): Returns a tuple of two lists, the names of the new integrations, and the names of
            modified integrations.
    '''
    # get changed yaml files (filter only added and modified files)
    tag = get_last_release_version()
    file_validator = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(git_sha1))
    modified_files, added_files, _, _ = file_validator.get_modified_files(change_log, tag)
    # Copy the module-level list before extending it: assigning and then
    # calling extend() on YML_INTEGRATION_REGEXES directly would mutate the
    # shared constant, growing it on every call to this function.
    all_integration_regexes = list(YML_INTEGRATION_REGEXES)
    all_integration_regexes.extend([INTEGRATION_REGEX, PACKS_INTEGRATION_REGEX, BETA_INTEGRATION_REGEX])
    added_integration_files = [
        file_path for file_path in added_files if checked_type(file_path, all_integration_regexes)
    ]
    # modified_files may also contain rename tuples (old_path, new_path), so
    # only plain string paths are considered here.
    modified_integration_files = [
        file_path for file_path in modified_files if
        isinstance(file_path, str) and checked_type(file_path, all_integration_regexes)
    ]

    # resolve each file path to an integration name once, dropping entries for
    # which no name could be resolved (falsy results)
    new_integrations_names = [
        name for name in
        (filepath_to_integration_name(file_path) for file_path in added_integration_files) if name
    ]
    modified_integrations_names = [
        name for name in
        (filepath_to_integration_name(file_path) for file_path in modified_integration_files) if name
    ]
    return new_integrations_names, modified_integrations_names
Beispiel #3
0
def main():
    """Build the release notes and the content descriptor for a release.

    Diffs the repository against ``git_sha1``, writes a release-notes entry
    for every added/modified file and handles deleted ones, then joins all
    generated sections into the content descriptor. Exits with status 1 when
    any release notes are missing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('version', help='Release version')
    parser.add_argument('git_sha1', help='commit sha1 to compare changes with')
    parser.add_argument('asset_id', help='Asset ID')
    parser.add_argument('server_version', help='Server version')
    parser.add_argument('--github-token', help='Github token')
    options = parser.parse_args()

    last_tag = get_last_release_version()
    print('Last release version: {}'.format(last_tag))

    # get changed yaml/json files (filter only relevant changed files)
    validator = FilesValidator()
    try:
        diff_output = run_command('git diff --name-status {}'.format(options.git_sha1), exit_on_error=False)
        modified, added, removed, _ = validator.get_modified_files(diff_output)
        modified, added, removed = filter_packagify_changes(modified, added, removed, tag=last_tag)

        # record a release-notes entry per changed file, keyed by git status
        for path in added:
            create_file_release_notes('A', path)
        for path in modified:
            create_file_release_notes('M', path)
        for path in removed:
            handle_deleted_file(path, last_tag)

        # join all release notes
        sections = []
        beta_sections = []
        notes_missing = False
        for section_key in RELEASE_NOTES_ORDER:
            generator = RELEASE_NOTE_GENERATOR[section_key]
            section_text, beta_text = generator.generate_release_notes(options.server_version)
            if section_text is None or generator.is_missing_release_notes:
                notes_missing = True
            if section_text:
                sections.append(section_text)
            if beta_text:
                beta_sections.append(beta_text)

        release_notes = "\n---\n".join(sections)
        beta_release_notes = "\n---\n".join(beta_sections)
        create_content_descriptor(options.version, options.asset_id, release_notes, options.github_token,
                                  beta_rn=beta_release_notes)

        if notes_missing:
            print_error("Error: some release notes are missing. See previous errors.")
            sys.exit(1)
    except RuntimeError:
        # run_command raised instead of exiting (exit_on_error=False): the
        # release commit could not be diffed against this branch.
        print_error('Unable to get the SHA1 of the commit in which the version was released. This can happen if your '
                    'branch is not updated with origin master. Merge from origin master and, try again.\n'
                    'If you\'re not on a fork, run "git merge origin/master".\n'
                    'If you are on a fork, first set https://github.com/demisto/content to be '
                    'your upstream by running "git remote add upstream https://github.com/demisto/content". After '
                    'setting the upstream, run "git fetch upstream", and then run "git merge upstream/master". Doing '
                    'these steps will merge your branch with content master as a base.')
Beispiel #4
0
def main():
    """Generate release notes for everything changed since ``git_sha1`` and
    write the versioned content descriptor.

    Exits with status 1 when any release notes are missing.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument('version', help='Release version')
    cli.add_argument('git_sha1',
                     help='commit sha1 to compare changes with')
    cli.add_argument('asset_id', help='Asset ID')
    cli.add_argument('server_version', help='Server version')
    cli.add_argument('--github-token', help='Github token')
    parsed = cli.parse_args()

    previous_tag = get_last_release_version()
    print('Last release version: {}'.format(previous_tag))

    # get changed yaml/json files (filter only relevant changed files)
    checker = FilesValidator()
    raw_diff = run_command('git diff --name-status {}'.format(parsed.git_sha1))
    changed, new_files, deleted, _ = checker.get_modified_files(raw_diff)
    changed, new_files, deleted = filter_packagify_changes(
        changed, new_files, deleted, tag=previous_tag)

    # one release-notes entry per file, keyed by its git status letter
    for entry in new_files:
        create_file_release_notes('A', entry)
    for entry in changed:
        create_file_release_notes('M', entry)
    for entry in deleted:
        handle_deleted_file(entry, previous_tag)

    # join all release notes
    collected = []
    beta_collected = []
    any_missing = False
    for order_key in RELEASE_NOTES_ORDER:
        section_generator = RELEASE_NOTE_GENERATOR[order_key]
        notes, beta_notes = section_generator.generate_release_notes(parsed.server_version)
        if notes is None or section_generator.is_missing_release_notes:
            any_missing = True
        if notes:
            collected.append(notes)
        if beta_notes:
            beta_collected.append(beta_notes)

    create_content_descriptor(parsed.version,
                              parsed.asset_id,
                              "\n---\n".join(collected),
                              parsed.github_token,
                              beta_rn="\n---\n".join(beta_collected))

    if any_missing:
        print_error(
            "Error: some release notes are missing. See previous errors.")
        sys.exit(1)
Beispiel #5
0
def test_get_modified_files_without_packagify(mocker):
    """Exercise FilesValidator.get_modified_files on raw `git diff --name-status`
    output with packagify-change filtering mocked to a no-op.

    Covers added (A), deleted (D), and renamed (R<score>) statuses, including
    renames into test_data (ignored) and renames reported as (old, new) tuples.
    """
    # avoid reading conf.json from disk during FilesValidator construction
    mocker.patch(
        'Tests.scripts.hook_validations.conf_json.ConfJsonValidator.load_conf_file',
        return_value={})
    file_validator = FilesValidator()

    changed_files = '''A       Integrations/Recorded_Future/CHANGELOG.md
A       Integrations/Recorded_Future/Recorded_Future.py
A       Integrations/Recorded_Future/Recorded_Future.yml
A       Integrations/Recorded_Future/Recorded_Future_image.png
D       Integrations/integration-Recorded_Future.yml'''

    # pass-through mock: changes are reported as-is, without packagify merging
    mocker.patch('Tests.scripts.validate_files.filter_packagify_changes',
                 side_effect=packagify_mock_no_change)
    modified, added, deleted, old_format = file_validator.get_modified_files(
        changed_files)
    # only the .yml counts as an added entity; CHANGELOG/.py/.png are ignored
    assert len(modified) == 0
    assert len(added) == 1
    assert 'Integrations/Recorded_Future/Recorded_Future.yml' in added
    assert len(deleted) == 1
    assert 'Integrations/integration-Recorded_Future.yml' in deleted

    # rename into test_data: not a content change, so nothing is reported
    changed_files = 'R100       Integrations/Recorded_Future/Recorded_Future.yml ' \
                    'Integrations/Recorded_Future/test_data/Recorded_Future.yml'
    modified, added, deleted, old_format = file_validator.get_modified_files(
        changed_files)
    assert len(modified) == 0
    assert len(added) == 0
    assert len(deleted) == 0

    # renaming the package .py is reported as a modification of the package's
    # .yml (the integration entity), not of the .py itself
    changed_files = 'R100       Integrations/Recorded_Future_v2/Recorded_Future.py ' \
                    'Integrations/Recorded_Future/Recorded_Future.py'
    modified, added, deleted, old_format = file_validator.get_modified_files(
        changed_files)
    assert len(modified) == 1
    assert 'Integrations/Recorded_Future/Recorded_Future.yml' in modified
    assert len(added) == 0
    assert len(deleted) == 0

    # a partial rename (R34) of a .yml yields an (old_path, new_path) tuple
    changed_files = 'R34       Integrations/Recorded_Future/Recorded_Future_v2.yml ' \
                    'Integrations/Recorded_Future/Recorded_Future.yml'
    modified, added, deleted, old_format = file_validator.get_modified_files(
        changed_files)
    assert len(modified) == 1
    assert ('Integrations/Recorded_Future/Recorded_Future_v2.yml',
            'Integrations/Recorded_Future/Recorded_Future.yml') in modified
    assert len(added) == 0
    assert len(deleted) == 0

    # a .yml whose name does not match the package directory is ignored
    changed_files = 'A       Integrations/Recorded_Future/some_yml.yml'
    modified, added, deleted, old_format = file_validator.get_modified_files(
        changed_files)
    assert len(modified) == 0
    assert len(added) == 0
    assert len(deleted) == 0
Beispiel #6
0
def test_get_modified_files_packagify(mocker):
    """Verify that a deleted legacy integration plus an added package of the
    same name ("packagify") is collapsed into a single (old, new) modification.
    """
    # avoid reading conf.json from disk during FilesValidator construction
    mocker.patch(
        'Tests.scripts.hook_validations.conf_json.ConfJsonValidator.load_conf_file',
        return_value={})
    file_validator = FilesValidator()

    # NOTE(review): unlike the previous test's fixture, the continuation lines
    # here carry leading indentation inside the string literal -- presumably
    # the diff parser tolerates surrounding whitespace; confirm intentional.
    changed_files = '''A       Integrations/Recorded_Future/CHANGELOG.md
    A       Integrations/Recorded_Future/Recorded_Future.py
    A       Integrations/Recorded_Future/Recorded_Future.yml
    A       Integrations/Recorded_Future/Recorded_Future_image.png
    D       Integrations/integration-Recorded_Future.yml'''

    # both the remote (deleted) file and the local (added) file resolve to the
    # same integration name, which is what triggers the packagify match
    mocker.patch('Tests.test_utils.get_remote_file',
                 return_value={'name': 'Recorded Future'})
    # in python 3, this should be 'builtins.open'
    mocker.patch('__builtin__.open',
                 mocker.mock_open(read_data="{'name': 'Recorded Future'}"))
    modified, added, deleted, old_format = file_validator.get_modified_files(
        changed_files)
    # delete+add pair collapses to one modification; nothing added/deleted
    assert len(modified) == 1
    assert ('Integrations/integration-Recorded_Future.yml',
            'Integrations/Recorded_Future/Recorded_Future.yml') in modified
    assert len(added) == 0
    assert len(deleted) == 0