Example No. 1
    def get_modified_files(files_string, tag='master'):
        """Get lists of the modified files in your branch according to the files string.

        Args:
            files_string (string): String that was calculated by git using `git diff` command.
            tag (string): String of git tag used to update modified files

        Returns:
            (modified_files_list, added_files_list, deleted_files). Tuple of sets.
        """
        all_files = files_string.split('\n')
        deleted_files = set()
        added_files_list = set()
        modified_files_list = set()
        old_format_files = set()
        for f in all_files:
            file_data = f.split()
            if not file_data:
                continue

            file_status = file_data[0]
            file_path = file_data[1]

            if file_status.lower().startswith('r'):
                file_status = 'r'
                file_path = file_data[2]

            if checked_type(file_path, CODE_FILES_REGEX) and file_status.lower() != 'd' \
                    and not file_path.endswith('_test.py'):
                # naming convention - code file and yml file in packages must have same name.
                file_path = os.path.splitext(file_path)[0] + '.yml'
            elif file_path.endswith('.js') or file_path.endswith('.py'):
                continue

            if file_status.lower() in ['m', 'a', 'r'] and checked_type(file_path, OLD_YML_FORMAT_FILE) and \
                    FilesValidator.is_py_script_or_integration(file_path):
                old_format_files.add(file_path)
            elif file_status.lower() == 'm' and checked_type(
                    file_path) and not file_path.startswith('.'):
                modified_files_list.add(file_path)
            elif file_status.lower() == 'a' and checked_type(
                    file_path) and not file_path.startswith('.'):
                added_files_list.add(file_path)
            elif file_status.lower() == 'd' and checked_type(
                    file_path) and not file_path.startswith('.'):
                deleted_files.add(file_path)
            elif file_status.lower().startswith('r') and checked_type(
                    file_path):
                modified_files_list.add((file_data[1], file_data[2]))
            elif checked_type(file_path, [SCHEMA_REGEX]):
                modified_files_list.add(file_path)
            elif file_status.lower() not in KNOWN_FILE_STATUSES:
                print_error(file_path +
                            " file status is an unknown one, "
                            "please check. File status was: " + file_status)
        modified_files_list, added_files_list, deleted_files = filter_packagify_changes(
            modified_files_list, added_files_list, deleted_files, tag)

        return modified_files_list, added_files_list, deleted_files, old_format_files
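
The `files_string` argument is the raw output of `git diff --name-status`: each line starts with a one-letter status (`M`, `A`, `D`) or a rename score (`R100`) followed by the path(s). A minimal usage sketch, assuming the method is reachable on a FilesValidator instance as in the examples below (the sample paths are made up):

# Illustrative only: the shape of `git diff --name-status` output the parser expects.
# Renamed entries ("R100") carry two paths (old, new), which is why the loop
# reads file_data[2] for statuses that start with 'r'.
files_string = (
    "M\tIntegrations/integration-MyIntegration.yml\n"
    "A\tScripts/script-MyScript.yml\n"
    "D\tPlaybooks/playbook-Old_Playbook.yml\n"
    "R100\tScripts/script-OldName.yml\tScripts/script-NewName.yml"
)

# Hypothetical call, mirroring how the later examples use FilesValidator.
modified, added, deleted, old_format = FilesValidator().get_modified_files(files_string)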
Example No. 2
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1', help='commit sha1 to compare changes with')
    arg_parser.add_argument('asset_id', help='Asset ID')
    arg_parser.add_argument('server_version', help='Server version')
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()

    tag = get_last_release_version()
    print('Last release version: {}'.format(tag))

    # get changed yaml/json files (filter only relevant changed files)
    file_validator = FilesValidator()
    try:
        change_log = run_command('git diff --name-status {}'.format(args.git_sha1), exit_on_error=False)
        modified_files, added_files, removed_files, _ = file_validator.get_modified_files(change_log)
        modified_files, added_files, removed_files = filter_packagify_changes(modified_files, added_files,
                                                                              removed_files, tag=tag)

        for file_path in added_files:
            create_file_release_notes('A', file_path)

        for file_path in modified_files:
            create_file_release_notes('M', file_path)

        for file_path in removed_files:
            handle_deleted_file(file_path, tag)

        # join all release notes
        res = []
        beta_res = []
        missing_release_notes = False
        for key in RELEASE_NOTES_ORDER:
            value = RELEASE_NOTE_GENERATOR[key]
            ans, beta_ans = value.generate_release_notes(args.server_version)
            if ans is None or value.is_missing_release_notes:
                missing_release_notes = True
            if ans:
                res.append(ans)
            if beta_ans:
                beta_res.append(beta_ans)

        release_notes = "\n---\n".join(res)
        beta_release_notes = "\n---\n".join(beta_res)
        create_content_descriptor(args.version, args.asset_id, release_notes, args.github_token, beta_rn=beta_release_notes)

        if missing_release_notes:
            print_error("Error: some release notes are missing. See previous errors.")
            sys.exit(1)
    except RuntimeError:
        print_error('Unable to get the SHA1 of the commit in which the version was released. This can happen if your '
                    'branch is not updated with origin master. Merge from origin master and try again.\n'
                    'If you\'re not on a fork, run "git merge origin/master".\n'
                    'If you are on a fork, first set https://github.com/demisto/content to be '
                    'your upstream by running "git remote add upstream https://github.com/demisto/content". After '
                    'setting the upstream, run "git fetch upstream", and then run "git merge upstream/master". Doing '
                    'these steps will merge your branch with content master as a base.')
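
This variant calls `run_command` with `exit_on_error=False` and catches the resulting `RuntimeError` itself. A minimal sketch of such a helper, assuming it wraps `subprocess` and raises instead of exiting when asked to (an illustration, not the project's actual implementation):

import subprocess
import sys


def run_command(command, exit_on_error=True):
    # Hypothetical helper matching how the example uses it: return stdout on
    # success; on failure either exit the script or raise RuntimeError so the
    # caller's try/except can handle it.
    process = subprocess.Popen(command.split(), stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, universal_newlines=True)
    stdout, stderr = process.communicate()
    if process.returncode != 0:
        message = 'Failed to run "{}": {}'.format(command, stderr)
        if exit_on_error:
            sys.exit(message)
        raise RuntimeError(message)
    return stdout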
Example No. 3
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1',
                            help='commit sha1 to compare changes with')
    arg_parser.add_argument('asset_id', help='Asset ID')
    arg_parser.add_argument('server_version', help='Server version')
    arg_parser.add_argument('--github-token', help='Github token')
    args = arg_parser.parse_args()

    tag = get_last_release_version()
    print('Last release version: {}'.format(tag))

    # get changed yaml/json files (filter only relevant changed files)
    file_validator = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(args.git_sha1))
    modified_files, added_files, removed_files, _ = file_validator.get_modified_files(
        change_log)
    modified_files, added_files, removed_files = filter_packagify_changes(
        modified_files, added_files, removed_files, tag=tag)

    for file_path in added_files:
        create_file_release_notes('A', file_path)

    for file_path in modified_files:
        create_file_release_notes('M', file_path)

    for file_path in removed_files:
        handle_deleted_file(file_path, tag)

    # join all release notes
    res = []
    beta_res = []
    missing_release_notes = False
    for key in RELEASE_NOTES_ORDER:
        value = RELEASE_NOTE_GENERATOR[key]
        ans, beta_ans = value.generate_release_notes(args.server_version)
        if ans is None or value.is_missing_release_notes:
            missing_release_notes = True
        if ans:
            res.append(ans)
        if beta_ans:
            beta_res.append(beta_ans)

    release_notes = "\n---\n".join(res)
    beta_release_notes = "\n---\n".join(beta_res)
    create_content_descriptor(args.version,
                              args.asset_id,
                              release_notes,
                              args.github_token,
                              beta_rn=beta_release_notes)

    if missing_release_notes:
        print_error(
            "Error: some release notes are missing. See previous errors.")
        sys.exit(1)
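
Both `main()` variants iterate over `RELEASE_NOTES_ORDER` and `RELEASE_NOTE_GENERATOR`, which are not shown here. A minimal sketch of the shape the loop assumes, with a hypothetical stand-in generator class and illustrative section names:

class SectionReleaseNotes:
    # Hypothetical stand-in for the per-section generator objects the loop uses:
    # generate_release_notes() returns a (release_notes, beta_release_notes) pair,
    # and is_missing_release_notes flags changed files that lack release notes.
    def __init__(self, title):
        self.title = title
        self.is_missing_release_notes = False

    def generate_release_notes(self, server_version):
        return '### {}\n- Example note.'.format(self.title), ''


# Illustrative ordering only, not the scripts' real section list.
RELEASE_NOTES_ORDER = ['Integrations', 'Scripts', 'Playbooks']
RELEASE_NOTE_GENERATOR = {key: SectionReleaseNotes(key) for key in RELEASE_NOTES_ORDER}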