Example #1
def cleanup_cmdb_repo_to_v2_0_0(root_dir, dry_run):
    config_dirs = Search.match_dirs(os.path.join('**', 'config'),
                                    root=root_dir)
    for config_dir in config_dirs:
        solutions_dir = os.path.join(config_dir, 'solutionsv2')
        if os.path.isdir(solutions_dir):
            logger.info('%sDeleting %s', dry_run, solutions_dir)
            if dry_run:
                continue
            shutil.rmtree(solutions_dir)
    return True
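All examples in this listing assume the same module-level imports and helpers (os, re, json, shutil, filecmp, a logger, and a Search glob wrapper) that the extraction dropped. A minimal sketch of the assumed Search surface, purely illustrative (the real helper lives in the surrounding codebase):

import logging
import os
from glob import glob

logger = logging.getLogger(__name__)


class Search:
    # Illustrative stand-in for the glob helper the examples rely on

    @staticmethod
    def match_dirs(*patterns, root='.', include_file_dirs=False):
        # Expand each pattern under root; with include_file_dirs, a pattern
        # that matches a file yields the file's containing directory instead
        results = []
        for pattern in patterns:
            for match in glob(os.path.join(root, pattern), recursive=True):
                if os.path.isdir(match):
                    results.append(match)
                elif include_file_dirs:
                    results.append(os.path.dirname(match))
        return results

    @staticmethod
    def match_files(*patterns, root='.'):
        return [
            match for pattern in patterns
            for match in glob(os.path.join(root, pattern), recursive=True)
            if os.path.isfile(match)
        ]

    @staticmethod
    def list_files(root):
        # Non-recursive listing of plain files directly under root
        return [
            name for name in os.listdir(root)
            if os.path.isfile(os.path.join(root, name))
        ]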
Example #2
def find_gen3_account_infrastructure_dir(root_dir, account):
    patterns = [
        os.path.join('**', 'infrastructure', '**', account),
        os.path.join('**', account, 'infrastructure')
    ]
    matches = Search.match_dirs(*patterns,
                                root=root_dir,
                                include_file_dirs=True)
    if not matches:
        raise Exception("Can't find account infrastructure dir")
    return matches[0]
Example #3
def find_gen3_account_dir(root_dir, account):
    patterns = [
        os.path.join('**', account, 'account.json'),
        os.path.join('**', account, 'config', 'account.json')
    ]
    matches = Search.match_dirs(*patterns,
                                root=root_dir,
                                include_file_dirs=True)
    if not matches:
        raise Exception("Can't find account dir")
    return matches[0]
Example #4
def find_gen3_environment_dir(root_dir, product, environment):
    product_dir = find_gen3_product_dir(root_dir, product)
    patterns = [
        os.path.join('**', 'solutionsv2', environment, 'environment.json')
    ]
    matches = Search.match_dirs(*patterns,
                                root=product_dir,
                                include_file_dirs=True)
    if not matches:
        raise Exception("Can't find environment dir")
    return matches[0]
Example #5
def cleanup_cmdb_repo_to_v1_1_0(root_dir, dry_run):
    for source in UPGRADE_V1_1_0_SOURCES:
        source_dirs = Search.match_dirs(os.path.join('**', source),
                                        root=root_dir)
        for source_dir in source_dirs:
            target_dir = os.path.join(os.path.dirname(source_dir),
                                      UPGRADE_V1_1_0_SOURCES[source])
            logger.debug('Checking %s', source_dir)
            logger.debug('Target dir %s', target_dir)
            if not os.path.isdir(target_dir):
                continue
            logger.info('%sDeleting %s', dry_run, source_dir)
            if dry_run:
                continue
            shutil.rmtree(source_dir)
    return True
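Examples #5 and #10 iterate over an UPGRADE_V1_1_0_SOURCES mapping of legacy v1.0 directory names to their v1.1 replacements. Its contents are not part of this listing; a hypothetical shape, consistent with the special-casing of 'aws', 'solutions' and 'credentials' in Example #10:

# Hypothetical mapping; the real values live in the module this listing
# was extracted from
UPGRADE_V1_1_0_SOURCES = {
    'aws': 'cf',
    'solutions': 'solutionsv2',
    'credentials': 'operations',
}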
Example #6
def upgrade_cmdb_repo_to_v2_0_1(root_dir, dry_run):
    # Reorganise state files into a directory tree based on deployment unit and placement
    #
    # The format of the state tree will follow the pattern
    # state/{df}/{env}/{seg}/{du}/{placement}
    #
    # Delete definition files because their file name contains the occurrence name not the
    # deployment unit. They will be regenerated into the correct dir on the next build.
    state_dirs = Search.match_dirs(os.path.join('**', 'state'), root=root_dir)
    for state_dir in state_dirs:
        deployment_frameworks = Search.match_dirs('*', root=state_dir)
        for df_dir in deployment_frameworks:
            deployment_framework = os.path.basename(df_dir)
            logger.debug('%sChecking deployment framework %s', dry_run,
                         deployment_framework)
            state_files = Search.match_files(os.path.join('**', '*'),
                                             root=df_dir)
            for state_file in state_files:
                state_basename = os.path.basename(state_file)
                state_dirname = os.path.dirname(state_file)
                stack_deployment_unit = ""
                # Filename format varies with deployment framework
                pattern_1 = r"([a-z0-9]+)-(.+)-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])(-pseudo)?-(.+)"
                pattern_2 = r"([a-z0-9]+)-(.+)-([a-z][a-z0-9]+)-(eastus|australiaeast|australiasoutheast|australiacentral|australiacentral2)(-pseudo)?-(.+)"  # noqa
                match = re.match(pattern_1, state_basename) or re.match(
                    pattern_2, state_basename)
                stack_level = ''
                stack_deployment_unit = ''
                stack_region = ''
                if match:
                    stack_level = match.group(1)
                    stack_deployment_unit = match.group(2)
                    stack_region = match.group(4)
                if not stack_deployment_unit:
                    # Legacy account formats
                    match = re.match(
                        r"account-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                        state_basename)
                    if match:
                        stack_level = "account"
                        stack_deployment_unit = match.group(1)
                        stack_region = match.group(2)
                    match = re.match(r"account-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                                     state_basename)
                    if match:
                        stack_level = "account"
                        stack_deployment_unit = "s3"
                        stack_region = match.group(1)
                if not stack_deployment_unit:
                    # Legacy product formats
                    match = re.match(r"product-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                                     state_basename)
                    if match:
                        stack_level = "product"
                        stack_deployment_unit = "cmk"
                        stack_region = match.group(1)
                if not stack_deployment_unit:
                    # Legacy segment formats
                    match = re.match(r"seg-key-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                                     state_basename)
                    if match:
                        stack_level = "seg"
                        stack_deployment_unit = "cmk"
                        stack_region = match.group(1)
                    match = re.match(
                        r"cont-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                        state_basename)
                    if match:
                        stack_level = "seg"
                        stack_deployment_unit = match.group(1)
                        stack_region = match.group(2)
                if not stack_deployment_unit:
                    logger.warning(
                        '%sIgnoring %s, doesn\'t match one of the expected state filename formats',
                        dry_run, state_basename)
                    continue
                if stack_level == 'defn':
                    # Definition files are copied on every template creation
                    logger.info('%sDeleting %s', dry_run, state_file)
                    if dry_run:
                        continue
                    os.remove(state_file)
                else:
                    # Add deployment unit based subdirectories
                    if stack_deployment_unit in state_dirname:
                        logger.debug('%sIgnoring %s, already moved', dry_run,
                                     state_file)
                    else:
                        du_dir = format_unit_cf_dir(state_dirname, stack_level,
                                                    stack_deployment_unit, '',
                                                    stack_region)
                        src = state_file
                        dst = os.path.join(du_dir, state_basename)
                        logger.info('%sMoving %s to %s', dry_run, src, dst)
                        if dry_run:
                            continue
                        if not os.path.isdir(du_dir):
                            os.makedirs(du_dir, exist_ok=True)
                        shutil.move(src, dst)
    return True
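The two filename patterns above encode a {level}-{deployment_unit}-...-{region}-{suffix} convention, with pattern_1 covering AWS-style regions and pattern_2 the listed Azure regions. A quick check with a made-up AWS-style state filename:

import re

pattern_1 = r"([a-z0-9]+)-(.+)-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])(-pseudo)?-(.+)"
match = re.match(pattern_1, 'seg-baseline-genx-ap-southeast-2-stack.json')
assert match.group(1) == 'seg'             # stack_level
assert match.group(2) == 'baseline'        # stack_deployment_unit
assert match.group(4) == 'ap-southeast-2'  # stack_region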
Example #7
def upgrade_cmdb_repo_to_v2_0_0(root_dir, dry_run):
    # Reorganise cmdb to make it easier to manage via branches and dynamic cmdbs
    #
    # State is now in its own directory at the same level as config and infrastructure
    # Solutions is now under infrastructure
    # Builds are separated from settings and are now under infrastructure
    # Operations are now in their own directory at same level as config and
    # infrastructure. For consistency with config, a settings subdirectory has been
    # added.
    #
    # With this layout,
    # - infrastructure should be the same across environments assuming no builds
    #   are being promoted
    # - product and operations settings are managed consistently
    # - all the state info is cleanly separated (so potentially in its own repo)
    #
    # /config/settings
    # /operations/settings
    # /infrastructure/solutions
    # /infrastructure/builds
    # /state/cf
    # /state/cot
    #
    # If config and infrastructure are not in the one repo, then the upgrade must
    # be performed manually and the cmdb version manually updated
    config_dirs = Search.match_dirs(os.path.join('**', 'config'),
                                    root=root_dir)
    for config_dir in config_dirs:
        base_dir = os.path.dirname(config_dir)
        solutions_dir = os.path.join(config_dir, 'solutionsv2')
        settings_dir = os.path.join(config_dir, 'settings')
        infrastructure_dir = os.path.join(base_dir, 'infrastructure')
        state_dir = os.path.join(base_dir, 'state')
        operations_dir = os.path.join(base_dir, 'operations')
        state_subdirs = [
            os.path.join(infrastructure_dir, 'cf'),
            os.path.join(infrastructure_dir, 'cot')
        ]
        if not os.path.isdir(infrastructure_dir):
            logger.warning(
                '%sUpdate to v2.0.0 for %s must be manually performed for split cmdb repos',
                dry_run, config_dir)
            continue
        logger.debug('%sChecking %s', dry_run, base_dir)
        # Move the state into its own top level tree
        os.makedirs(state_dir, exist_ok=True)
        for state_subdir in state_subdirs:
            if os.path.isdir(state_subdir):
                src = state_subdir
                dst = os.path.join(state_dir, os.path.basename(state_subdir))
                logger.info('%sMoving %s to %s', dry_run, src, dst)
                if dry_run:
                    continue
                shutil.move(src, dst)
        # Move operations settings into their own top level tree
        orig_operations_settings_dir = os.path.join(infrastructure_dir,
                                                    'operations')
        new_operation_settings_dir = os.path.join(operations_dir, 'settings')
        if os.path.isdir(orig_operations_settings_dir):
            logger.info('%sMoving %s to %s', dry_run,
                        orig_operations_settings_dir,
                        new_operation_settings_dir)
            if dry_run:
                continue
            if not os.path.isdir(new_operation_settings_dir):
                os.makedirs(operations_dir, exist_ok=True)
                shutil.move(orig_operations_settings_dir,
                            new_operation_settings_dir)
        # Copy the solutions tree from config to infrastructure and rename
        if os.path.isdir(solutions_dir):
            logger.info('%sCopying %s to %s', dry_run, solutions_dir,
                        infrastructure_dir)
            if not dry_run:
                # Leave existing solutions dir in place as it may be the current directory
                src = solutions_dir
                dst = os.path.join(infrastructure_dir, os.path.basename(src))
                shutil.copytree(src, dst)
            src = os.path.join(infrastructure_dir, 'solutionsv2')
            dst = os.path.join(infrastructure_dir, 'solutions')
            logger.info('%sRenaming %s to %s', dry_run, src, dst)
            if not dry_run:
                shutil.move(src, dst)
        # Copy the builds into their own tree
        builds_dir = os.path.join(infrastructure_dir, 'builds')
        if not os.path.isdir(builds_dir):
            src = settings_dir
            dst = os.path.join(builds_dir, os.path.basename(src))
            logger.info('%sCopying %s to %s', dry_run, src, dst)
            if not dry_run:
                shutil.copytree(src, dst)
        # Remove the build files from the settings tree
        # Blob will pick up build references and shared builds
        logger.info('%sCleaning the settings tree', dry_run)
        setting_files = Search.match_files(os.path.join('**', '*build.json'),
                                           root=settings_dir)
        for setting_file in setting_files:
            logger.info('%sDeleting %s', dry_run, setting_file)
            if dry_run:
                continue
            os.remove(setting_file)
        # Build tree should only contain build references and shared builds
        logger.info('%sCleaning the builds tree', dry_run)
        if dry_run:
            # The builds tree wasn't actually created in a dry run, so list
            # the settings tree it would have been copied from instead
            build_files = Search.match_files(os.path.join('**', '*'),
                                             root=settings_dir)
        else:
            build_files = Search.match_files(os.path.join('**', '*'),
                                             root=builds_dir)
        build_files = [
            filename for filename in build_files
            if not filename.endswith('build.json')
        ]
        for build_file in build_files:
            logger.info('%sDeleting %s', dry_run, build_file)
            if dry_run:
                continue
            os.remove(build_file)

    return True
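Note that dry_run in these upgrade functions is not a boolean: process_cmdb (Example #11) rebinds it to the string '(Dryrun)' or '', so it serves both as a truthy flag and as a log-message prefix. A usage sketch under that assumption, with a hypothetical repo path:

# Hypothetical invocation; '/tmp/example-cmdb' stands in for a real repo root
upgrade_cmdb_repo_to_v2_0_0('/tmp/example-cmdb', '(Dryrun)')  # log only
upgrade_cmdb_repo_to_v2_0_0('/tmp/example-cmdb', '')          # apply changes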
Example #8
def upgrade_cmdb_repo_to_v1_3_1(root_dir, dry_run):
    # Find accounts
    account_files = Search.match_files(os.path.join('**', 'account.json'),
                                       root=root_dir)
    account_mappings = dict()
    for account_file in account_files:
        with open(account_file, 'rt') as f:
            account = json.load(f)
            aws_id = account['Account']['AWSId']
            account_mappings[aws_id] = account['Account']['Id']
    cf_dirs = Search.match_dirs(os.path.join('**', 'cf'), root=root_dir)
    for cf_dir in cf_dirs:
        cmk_stacks = Search.match_files(os.path.join(
            '**', 'seg-cmk-*[0-9]-stack.json'),
                                        root=cf_dir)
        for cmk_stack in cmk_stacks:
            logger.info('Looking for CMK account in %s', cmk_stack)
            with open(cmk_stack, 'rt') as f:
                cmk_stack_data = json.load(f)
            stack_outputs = cmk_stack_data['Stacks'][0]['Outputs']
            cmk_account = None
            for output in stack_outputs:
                if output['OutputKey'] == 'Account':
                    cmk_account = output['OutputValue']
            if cmk_account:
                cmk_account_id = account_mappings[cmk_account]
                cmk_path = os.path.dirname(cmk_stack)
                segment_cf = Search.match_files(os.path.join('**', '*'),
                                                root=cmk_path)
                for cf_file in segment_cf:
                    parsed_stack = parse_stack_filename(cf_file)
                    stack_dir = os.path.dirname(cf_file)
                    if not parsed_stack['stack_account']:
                        cf_basename = os.path.basename(cf_file)
                        new_cf_basename = cf_basename.replace(
                            f'-{parsed_stack["stack_region"]}-',
                            f'-{cmk_account_id}-{parsed_stack["stack_region"]}-'
                        )
                        move_file = True
                        new_cf_file = os.path.join(stack_dir, new_cf_basename)
                        if cf_basename != new_cf_basename and cmk_account_id not in cf_basename:
                            if os.path.isfile(new_cf_file):
                                if filecmp.cmp(cf_file, new_cf_file, False):
                                    move_file = False
                                else:
                                    logger.fatal(
                                        'Rename failed - %s already exists. Manual intervention necessary.',
                                        new_cf_file)
                                    return False
                        if cf_file == new_cf_file:
                            logger.debug('Skipping %s, path is not changed',
                                         new_cf_file)
                            continue
                        if move_file:
                            logger.debug('Moving %s to %s', cf_file,
                                         new_cf_file)
                        else:
                            logger.warning('%s already upgraded - removing',
                                           cf_file)
                        if dry_run:
                            continue
                        if move_file:
                            shutil.move(cf_file, new_cf_file)
                        else:
                            os.remove(cf_file)
    return True
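Examples #8 and #9 depend on a parse_stack_filename helper whose definition is not part of this listing. Judging by the keys accessed, it returns something like this hypothetical shape for a legacy filename such as 'seg-cmk-genx-ap-southeast-2-stack.json':

# Hypothetical return value; the real helper lives alongside these functions
parsed_stack = {
    'stack_account': None,  # legacy filenames carry no account id
    'stack_region': 'ap-southeast-2',
    # plus whatever else the real helper extracts
}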
Example #9
def upgrade_cmdb_repo_to_v1_3_0(root_dir, dry_run):
    # Find accounts
    account_files = Search.match_files(os.path.join('**', 'account.json'),
                                       root=root_dir)
    account_mappings = dict()
    for account_file in account_files:
        with open(account_file, 'rt') as f:
            account = json.load(f)
            aws_id = account['Account']['AWSId']
            account_mappings[aws_id] = account['Account']['Id']
    cf_dirs = Search.match_dirs(os.path.join('**', 'cf'), root=root_dir)
    for cf_dir in cf_dirs:
        cmk_stacks = Search.match_files(os.path.join(
            '**', 'seg-cmk-*[0-9]-stack.json'),
                                        root=cf_dir)
        for cmk_stack in cmk_stacks:
            logger.info('Looking for CMK account in %s', cmk_stack)
            with open(cmk_stack, 'rt') as f:
                cmk_stack_data = json.load(f)
            stack_outputs = cmk_stack_data['Stacks'][0]['Outputs']
            cmk_account = None
            cmk_region = None
            for output in stack_outputs:
                if output['OutputKey'] == 'Account':
                    cmk_account = output['OutputValue']
                elif output['OutputKey'] == 'Region':
                    cmk_region = output['OutputValue']
            if cmk_account:
                cmk_account_id = account_mappings[cmk_account]
                cmk_path = os.path.dirname(cmk_stack)
                segment_stacks = Search.match_files(os.path.join(
                    '**', '*stack.json'),
                                                    root=cmk_path)
                for stack_file in segment_stacks:
                    parsed_stack = parse_stack_filename(stack_file)
                    stack_dir = os.path.dirname(stack_file)
                    stack_filename = os.path.basename(stack_file)
                    with open(stack_file, 'rt') as f:
                        stack_data = json.load(f)
                    stack_outputs = stack_data['Stacks'][0]['Outputs']
                    stackoutput_account = None
                    stackoutput_region = None
                    for output in stack_outputs:
                        if output['OutputKey'] == 'Account':
                            stackoutput_account = output['OutputValue']
                        elif output['OutputKey'] == 'Region':
                            stackoutput_region = output['OutputValue']
                    if not stackoutput_account:
                        logger.debug('Adding Account Output to %s', stack_file)
                        for stack in stack_data['Stacks']:
                            stack['Outputs'].append({
                                'OutputKey': 'Account',
                                'OutputValue': cmk_account
                            })
                    if not stackoutput_region:
                        logger.debug('Adding Region Output to %s', stack_file)
                        for stack in stack_data['Stacks']:
                            stack['Outputs'].append({
                                'OutputKey': 'Region',
                                'OutputValue': parsed_stack['stack_region']
                            })
                    if not stackoutput_region or not stackoutput_account:
                        with open(stack_file, 'wt') as f:
                            json.dump(stack_data, f, indent=4)
                    if not parsed_stack['stack_account']:
                        new_stack_file_name = os.path.basename(
                            stack_file
                        ).replace(
                            f'-{parsed_stack["stack_region"]}-',
                            f'-{cmk_account_id}-{parsed_stack["stack_region"]}-'
                        )
                        if stack_filename != new_stack_file_name and cmk_account_id not in stack_filename:
                            src = stack_file
                            dst = os.path.join(stack_dir, new_stack_file_name)
                            logger.debug('Moving %s to %s', src, dst)
                            if dry_run:
                                continue
                            shutil.move(src, dst)
                # Rename SSH keys to include Account/Region
                operations_path = cmk_path.replace(
                    os.path.join('infrastructure', 'cf'),
                    os.path.join('infrastructure', 'operations'))
                logger.info('Checking for SSH Keys in %s', operations_path)
                pem_files = Search.match_files(os.path.join(
                    '**', '.aws-ssh*.pem*'),
                                               root=operations_path)
                for pem_file in pem_files:
                    pem_dir = os.path.dirname(pem_file)
                    pem_basename = os.path.basename(pem_file)
                    new_basename = pem_basename.replace(
                        'aws-', f'aws-{cmk_account_id}-{cmk_region}-')
                    # Move the pem files to make them invisible to the generation process
                    src = pem_file
                    dst = os.path.join(pem_dir, new_basename)
                    logger.debug('Moving %s to %s', src, dst)
                    if dry_run:
                        continue
                    shutil.move(src, dst)
    return True
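The stack files parsed here follow the AWS CloudFormation describe-stacks response shape. A minimal illustration of a file before this upgrade injects the Account and Region outputs (shown as a Python literal; values are made up):

# Minimal pre-upgrade *-stack.json content; values are illustrative
{
    "Stacks": [{
        "Outputs": [
            {"OutputKey": "VPC", "OutputValue": "vpc-0123456789abcdef0"}
        ]
    }]
}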
Example #10
def upgrade_cmdb_repo_to_v1_1_0(root_dir, dry_run):
    for source in UPGRADE_V1_1_0_SOURCES:
        source_dirs = Search.match_dirs(os.path.join('**', source),
                                        root=root_dir)
        for source_dir in source_dirs:
            target_dir = os.path.join(os.path.dirname(source_dir),
                                      UPGRADE_V1_1_0_SOURCES[source])
            logger.debug('Checking %s', source_dir)
            if os.path.isdir(target_dir):
                continue
            logger.info('Converting %s into %s', source_dir, target_dir)
            if source == 'aws':
                upgrade_cmdb_repo_to_v1_1_0_state(source_dir, dry_run,
                                                  target_dir)
            else:
                upgrade_cmdb_repo_to_v1_1_0_settings(source_dir, dry_run,
                                                     target_dir)
            if dry_run:
                continue
            # Special processing
            if source == 'solutions':
                # Shared solution files are specific to the default segment
                shared_default_dir = os.path.join(target_dir, 'shared',
                                                  'default')
                os.makedirs(shared_default_dir, exist_ok=True)
                target_shared_dir = os.path.join(target_dir, 'shared')
                solution_files = Search.list_files(target_shared_dir)
                for solution_file in solution_files:
                    src = os.path.join(target_shared_dir, solution_file)
                    dst = os.path.join(shared_default_dir, solution_file)
                    logger.debug('Moving %s to %s', src, dst)
                    shutil.move(src, dst)
                # Process environments
                segment_files = Search.match_files(os.path.join(
                    '**', 'segment.json'),
                                                   root=target_dir)
                for segment_file in segment_files:
                    segment_dir = os.path.dirname(segment_file)
                    environment_dir = os.path.dirname(segment_dir)
                    # Add environment.json file
                    with open(segment_file, 'rt') as f:
                        segment = json.load(f)
                    environment_id = segment.get('Segment',
                                                 {}).get('Environment')
                    environment_file = os.path.join(environment_dir,
                                                    'environment.json')
                    logger.debug('Creating %s', environment_file)
                    with open(environment_file, 'wt+') as f:
                        json.dump({'Environment': {'Id': environment_id}}, f)
                    logger.debug('Cleaning %s', segment_file)
                    segment_legacy_keys = [
                        'Id', 'Name', 'Title', 'Environment'
                    ]
                    for segment_legacy_key in segment_legacy_keys:
                        try:
                            del segment['Segment'][segment_legacy_key]
                        except KeyError:
                            pass
                    with open(segment_file, 'wt') as f:
                        json.dump(segment, f)
                shared_segment_file = os.path.join(shared_default_dir,
                                                   'segment.json')
                logger.debug('Creating %s', shared_segment_file)
                with open(shared_segment_file, 'wt+') as f:
                    json.dump({'Segment': {'Id': 'default'}}, f)
            elif source == 'credentials':
                pem_files = Search.match_files(os.path.join(
                    '**', 'aws-ssh*.pem'),
                                               root=target_dir)
                for pem_file in pem_files:
                    filename = os.path.basename(pem_file)
                    segment_dir = os.path.dirname(pem_file)
                    # Move the pem files to make them invisible to the generation process
                    src = pem_file
                    dst = os.path.join(segment_dir, '.' + filename)
                    logger.debug('Moving %s to %s', src, dst)
                    shutil.move(src, dst)
                    segment_ignore_file = os.path.join(segment_dir,
                                                       '.gitignore')
                    if not os.path.isfile(segment_ignore_file):
                        logger.debug('Creating %s', segment_ignore_file)
                        ignore_list = ['*.plaintext', '*.decrypted', '*.ppk']
                        with open(segment_ignore_file, 'wt+') as f:
                            f.write('\n'.join(ignore_list))
    return True
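The segment.json rewrite above splits environment identity out into its own file. An illustrative before/after with made-up values:

# Before: <env>/<segment>/segment.json under the legacy solutions tree
# {"Segment": {"Id": "app", "Title": "Application", "Environment": "int"}}
# After the upgrade:
# <env>/environment.json        -> {"Environment": {"Id": "int"}}
# <env>/<segment>/segment.json  -> {"Segment": {}}  (legacy keys stripped)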
Example #11
def process_cmdb(root_dir, action, gen3_version, versions, dry_run):
    cmdb_git_repos = Search.match_dirs(os.path.join('**', '*.git'),
                                       root=root_dir)
    dry_run = '(Dryrun)' if dry_run else ''  # doubles as log prefix and truthy flag

    for cmdb_git_repo in cmdb_git_repos:

        cmdb_repo = os.path.dirname(cmdb_git_repo)
        cmdb_version_file = os.path.join(cmdb_repo, '.cmdb')
        current_version = ''
        pin_version = ''
        logger.debug('Checking repo %s', cmdb_repo)
        if os.path.isfile(cmdb_version_file):
            with open(cmdb_version_file, 'rt') as f:
                cmdb_version_data = json.load(f)
            current_version = cmdb_version_data.get('Version', {}).get(
                action.capitalize())
            pin_version = cmdb_version_data.get('Pin',
                                                {}).get(action.capitalize())
            logger.debug("Repo pinned at %s version %s", pin_version,
                         current_version)
        else:
            with open(cmdb_version_file, 'wt+') as f:
                json.dump({}, f)

        current_version = current_version or 'v0.0.0'
        if utils.semver_compare(current_version, versions[-1]) >= 0:
            logger.debug(
                '%s of repo "%s" to %s is not required - skipping all version checks',
                action.capitalize(), cmdb_repo, versions[-1])
            continue

        for version in versions:
            if utils.semver_compare(current_version, version) >= 0:
                logger.debug('%s of repo "%s" to %s is not required',
                             action.capitalize(), cmdb_repo, version)
            else:
                logger.info('%s%s of repo "%s" to %s required ...', dry_run,
                            action.capitalize(), cmdb_repo, version)
            if pin_version:
                if utils.semver_compare(current_version, pin_version) < 0:
                    logger.warning(
                        '%s of repo "%s" to %s prevented by pin version %s',
                        action.capitalize(), cmdb_repo, version, pin_version)
                    break
                else:
                    logger.debug(
                        '%s%s of repo "%s" to %s permitted by pin version %s',
                        dry_run, action.capitalize(), cmdb_repo, version,
                        pin_version)
            compatibility = is_upgrade_compatible(version, gen3_version)
            if compatibility == 'incompatible':
                logger.warning((
                    '%s%s of repo "%s" to %s is not compatible with the current gen3 framework version of %s. '
                    'Skipping upgrade process ...'), dry_run,
                            action.capitalize(), cmdb_repo, version,
                            gen3_version)
                break
            elif compatibility == 'unknown':
                logger.warning((
                    '%s%s of repo "%s" to %s requires the GEN3 framework version to be defined. '
                    'Skipping upgrade process ...'), dry_run,
                                action.capitalize(), cmdb_repo, version)
                break
            else:
                logger.debug('%s%s of repo "%s" to %s is compatible', dry_run,
                             action.capitalize(), cmdb_repo, version)

            cmdb_action_func = globals()[
                f'{action.lower()}_cmdb_repo_to_{version.replace(".", "_")}']
            if cmdb_action_func(cmdb_repo, dry_run):
                if dry_run:
                    # Later upgrades depend on changes this one didn't apply
                    logger.debug('%sSkipping later versions', dry_run)
                    break
                logger.info('%s of repo "%s" to %s successful',
                            action.capitalize(), cmdb_repo, version)
                with open(cmdb_version_file, 'rt') as f:
                    cmdb_version_data = json.load(f)
                utils.deep_dict_update(
                    cmdb_version_data,
                    {'Version': {
                        action.capitalize(): version
                    }})
                with open(cmdb_version_file, 'wt') as f:
                    json.dump(cmdb_version_data, f)
                current_version = version

    return True
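The .cmdb marker file read and written above is plain JSON. Based solely on the keys this function touches, a repo that has been upgraded and pinned might look roughly like this (versions are illustrative):

# Illustrative .cmdb contents, shown as a Python literal
{
    'Version': {'Upgrade': 'v2.0.1', 'Cleanup': 'v2.0.0'},
    'Pin': {'Upgrade': 'v2.0.1'}
}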