Example 1
def find_gen3_root_dir(dirname):
    marked_root_file = Search.upwards(dirname, 'root.json')
    if marked_root_file is not None:
        return os.path.dirname(marked_root_file)
    # Search.upwards returns None when nothing is found, so guard the
    # os.path.dirname calls
    config_dir = Search.upwards(dirname, 'config')
    infrastructure_dir = Search.upwards(dirname, 'infrastructure')
    config_root_dir = os.path.dirname(config_dir) if config_dir else None
    infrastructure_root_dir = (os.path.dirname(infrastructure_dir)
                               if infrastructure_dir else None)
    root_dir = config_root_dir or infrastructure_root_dir
    if root_dir and Search.isdir(root_dir, 'config') and Search.isdir(
            root_dir, 'infrastructure'):
        return root_dir
    return None
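
For reference, the upward search that Search.upwards is assumed to perform can be sketched with pathlib alone (a minimal illustration under that assumption, not the library's actual implementation):

from pathlib import Path

def upwards(dirname, name):
    # Walk from dirname towards the filesystem root and return the
    # first hit for an entry called `name`, or None if there is none.
    for candidate in [Path(dirname), *Path(dirname).parents]:
        target = candidate / name
        if target.exists():
            return str(target)
    return None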
Example 2
def upgrade_build_ref(root_dir, dry_run):
    legacy_files = Search.match_files(os.path.join('**', 'build.ref'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        logger.debug('Checking %s', legacy_file)
        upgraded_file = os.path.join(os.path.dirname(legacy_file),
                                     'build.json')
        if os.path.isfile(upgraded_file):
            continue
        logger.info('Upgrading %s', legacy_file)
        with open(legacy_file, 'rt') as f:
            # split() rather than split(' ') so the trailing newline
            # is dropped from the last field
            build_array = f.read().split()
        try:
            build_data = {'Commit': build_array[0]}
        except IndexError as e:
            raise Exception(
                f"Unable to upgrade build reference in {legacy_file}") from e
        try:
            build_data['Tag'] = build_array[1]
        except IndexError:
            pass
        build_data['Formats'] = ['docker']
        build_data = json.dumps(build_data, indent=4)
        if dry_run:
            logger.info(build_data)
            continue
        with open(upgraded_file, 'wt+') as f:
            f.write(build_data)
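
A worked sketch of the conversion above, using a hypothetical one-line build.ref payload of "<commit> [<tag>]":

import json

build_array = '8f3c2d1 v1.4.0'.split()  # hypothetical legacy content
build_data = {'Commit': build_array[0]}
if len(build_array) > 1:
    build_data['Tag'] = build_array[1]
build_data['Formats'] = ['docker']
print(json.dumps(build_data, indent=4))
# -> {"Commit": "8f3c2d1", "Tag": "v1.4.0", "Formats": ["docker"]}, pretty-printed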
Example 3
def cleanup_cmdb_repo_to_v1_0_0(root_dir, dry_run):
    # Delete all .ref files, since they have all been converted to JSON format
    legacy_files = Search.match_files(os.path.join('**', '*.ref'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        logger.info('%sDeleting %s', dry_run, legacy_file)
        if dry_run:
            continue
        os.remove(legacy_file)
    # Change of naming from "container" to "segment"
    legacy_files = Search.match_files(os.path.join('**', 'container.json'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        logger.info('%sDeleting %s', dry_run, legacy_file)
        if dry_run:
            continue
        os.remove(legacy_file)
    return True
Example 4
def find_gen3_account_infrastructure_dir(root_dir, account):
    patterns = [
        os.path.join('**', 'infrastructure', '**', account),
        os.path.join('**', account, 'infrastructure')
    ]
    matches = Search.match_dirs(*patterns,
                                root=root_dir,
                                include_file_dirs=True)
    if not matches:
        raise Exception("Can't find account infrastructure dir")
    return matches[0]
Example 5
def find_gen3_account_dir(root_dir, account):
    patterns = [
        os.path.join('**', account, 'account.json'),
        os.path.join('**', account, 'config', 'account.json')
    ]
    matches = Search.match_dirs(*patterns,
                                root=root_dir,
                                include_file_dirs=True)
    if not matches:
        raise Exception("Can't find account dir")
    return matches[0]
Example 6
def find_gen3_environment_dir(root_dir, product, environment):
    product_dir = find_gen3_product_dir(root_dir, product)
    patterns = [
        os.path.join('**', 'solutionsv2', environment, 'environment.json')
    ]
    matches = Search.match_dirs(*patterns,
                                root=product_dir,
                                include_file_dirs=True)
    if not matches:
        raise Exception("Can't find environment dir")
    return matches[0]
Example 7
def cleanup_cmdb_repo_to_v2_0_0(root_dir, dry_run):
    config_dirs = Search.match_dirs(os.path.join('**', 'config'),
                                    root=root_dir)
    for config_dir in config_dirs:
        solutions_dir = os.path.join(config_dir, 'solutionsv2')
        if os.path.isdir(solutions_dir):
            logger.info('%sDeleting %s', dry_run, solutions_dir)
            if dry_run:
                continue
            shutil.rmtree(solutions_dir)
    return True
Example 8
def upgrade_cmdb_repo_to_v1_1_0_settings(root_dir, dry_run, target_dir):
    # Create the shared file location for default segment
    shared_dir = os.path.join(target_dir, 'shared')
    os.makedirs(shared_dir, exist_ok=True)
    sub_files = Search.list_files(root_dir)
    for sub_file in sub_files:
        src = os.path.join(root_dir, sub_file)
        dst = os.path.join(shared_dir, sub_file)
        logger.debug('Copying %s to %s', src, dst)
        if dry_run:
            continue
        shutil.copy2(src, dst)
    sub_dirs = Search.list_dirs(root_dir)
    for sub_dir in sub_dirs:
        environment = sub_dir
        sub_dir = os.path.join(root_dir, sub_dir)
        segment_dir = os.path.join(target_dir, environment, 'default')
        os.makedirs(segment_dir, exist_ok=True)
        logger.debug('Copying %s to %s', sub_dir, segment_dir)
        if dry_run:
            continue
        for name in Search.list_all(sub_dir):
            src = os.path.join(sub_dir, name)
            dst = os.path.join(segment_dir, name)
            if os.path.isdir(src):
                shutil.copytree(src, dst)
            else:
                shutil.copy2(src, dst)

        # Remove anything unwanted
        segment_files = Search.match_files(os.path.join('**', '*.ref'),
                                           os.path.join(
                                               '**', 'container.json'),
                                           root=segment_dir)
        for segment_file in segment_files:
            logger.debug('Deleting %s', segment_file)
            os.remove(segment_file)
    return True
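
The per-entry copy loop above is effectively a merging copy: shutil.copytree refuses to copy onto an existing directory (before Python 3.8's dirs_exist_ok flag), so entries are copied one at a time. The same idiom as a standalone sketch:

import os
import shutil

def merge_copy(src_dir, dst_dir):
    # Copy the contents of src_dir into dst_dir, which may already exist.
    os.makedirs(dst_dir, exist_ok=True)
    for name in os.listdir(src_dir):
        src = os.path.join(src_dir, name)
        dst = os.path.join(dst_dir, name)
        if os.path.isdir(src):
            shutil.copytree(src, dst)
        else:
            shutil.copy2(src, dst)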
Example 9
def upgrade_container_naming(root_dir, dry_run):
    legacy_files = Search.match_files(os.path.join('**', 'container.json'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        logger.debug('Checking %s', legacy_file)
        upgraded_file = os.path.join(os.path.dirname(legacy_file),
                                     'segment.json')
        if os.path.isfile(upgraded_file):
            continue
        logger.info('Upgrading %s', legacy_file)
        if dry_run:
            logger.info(f'{legacy_file}->{upgraded_file}')
            continue
        shutil.copy2(legacy_file, upgraded_file)
Example 10
def upgrade_cmdb_repo_to_v1_1_0_state(root_dir, dry_run, target_dir):
    # Create the shared file location
    shared_dir = os.path.join(target_dir, 'shared')
    os.makedirs(shared_dir, exist_ok=True)
    # Copy across the shared files
    cf_dir = os.path.join(root_dir, 'cf')
    if os.path.isdir(cf_dir):
        sub_files = Search.list_files(cf_dir)
        for sub_file in sub_files:
            src = os.path.join(cf_dir, sub_file)
            dst = os.path.join(shared_dir, sub_file)
            logger.debug('Copying %s to %s', src, dst)
            if dry_run:
                continue
            shutil.copy2(src, dst)
    # Process each sub dir
    sub_dirs = Search.list_dirs(root_dir)
    for sub_dir in sub_dirs:
        if sub_dir == 'cf':
            continue
        environment = sub_dir
        segment_dir = os.path.join(target_dir, environment, 'default')
        cf_dir = os.path.join(root_dir, sub_dir, 'cf')
        if not os.path.isdir(cf_dir):
            continue
        os.makedirs(segment_dir, exist_ok=True)
        logger.debug('Copying %s to %s', cf_dir, segment_dir)
        if dry_run:
            continue
        for name in Search.list_all(cf_dir):
            src = os.path.join(cf_dir, name)
            dst = os.path.join(segment_dir, name)
            if os.path.isdir(src):
                shutil.copytree(src, dst)
            else:
                shutil.copy2(src, dst)
Example 11
def cleanup_cmdb_repo_to_v1_1_0(root_dir, dry_run):
    for source in UPGRADE_V1_1_0_SOURCES:
        source_dirs = Search.match_dirs(os.path.join('**', source),
                                        root=root_dir)
        for source_dir in source_dirs:
            target_dir = os.path.join(os.path.dirname(source_dir),
                                      UPGRADE_V1_1_0_SOURCES[source])
            logger.debug('Checking %s', source_dir)
            logger.debug('Target dir %s', target_dir)
            if not os.path.isdir(target_dir):
                continue
            logger.info('%sDeleting %s', dry_run, source_dir)
            if dry_run:
                continue
            shutil.rmtree(source_dir)
    return True
Example 12
def upgrade_cmdb_repo_to_v1_2_0(root_dir, dry_run):
    legacy_files = Search.match_files(os.path.join('**', 'container_*.ftl'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        logger.debug('Checking %s', legacy_file)
        replacement_filename = os.path.basename(legacy_file).replace(
            'container_', 'fragment_')
        replacement_file = os.path.join(os.path.dirname(legacy_file),
                                        replacement_filename)
        if os.path.isfile(replacement_file):
            continue
        logger.info('%sRenaming %s to %s', dry_run, legacy_file,
                    replacement_file)
        if dry_run:
            continue
        shutil.move(legacy_file, replacement_file)
    return True
Example 13
def upgrade_credentials(root_dir, dry_run):
    legacy_files = Search.match_files(os.path.join('**', 'credentials.json'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        logger.debug('Checking %s', legacy_file)
        with open(legacy_file, 'rt') as f:
            legacy_data = json.load(f)
            credentials = legacy_data.get('Credentials')
            if credentials is None:
                # Nothing to upgrade in this file; move on to the next one
                continue
        logger.info('Upgrading %s', legacy_file)
        upgraded_data = json.dumps(credentials or [], indent=4)
        if dry_run:
            logger.info(upgraded_data)
            continue
        with open(legacy_file, 'wt+') as f:
            f.write(upgraded_data)
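
A toy before/after for the flattening above (hypothetical data; the upgrade simply promotes the Credentials subtree to the top level):

import json

legacy_data = {'Credentials': {'Login': {'Username': 'admin'}}}
print(json.dumps(legacy_data.get('Credentials') or [], indent=4))
# -> {"Login": {"Username": "admin"}}, pretty-printed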
Example 14
def upgrade_shared_build_ref(root_dir, dry_run):
    legacy_files = Search.match_files(os.path.join('**', '*.ref'),
                                      root=root_dir)
    for legacy_file in legacy_files:
        if os.path.basename(legacy_file) == 'build.ref':
            continue
        logger.debug('Checking %s', legacy_file)
        upgraded_file = os.path.join(os.path.dirname(legacy_file),
                                     'shared_build.json')
        if os.path.isfile(upgraded_file):
            continue
        logger.info('Upgrading %s', legacy_file)
        with open(legacy_file, 'rt') as f:
            build_ref = f.read()
        upgraded_data = json.dumps({'Reference': build_ref}, indent=4)
        if dry_run:
            logger.info(upgraded_data)
            continue
        with open(upgraded_file, 'wt+') as f:
            f.write(upgraded_data)
Example 15
def upgrade_cmdb_repo_to_v2_0_1(root_dir, dry_run):
    # Reorganise state files into a directory tree based on deployment unit and placement
    #
    # The format of the state tree will follow the pattern
    # state/{df}/{env}/{seg}/{du}/{placement}
    #
    # Delete definition files because their file name contains the occurrence name not the
    # deployment unit. They will be regenerated into the correct dir on the next build.
    state_dirs = Search.match_dirs(os.path.join('**', 'state'), root=root_dir)
    for state_dir in state_dirs:
        deployment_frameworks = Search.match_dirs('*', root=state_dir)
        for df_dir in deployment_frameworks:
            deployment_framework = os.path.basename(df_dir)
            logger.debug('%sChecking deployment framework %s', dry_run,
                         deployment_framework)
            state_files = Search.match_files(os.path.join('**', '*'),
                                             root=df_dir)
            for state_file in state_files:
                state_basename = os.path.basename(state_file)
                state_dirname = os.path.dirname(state_file)
                stack_deployment_unit = ""
                # Filename format varies with deployment framework
                pattern_1 = r"([a-z0-9]+)-(.+)-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])(-pseudo)?-(.+)"
                pattern_2 = r"([a-z0-9]+)-(.+)-([a-z][a-z0-9]+)-(eastus|australiaeast|australiasoutheast|australiacentral|australiacentral2)(-pseudo)?-(.+)"  # noqa
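                # For example, a hypothetical basename
                # "seg-baseline-prod1-ap-southeast-2-stack.json" matches
                # pattern_1 with level "seg" (group 1), deployment unit
                # "baseline" (group 2) and region "ap-southeast-2" (group 4).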
                match = re.match(pattern_1, state_basename) or re.match(
                    pattern_2, state_basename)
                stack_level = ''
                stack_deployment_unit = ''
                stack_region = ''
                if match:
                    stack_level = match.group(1)
                    stack_deployment_unit = match.group(2)
                    stack_region = match.group(4)
                if not stack_deployment_unit:
                    # Legacy account formats
                    match = re.match(
                        r"account-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                        state_basename)
                    if match:
                        stack_level = "account"
                        stack_deployment_unit = match.group(1)
                        stack_region = match.group(2)
                    match = re.match(r"account-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                                     state_basename)
                    if match:
                        stack_level = "account"
                        stack_deployment_unit = "s3"
                        stack_region = match.group(1)
                if not stack_deployment_unit:
                    # Legacy product formats
                    match = re.match(r"product-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                                     state_basename)
                    if match:
                        stack_level = "product"
                        stack_deployment_unit = "cmk"
                        stack_region = match.group(1)
                if not stack_deployment_unit:
                    # Legacy segment formats
                    match = re.match(r"seg-key-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                                     state_basename)
                    if match:
                        stack_level = "seg"
                        stack_deployment_unit = "cmk"
                        stack_region = match.group(1)
                    match = re.match(
                        r"cont-([a-z][a-z0-9]+)-([a-z]{2}-[a-z]+-[1-9])-(.+)",
                        state_basename)
                    if match:
                        stack_level = "seg"
                        stack_deployment_unit = match.group(1)
                        stack_region = match.group(2)
                if not stack_deployment_unit:
                    logger.warning(
                        '%sIgnoring %s, doesn\'t match one of the expected state filename formats',
                        dry_run, state_basename)
                    continue
                if stack_level == 'defn':
                    # Definition files are copied on every template creation
                    logger.info('%sDeleting %s', dry_run, state_file)
                    if dry_run:
                        continue
                    os.remove(state_file)
                else:
                    # Add deployment unit based subdirectories
                    if stack_deployment_unit in state_dirname:
                        logger.debug('%sIgnoring %s, already moved', dry_run,
                                     state_file)
                    else:
                        du_dir = format_unit_cf_dir(state_dirname, stack_level,
                                                    stack_deployment_unit, '',
                                                    stack_region)
                        src = state_file
                        dst = os.path.join(du_dir, state_basename)
                        logger.info('%sMoving %s to %s', dry_run, src, dst)
                        if dry_run:
                            continue
                        if not os.path.isdir(du_dir):
                            os.makedirs(du_dir, exist_ok=True)
                        shutil.move(src, dst)
    return True
Example 16
def upgrade_cmdb_repo_to_v2_0_0(root_dir, dry_run):
    # Reorganise cmdb to make it easier to manage via branches and dynamic cmdbs
    #
    # State is now in its own directory at the same level as config and infrastructure
    # Solutions is now under infrastructure
    # Builds are separated from settings and are now under infrastructure
    # Operations are now in their own directory at same level as config and
    # infrastructure. For consistency with config, a settings subdirectory has been
    # added.
    #
    # With this layout,
    # - infrastructure should be the same across environments assuming no builds
    #   are being promoted
    # - product and operations settings are managed consistently
    # - all the state info is cleanly separated (so potentially in its own repo)
    #
    # /config/settings
    # /operations/settings
    # /infrastructure/solutions
    # /infrastructure/builds
    # /state/cf
    # /state/cot
    #
    # If config and infrastructure are not in the one repo, then the upgrade must
    # be performed manually and the cmdb version manually updated
    config_dirs = Search.match_dirs(os.path.join('**', 'config'),
                                    root=root_dir)
    for config_dir in config_dirs:
        base_dir = os.path.dirname(config_dir)
        solutions_dir = os.path.join(config_dir, 'solutionsv2')
        settings_dir = os.path.join(config_dir, 'settings')
        infrastructure_dir = os.path.join(base_dir, 'infrastructure')
        state_dir = os.path.join(base_dir, 'state')
        operations_dir = os.path.join(base_dir, 'operations')
        state_subdirs = [
            os.path.join(infrastructure_dir, 'cf'),
            os.path.join(infrastructure_dir, 'cot')
        ]
        if not os.path.isdir(infrastructure_dir):
            logger.warning(
                '%sUpdate to v2.0.0 for %s must be manually performed for split cmdb repos',
                dry_run, config_dir)
            continue
        logger.debug('%sChecking %s', dry_run, base_dir)
        # Move the state into its own top level tree
        # (the state dir must exist before shutil.move can place entries in it)
        os.makedirs(state_dir, exist_ok=True)
        for state_subdir in state_subdirs:
            if os.path.isdir(state_subdir):
                src = state_subdir
                dst = os.path.join(state_dir, os.path.basename(state_subdir))
                logger.info('%sMoving %s to %s', dry_run, src, dst)
                if dry_run:
                    continue
                shutil.move(src, dst)
        # Move operations settings into their own top level tree
        orig_operations_settings_dir = os.path.join(infrastructure_dir,
                                                    'operations')
        new_operation_settings_dir = os.path.join(operations_dir, 'settings')
        if os.path.isdir(orig_operations_settings_dir):
            logger.info('%sMoving %s to %s', dry_run,
                        orig_operations_settings_dir,
                        new_operation_settings_dir)
            if dry_run:
                continue
            if not os.path.isdir(new_operation_settings_dir):
                os.makedirs(operations_dir, exist_ok=True)
                shutil.move(orig_operations_settings_dir,
                            new_operation_settings_dir)
        # Copy the solutions tree from config to infrastructure and rename
        if os.path.isdir(solutions_dir):
            logger.info('%sCopying %s to %s', dry_run, solutions_dir,
                        infrastructure_dir)
            if not dry_run:
                # Leave existing solutions dir in place as it may be the current directory
                src = solutions_dir
                dst = os.path.join(infrastructure_dir, os.path.basename(src))
                shutil.copytree(src, dst)
            src = os.path.join(infrastructure_dir, 'solutionsv2')
            dst = os.path.join(infrastructure_dir, 'solutions')
            logger.info('%sRenaming %s to %s', dry_run, src, dst)
            if not dry_run:
                shutil.move(src, dst)
        # Copy the builds into their own tree
        builds_dir = os.path.join(infrastructure_dir, 'builds')
        if not os.path.isdir(builds_dir):
            src = settings_dir
            dst = os.path.join(builds_dir, os.path.basename(src))
            logger.info('%sCopying %s to %s', dry_run, src, dst)
            if not dry_run:
                shutil.copytree(src, dst)
        # Remove the build files from the settings tree
        # The glob will pick up build references and shared builds
        logger.info('%sCleaning the settings tree', dry_run)
        setting_files = Search.match_files(os.path.join('**', '*build.json'),
                                           root=settings_dir)
        for setting_file in setting_files:
            logger.info('%sDeleting %s', dry_run, setting_file)
            if dry_run:
                continue
            os.remove(setting_file)
        # Build tree should only contain build references and shared builds
        logger.info('%sCleaning the builds tree', dry_run)
        if dry_run:
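            # The builds tree hasn't been created on a dry run, so scan the
            # settings tree it would have been copied from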
            build_files = Search.match_files(os.path.join('**', '*'),
                                             root=settings_dir)
        else:
            build_files = Search.match_files(os.path.join('**', '*'),
                                             root=builds_dir)
        build_files = [
            filename for filename in build_files
            if not filename.endswith('build.json')
        ]
        for build_file in build_files:
            logger.info('%sDeleting %s', dry_run, build_file)
            if dry_run:
                continue
            os.remove(build_file)

    return True
Example 17
def upgrade_cmdb_repo_to_v1_3_1(root_dir, dry_run):
    # Find accounts
    account_files = Search.match_files(os.path.join('**', 'account.json'),
                                       root=root_dir)
    account_mappings = dict()
    for account_file in account_files:
        with open(account_file, 'rt') as f:
            account = json.load(f)
            account_mappings[
                account['Account']['AWSId']] = account['Account']['Id']
    cf_dirs = Search.match_dirs(os.path.join('**', 'cf'), root=root_dir)
    for cf_dir in cf_dirs:
        cmk_stacks = Search.match_files(os.path.join(
            '**', 'seg-cmk-*[0-9]-stack.json'),
                                        root=cf_dir)
        for cmk_stack in cmk_stacks:
            logger.info('Looking for CMK account in %s', cmk_stack)
            with open(cmk_stack, 'rt') as f:
                cmk_stack_data = json.load(f)
            stack_outputs = cmk_stack_data['Stacks'][0]['Outputs']
            cmk_account = None
            for output in stack_outputs:
                if output['OutputKey'] == 'Account':
                    cmk_account = output['OutputValue']
            if cmk_account:
                cmk_account_id = account_mappings[cmk_account]
                cmk_path = os.path.dirname(cmk_stack)
                segment_cf = Search.match_files(os.path.join('**', '*'),
                                                root=cmk_path)
                for cf_file in segment_cf:
                    parsed_stack = parse_stack_filename(cf_file)
                    stack_dir = os.path.dirname(cf_file)
                    if not parsed_stack['stack_account']:
                        cf_basename = os.path.basename(cf_file)
                        new_cf_basename = cf_basename.replace(
                            f'-{parsed_stack["stack_region"]}-',
                            f'-{cmk_account_id}-{parsed_stack["stack_region"]}-'
                        )
                        move_file = True
                        new_cf_file = os.path.join(stack_dir, new_cf_basename)
                        if cf_basename != new_cf_basename and cmk_account_id not in cf_basename:
                            if os.path.isfile(new_cf_file):
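                                # shallow=False forces a byte-by-byte
                                # content comparison of the two files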
                                if filecmp.cmp(cf_file, new_cf_file, False):
                                    move_file = False
                                else:
                                    logger.fatal(
                                        'Rename failed - %s already exists. Manual intervention necessary.',
                                        new_cf_file)
                                    return False
                        if cf_file == new_cf_file:
                            logger.debug('Skipping %s, path is not changed',
                                         new_cf_file)
                            continue
                        if move_file:
                            logger.debug('Moving %s to %s', cf_file,
                                         new_cf_file)
                        else:
                            logger.warning('%s already upgraded - removing',
                                           cf_file)
                        if dry_run:
                            continue
                        if move_file:
                            shutil.move(cf_file, new_cf_file)
                        else:
                            os.remove(cf_file)
    return True
Example 18
def upgrade_cmdb_repo_to_v1_3_0(root_dir, dry_run):
    # Find accounts
    account_files = Search.match_files(os.path.join('**', 'account.json'),
                                       root=root_dir)
    account_mappings = dict()
    for account_file in account_files:
        with open(account_file, 'rt') as f:
            account = json.load(f)
            account_mappings[
                account['Account']['AWSId']] = account['Account']['Id']
    cf_dirs = Search.match_dirs(os.path.join('**', 'cf'), root=root_dir)
    for cf_dir in cf_dirs:
        cmk_stacks = Search.match_files(os.path.join(
            '**', 'seg-cmk-*[0-9]-stack.json'),
                                        root=cf_dir)
        for cmk_stack in cmk_stacks:
            logger.info('Looking for CMK account in %s', cmk_stack)
            with open(cmk_stack, 'rt') as f:
                cmk_stack_data = json.load(f)
            stack_outputs = cmk_stack_data['Stacks'][0]['Outputs']
            cmk_account = None
            cmk_region = None
            for output in stack_outputs:
                if output['OutputKey'] == 'Account':
                    cmk_account = output['OutputValue']
                elif output['OutputKey'] == 'Region':
                    cmk_region = output['OutputValue']
            if cmk_account:
                cmk_account_id = account_mappings[cmk_account]
                cmk_path = os.path.dirname(cmk_stack)
                segment_stacks = Search.match_files(os.path.join(
                    '**', '*stack.json'),
                                                    root=cmk_path)
                for stack_file in segment_stacks:
                    parsed_stack = parse_stack_filename(stack_file)
                    stack_dir = os.path.dirname(stack_file)
                    stack_filename = os.path.basename(stack_file)
                    with open(stack_file, 'rt') as f:
                        stack_data = json.load(f)
                    stack_outputs = stack_data['Stacks'][0]['Outputs']
                    stackoutput_account = None
                    stackoutput_region = None
                    for output in stack_outputs:
                        if output['OutputKey'] == 'Account':
                            stackoutput_account = output['OutputValue']
                        elif output['OutputKey'] == 'Region':
                            stackoutput_region = output['OutputValue']
                    if not stackoutput_account:
                        logger.debug('Adding Account Output to %s', stack_file)
                        for stack in stack_data['Stacks']:
                            stack['Outputs'].append({
                                'OutputKey': 'Account',
                                'OutputValue': cmk_account
                            })
                    if not stackoutput_region:
                        logger.debug('Adding Region Output to %s', stack_file)
                        for stack in stack_data['Stacks']:
                            stack['Outputs'].append({
                                'OutputKey':
                                'Region',
                                'OutputValue':
                                parsed_stack['stack_region']
                            })
                    if not stackoutput_region or not stackoutput_account:
                        with open(stack_file, 'wt') as f:
                            json.dump(stack_data, f, indent=4)
                    if not parsed_stack['stack_account']:
                        new_stack_file_name = os.path.basename(
                            stack_file
                        ).replace(
                            f'-{parsed_stack["stack_region"]}-',
                            f'-{cmk_account_id}-{parsed_stack["stack_region"]}-'
                        )
                        if stack_filename != new_stack_file_name and cmk_account_id not in stack_filename:
                            src = stack_file
                            dst = os.path.join(stack_dir, new_stack_file_name)
                            logger.debug('Moving %s to %s', src, dst)
                            if dry_run:
                                continue
                            shutil.move(src, dst)
                # Rename SSH keys to include Account/Region
                operations_path = cmk_path.replace(
                    os.path.join('infrastructure', 'cf'),
                    os.path.join('infrastructure', 'operations'))
                logger.info('Checking for SSH Keys in %s', operations_path)
                pem_files = Search.match_files(os.path.join(
                    '**', '.aws-ssh*.pem*'),
                                               root=operations_path)
                for pem_file in pem_files:
                    pem_dir = os.path.dirname(pem_file)
                    pem_basename = os.path.basename(pem_file)
                    new_basename = pem_basename.replace(
                        'aws-', f'aws-{cmk_account_id}-{cmk_region}-')
                    # Move the pem files to make them invisible to the generation process
                    src = pem_file
                    dst = os.path.join(pem_dir, new_basename)
                    logger.debug('Moving %s to %s', src, dst)
                    if dry_run:
                        continue
                    shutil.move(src, dst)
    return True
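
The Outputs handling above assumes the usual CloudFormation describe-stacks JSON shape; a toy example (hypothetical values) of injecting a missing output key:

import json

stack_data = {'Stacks': [{'Outputs': [{'OutputKey': 'Region',
                                       'OutputValue': 'ap-southeast-2'}]}]}
for stack in stack_data['Stacks']:
    if not any(o['OutputKey'] == 'Account' for o in stack['Outputs']):
        stack['Outputs'].append({'OutputKey': 'Account',
                                 'OutputValue': '123456789012'})
print(json.dumps(stack_data, indent=4))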
Example 19
def test_search():
    with tempfile.TemporaryDirectory() as temp_dir:
        path = "/test/path/split"
        assert Search.split_path(path) == ['/', 'test', 'path', 'split']
        path = "test/path/split"
        assert Search.split_path(path) == ['test', 'path', 'split']

        test_directory_fullpath = os.path.join(temp_dir, 'test/directory')
        test_directory_1_fullpath = os.path.join(temp_dir, 'test/directory/1')
        test_directory_2_fullpath = os.path.join(temp_dir, 'test/directory/2')
        test_filename_fullpath = os.path.join(test_directory_fullpath,
                                              'node-0')
        test_filename_1_fullpath = os.path.join(test_directory_1_fullpath,
                                                'node-1')
        test_filename_2_fullpath = os.path.join(test_directory_2_fullpath,
                                                'node-2')
        os.makedirs(test_directory_1_fullpath)
        os.makedirs(test_directory_2_fullpath)
        os.mknod(test_filename_fullpath)
        os.mknod(test_filename_1_fullpath)
        os.mknod(test_filename_2_fullpath)

        assert os.path.exists(test_filename_fullpath)
        assert os.path.exists(test_filename_1_fullpath)
        assert os.path.exists(test_filename_2_fullpath)

        with pytest.raises(ValueError):
            ContextSearch(os.path.relpath(temp_dir, test_directory_fullpath))

        with pytest.raises(ValueError):
            ContextSearch(test_filename_fullpath)

        with pytest.raises(ValueError):
            ContextSearch("/directory/doesnt/exist")

        assert Search.exists(test_directory_fullpath,
                             'node-0') == test_filename_fullpath
        assert ContextSearch(test_directory_fullpath).exists(
            'node-0') == test_filename_fullpath
        assert Search.exists(test_directory_1_fullpath,
                             'node-1') == test_filename_1_fullpath
        assert ContextSearch(test_directory_1_fullpath).exists(
            'node-1') == test_filename_1_fullpath
        assert Search.exists(test_directory_2_fullpath,
                             'node-2') == test_filename_2_fullpath
        assert ContextSearch(test_directory_2_fullpath).exists(
            'node-2') == test_filename_2_fullpath

        assert Search.exists(test_directory_1_fullpath, 'node-0',
                             up=1) == test_filename_fullpath
        assert ContextSearch(test_directory_1_fullpath).exists(
            'node-0', up=1) == test_filename_fullpath
        assert Search.exists(test_directory_2_fullpath, 'node-0',
                             up=1) == test_filename_fullpath
        assert ContextSearch(test_directory_2_fullpath).exists(
            'node-0', up=1) == test_filename_fullpath

        assert Search.isfile(test_directory_fullpath,
                             'node-0') == test_filename_fullpath
        assert ContextSearch(test_directory_fullpath).isfile(
            'node-0') == test_filename_fullpath

        assert Search.isfile(test_directory_1_fullpath, 'node-0',
                             up=1) == test_filename_fullpath
        assert ContextSearch(test_directory_1_fullpath).isfile(
            'node-0', up=1) == test_filename_fullpath

        assert (Search.isdir(test_directory_fullpath,
                             os.path.basename(test_directory_1_fullpath)) ==
                test_directory_1_fullpath)
        assert (ContextSearch(test_directory_fullpath).isdir(
            os.path.basename(test_directory_1_fullpath)) ==
                test_directory_1_fullpath)
        assert (Search.isdir(test_directory_1_fullpath,
                             os.path.basename(test_directory_fullpath),
                             up=2) == test_directory_fullpath)
        assert (ContextSearch(test_directory_1_fullpath).isdir(
            os.path.basename(test_directory_fullpath),
            up=2) == test_directory_fullpath)

        assert Search.upwards(test_directory_1_fullpath,
                              'node-0') == test_filename_fullpath
        assert ContextSearch(test_directory_2_fullpath).upwards(
            'node-0') == test_filename_fullpath

        assert Search.downwards(temp_dir,
                                'node-1') == [test_filename_1_fullpath]
        assert ContextSearch(temp_dir).downwards('node-2') == [
            test_filename_2_fullpath
        ]

        assert Search.basename(test_directory_fullpath) == os.path.basename(
            test_directory_fullpath)
        assert ContextSearch(test_directory_fullpath).basename(
        ) == os.path.basename(test_directory_fullpath)

        assert Search.basename(
            test_directory_1_fullpath,
            up=1) == os.path.basename(test_directory_fullpath)
        assert ContextSearch(test_directory_2_fullpath).basename(
            up=1) == os.path.basename(test_directory_fullpath)

        assert Search.parent(test_directory_1_fullpath,
                             up=1) == test_directory_fullpath
        assert ContextSearch(test_directory_2_fullpath).parent(
            up=1) == test_directory_fullpath

        assert Search.parent(test_directory_1_fullpath, up=3) == temp_dir
        assert ContextSearch(test_directory_2_fullpath).parent(
            up=3) == temp_dir

        assert (Search.cut("/p/re/fix/path/to/something/suf/fix",
                           prefix="/p/re/fix",
                           suffix="suf/fix") == "path/to/something")
        assert (Search.cut("/p/re/fix/path/to/something/suf/fix",
                           prefix="/p/re/fix") == "path/to/something/suf/fix")
        assert (Search.cut("/p/re/fix/path/to/something/suf/fix",
                           suffix="suf/fix") == "/p/re/fix/path/to/something")
Example 20
def upgrade_cmdb_repo_to_v1_1_0(root_dir, dry_run):
    for source in UPGRADE_V1_1_0_SOURCES:
        source_dirs = Search.match_dirs(os.path.join('**', source),
                                        root=root_dir)
        for source_dir in source_dirs:
            target_dir = os.path.join(os.path.dirname(source_dir),
                                      UPGRADE_V1_1_0_SOURCES[source])
            logger.debug('Checking %s', source_dir)
            if os.path.isdir(target_dir):
                continue
            logger.info('Converting %s into %s', source_dir, target_dir)
            if source == 'aws':
                upgrade_cmdb_repo_to_v1_1_0_state(source_dir, dry_run,
                                                  target_dir)
            else:
                upgrade_cmdb_repo_to_v1_1_0_settings(source_dir, dry_run,
                                                     target_dir)
            if dry_run:
                continue
            # Special processing
            if source == 'solutions':
                # Shared solution files are specific to the default segment
                shared_default_dir = os.path.join(target_dir, 'shared',
                                                  'default')
                os.makedirs(shared_default_dir, exist_ok=True)
                target_shared_dir = os.path.join(target_dir, 'shared')
                solution_files = Search.list_files(target_shared_dir)
                for solution_file in solution_files:
                    src = os.path.join(target_shared_dir, solution_file)
                    dst = os.path.join(shared_default_dir, solution_file)
                    logger.debug('Moving %s to %s', src, dst)
                    shutil.move(src, dst)
                # Process environments
                segment_files = Search.match_files(os.path.join(
                    '**', 'segment.json'),
                                                   root=target_dir)
                for segment_file in segment_files:
                    segment_dir = os.path.dirname(segment_file)
                    environment_dir = os.path.dirname(segment_dir)
                    # Add environment.json file
                    with open(segment_file, 'rt') as f:
                        segment = json.load(f)
                    environment_id = segment.get('Segment',
                                                 {}).get('Environment')
                    environment_file = os.path.join(environment_dir,
                                                    'environment.json')
                    logger.debug('Creating %s', environment_file)
                    with open(environment_file, 'wt+') as f:
                        json.dump({'Environment': {'Id': environment_id}}, f)
                    logger.debug('Cleaning %s', segment_file)
                    segment_legacy_keys = [
                        'Id', 'Name', 'Title', 'Environment'
                    ]
                    for segment_legacy_key in segment_legacy_keys:
                        try:
                            del segment['Segment'][segment_legacy_key]
                        except KeyError:
                            pass
                    with open(segment_file, 'wt') as f:
                        json.dump(segment, f)
                shared_segment_file = os.path.join(shared_default_dir,
                                                   'segment.json')
                logger.debug('Creating %s', shared_segment_file)
                with open(shared_segment_file, 'wt+') as f:
                    json.dump({'Segment': {'Id': 'default'}}, f)
            elif source == 'credentials':
                pem_files = Search.match_files(os.path.join(
                    '**', 'aws-ssh*.pem'),
                                               root=target_dir)
                for pem_file in pem_files:
                    filename = os.path.basename(pem_file)
                    segment_dir = os.path.dirname(pem_file)
                    # Move the pem files to make them invisible to the generation process
                    src = pem_file
                    dst = os.path.join(segment_dir, '.' + filename)
                    logger.debug('Moving %s to %s', src, dst)
                    shutil.move(src, dst)
                    segment_ignore_file = os.path.join(segment_dir,
                                                       '.gitignore')
                    if not os.path.isfile(segment_ignore_file):
                        logger.debug('Creating %s', segment_ignore_file)
                        ignore_list = ['*.plaintext', '*.decrypted', '*.ppk']
                        with open(segment_ignore_file, 'wt+') as f:
                            f.write('\n'.join(ignore_list))
    return True
Example 21
def set_context(cwd=None, environment_obj=None):
    e = environment_obj  # shortcut
    l = Environment()  # local env
    if e.GENERATION_CONTEXT_DEFINED:
        return
    e.GENERATION_CONTEXT_DEFINED = True
    l.GENERATION_CONTEXT_DEFINED_LOCAL = True
    if not e.GENERATION_TMPDIR:
        # *******************************************
        # * TODO: decide how to do temp dir cleanup *
        # *******************************************
        e.GENERATION_TMPDIR = tempfile.mkdtemp()
    e.GENERATION_DATA_DIR = find_gen3_root_dir(cwd)
    if not e.GENERATION_DATA_DIR:
        raise Exception("Can't locate the root of the directory tree.")
    if not e.GENERATION_NO_CMDB_CHECK:
        if not upgrade_cmdb(e.GENERATION_DATA_DIR, '', '',
                            e.GENERATION_MAX_CMDB_UPGRADE_VERSION):
            raise Exception('CMDB upgrade failed.')
        if not cleanup_cmdb(e.GENERATION_DATA_DIR, '', '',
                            e.GENERATION_MAX_CMDB_UPGRADE_VERSION):
            raise Exception('CMDB cleanup failed.')
    e.CACHE_DIR = os.path.join(e.GENERATION_DATA_DIR, 'cache')
    os.makedirs(e.CACHE_DIR, exist_ok=True)

    l.TEMPLATE_COMPOSITES = ('account', 'fragment')

    for composite in l.TEMPLATE_COMPOSITES:
        key = f'COMPOSITE_{composite.upper()}'
        e[key] = os.path.join(e.CACHE_DIR, f'{key.lower()}.ftl')

        if ((not e.GENERATION_USE_CACHE
                and not e.GENERATION_USE_FRAGMENTS_CACHE)
                or not Search.isfile(e.CACHE_DIR, 'composite_account.ftl')):
            l[f'{composite.lower()}_array'] = []
            # ********************************************
            # * TODO: add legacy start fragments section *
            # ********************************************
    # Check if the current directory gives any clue to the context
    # Accommodate both pre cmdb v2.0.0, where segment/environment live in the
    # config tree, and post v2.0.0, where they are in the infrastructure tree
    l.solutions_ancestor_dir = Search.upwards(cwd, 'solutions')
    l.solutionsv2_ancestor_dir = Search.upwards(cwd, 'solutionsv2')
    if not l.solutions_ancestor_dir and not l.solutionsv2_ancestor_dir:
        l.infrastructure_dir = cwd.replace('settings', 'solutions')
        l.infrastructure_dir = l.infrastructure_dir.replace(
            'operations', 'infrastructure')
        l.infrastructure_dir = l.infrastructure_dir.replace(
            'config', 'infrastructure')
        if os.path.isdir(l.infrastructure_dir):
            cwd = l.infrastructure_dir
    if Search.isfile(cwd, 'segment.json'):
        e.LOCATION = e.LOCATION or 'segment'
        e.SEGMENT = Search.basename(cwd)
        if Search.isfile(cwd, 'environment.json', up=1):
            cwd = Search.parent(cwd, up=1)
        else:
            e.ENVIRONMENT = e.SEGMENT
            e.SEGMENT = 'default'
            cwd = os.path.join(Search.parent(cwd, up=3), 'config')
    if Search.isfile(cwd, 'environment.json'):
        e.LOCATION = e.LOCATION or 'environment'
        e.ENVIRONMENT = Search.basename(cwd)
        cwd = os.path.join(Search.parent(cwd, up=3), 'config')
    if Search.isfile(cwd, 'account.json'):
        # account directory
        # We check it before checking for a product as the account directory
        # also acts as a product directory for shared infrastructure
        # An account directory may also have no product information e.g.
        # in the case of production environments in dedicated accounts.
        e.LOCATION = e.LOCATION or 'account'
    if Search.isfile(cwd, 'product.json'):
        if e.LOCATION == 'account':
            e.LOCATION = 'account|product'
        else:
            e.LOCATION = e.LOCATION or 'product'
        e.PRODUCT = Search.basename(cwd)
        if e.PRODUCT == 'config':
            e.PRODUCT = Search.basename(cwd, up=1)
    if Search.isfile(cwd, 'integrator.json'):
        e.LOCATION = e.LOCATION or 'integrator'
        e.INTEGRATOR = Search.basename(cwd)
    if (Search.isfile(cwd, 'root.json')
            or (Search.isdir(cwd, 'config')
                and Search.isdir(cwd, 'infrastructure'))):
        e.LOCATION = 'root'
    cwd = e.GENERATION_DATA_DIR
    if not e.ACCOUNT:
        e.ACCOUNT = Search.basename(e.GENERATION_DATA_DIR)
    # Analyse directory structure
    find_gen3_dirs(e, e.GENERATION_DATA_DIR)
    # Build the composite solution (aka blueprint)
    e.GENERATION_INPUT_SOURCE = e.GENERATION_INPUT_SOURCE or 'composite'
    if e.GENERATION_INPUT_SOURCE == 'composite':
        l.blueprint_alternate_dirs = (e.SEGMENT_SOLUTIONS_DIR,
                                      e.ENVIRONMENT_SHARED_SOLUTIONS_DIR,
                                      e.SEGMENT_SHARED_SOLUTIONS_DIR,
                                      e.PRODUCT_SHARED_SOLUTIONS_DIR)
        e.COMPOSITE_BLUEPRINT = os.path.join(e.CACHE_DIR,
                                             'composite_blueprint.json')
        if ((not e.GENERATION_USE_CACHE
                and not e.GENERATION_USE_BLUEPRINT_CACHE)
                or not os.path.isfile(e.COMPOSITE_BLUEPRINT)):
            l.blueprint_array = []
            for blueprint_alternate_dir in l.blueprint_alternate_dirs:
                if not blueprint_alternate_dir or not os.path.isdir(
                        blueprint_alternate_dir):
                    continue
                l.blueprint_array += Search.match_files(
                    os.path.join('**', 'segment*.json'),
                    os.path.join('**', 'environment*.json'),
                    os.path.join('**', 'solution*.json'),
                    os.path.join('**', 'domains*.json'),
                    os.path.join('**', 'ipaddressgroups*.json'),
                    os.path.join('**', 'countrygroups*.json'),
                    root=blueprint_alternate_dir)
            l.blueprint_array += Search.match_files(
                os.path.join('**', 'domains*.json'),
                os.path.join('**', 'ipaddressgroups*.json'),
                os.path.join('**', 'countrygroups*.json'),
                os.path.join('**', 'profiles*.json'),
                os.path.join('**', 'product*.json'),
                root=e.PRODUCT_DIR)
            l.blueprint_array += Search.match_files(
                os.path.join('**', 'domains*.json'),
                os.path.join('**', 'ipaddressgroups*.json'),
                os.path.join('**', 'countrygroups*.json'),
                os.path.join('**', 'account*.json'),
                root=e.ACCOUNT_DIR)
            l.blueprint_array += Search.match_files(
                os.path.join('**', 'domains*.json'),
                os.path.join('**', 'ipaddressgroups*.json'),
                os.path.join('**', 'countrygroups*.json'),
                os.path.join('**', 'profiles*.json'),
                os.path.join('**', 'tenant*.json'),
                root=e.TENANT_DIR)
            if not l.blueprint_array:
                blueprint = {}
                with open(e.COMPOSITE_BLUEPRINT, 'wt+') as f:
                    f.write('{}')
            else:
                # Merge the blueprint components
                blueprint = {}
                for blueprint_component_json in l.blueprint_array:
                    with open(blueprint_component_json, 'rt') as f:
                        deep_dict_update(blueprint, json.load(f))
                with open(e.COMPOSITE_BLUEPRINT, 'wt+') as f:
                    json.dump(blueprint, f, indent=4, sort_keys=True)

            # Extract key settings from the composite solution
            tenant = blueprint.get('Tenant', {})
            account = blueprint.get('Account', {})
            product = blueprint.get('Product', {})
            segment = blueprint.get('Segment', {})
            e.TID = tenant.get('Id')
            e.TENANT = tenant.get('Name')
            e.AID = account.get('Id')
            e.AWSID = account.get('AWSId')
            e.ACCOUNT_REGION = account.get('Region')
            e.PID = product.get('Id')
            e.PRODUCT_REGION = product.get('Region')
            e.DEPLOYMENTUNIT_REGION = product.get(e.DEPLOYMENT_UNIT,
                                                  {}).get('Region')
            e.SID = segment.get('Id')

            e.COMPONENT_REGION = e.DEPLOYMENTUNIT_REGION or e.PRODUCT_REGION
            e.REGION = e.REGION or e.COMPONENT_REGION
            # Perform a few consistency checks
            if not e.REGION:
                raise Exception(
                    "The region must be defined in the Product blueprint section."
                )

            l.BLUEPRINT_ACCOUNT = account.get('Name') or account.get('Id')
            l.BLUEPRINT_PRODUCT = product.get('Name') or product.get('Id')
            l.BLUEPRINT_SEGMENT = segment.get('Name') or segment.get('Id')
            if e.ACCOUNT and l.BLUEPRINT_ACCOUNT != 'Account' and e.ACCOUNT != l.BLUEPRINT_ACCOUNT:
                raise Exception(
                    f"Blueprint account of {l.BLUEPRINT_ACCOUNT} doesn't match expected value of {e.ACCOUNT}"
                )
            if e.PRODUCT and l.BLUEPRINT_PRODUCT != 'Product' and e.PRODUCT != l.BLUEPRINT_PRODUCT:
                raise Exception(
                    f"Blueprint product of {l.BLUEPRINT_PRODUCT} doesn't match expected value of {e.PRODUCT}"
                )
            if e.SEGMENT and l.BLUEPRINT_SEGMENT != 'Segment' and e.SEGMENT != l.BLUEPRINT_SEGMENT:
                raise Exception(
                    f"Blueprint segment of {l.BLUEPRINT_SEGMENT} doesn't match expected value of {e.SEGMENT}"
                )
    # Set default AWS credentials if available (hook from Jenkins framework)
    l.CHECK_AWS_ACCESS_KEY_ID = (e.AWS_ACCESS_KEY_ID
                                 or e.ACCOUNT_TEMP_AWS_ACCESS_KEY_ID
                                 or e[e.ACCOUNT_AWS_ACCESS_KEY_ID_VAR])
    if l.CHECK_AWS_ACCESS_KEY_ID:
        e.AWS_ACCESS_KEY_ID = l.CHECK_AWS_ACCESS_KEY_ID

    l.CHECK_AWS_SECRET_ACCESS_KEY = (e.AWS_SECRET_ACCESS_KEY
                                     or e.ACCOUNT_TEMP_AWS_SECRET_ACCESS_KEY
                                     or e[e.ACCOUNT_AWS_SECRET_ACCESS_KEY_VAR])
    if l.CHECK_AWS_SECRET_ACCESS_KEY:
        e.AWS_SECRET_ACCESS_KEY = l.CHECK_AWS_SECRET_ACCESS_KEY

    l.CHECK_AWS_SESSION_TOKEN = e.AWS_SESSION_TOKEN or e.ACCOUNT_TEMP_AWS_SESSION_TOKEN
    if l.CHECK_AWS_SESSION_TOKEN:
        e.AWS_SESSION_TOKEN = l.CHECK_AWS_SESSION_TOKEN

    # Set the profile for IAM access if AWS credentials not in the environment
    if not e.AWS_ACCESS_KEY_ID or not e.AWS_SECRET_ACCESS_KEY:
        available_profiles = boto3.session.Session().available_profiles
        if e.ACCOUNT and e.ACCOUNT in available_profiles:
            e.AWS_DEFAULT_PROFILE = e.ACCOUNT
        if e.AID and e.AID in available_profiles:
            e.AWS_DEFAULT_PROFILE = e.AID
        if e.AWSID and e.AWSID in available_profiles:
            e.AWS_DEFAULT_PROFILE = e.AWSID
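
The blueprint assembly above folds each component file into a single dict via deep_dict_update; a minimal sketch of the recursive merge it is assumed to perform:

def deep_dict_update(base, extra):
    # Recursively merge `extra` into `base`: nested dicts are merged,
    # anything else is overwritten.
    for key, value in extra.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            deep_dict_update(base[key], value)
        else:
            base[key] = value

blueprint = {'Product': {'Id': 'app'}}
deep_dict_update(blueprint, {'Product': {'Region': 'ap-southeast-2'}})
# blueprint == {'Product': {'Id': 'app', 'Region': 'ap-southeast-2'}}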
Example 22
def process_cmdb(root_dir, action, gen3_version, versions, dry_run):
    cmdb_git_repos = Search.match_dirs(os.path.join('**', '*.git'),
                                       root=root_dir)
    dry_run = '(Dryrun)' if dry_run else ''

    for cmdb_git_repo in cmdb_git_repos:

        cmdb_repo = os.path.dirname(cmdb_git_repo)
        cmdb_version_file = os.path.join(cmdb_repo, '.cmdb')
        current_version = ''
        pin_version = ''
        logger.debug('Checking repo %s', cmdb_repo)
        if os.path.isfile(cmdb_version_file):
            with open(cmdb_version_file, 'rt') as f:
                cmdb_version_data = json.load(f)
            current_version = cmdb_version_data.get('Version', {}).get(
                action.capitalize())
            pin_version = cmdb_version_data.get('Pin',
                                                {}).get(action.capitalize())
            logger.debug("Repo pinned at %s version %s", pin_version,
                         current_version)
        else:
            with open(cmdb_version_file, 'wt+') as f:
                json.dump({}, f)

        current_version = current_version or 'v0.0.0'
        if utils.semver_compare(current_version, versions[-1]) >= 0:
            logger.debug(
                '%s of repo "%s" to %s is not required - skipping all version checks',
                action.capitalize(), cmdb_repo, versions[-1])
            continue

        for version in versions:
            if utils.semver_compare(current_version, version) >= 0:
                logger.debug('%s of repo "%s" to %s is not required',
                             action.capitalize(), cmdb_repo, version)
            else:
                logger.info('%s%s of repo "%s" to %s required ...', dry_run,
                            action.capitalize(), cmdb_repo, version)
            if pin_version:
                if utils.semver_compare(current_version, pin_version) < 0:
                    logger.warning(
                        '%s of repo "%s" to %s prevented by pin version %s',
                        action.capitalize(), cmdb_repo, version, pin_version)
                    break
                else:
                    logger.debug(
                        '%s%s of repo "%s" to %s permitted by pin version %s',
                        dry_run, action.capitalize(), cmdb_repo, version,
                        pin_version)
            compatibility = is_upgrade_compatible(version, gen3_version)
            if compatibility == 'incompatible':
                logger.warning((
                    '%s%s of repo "%s" to %s is not compatible with the current gen3 framework version of %s. '
                    'Skipping upgrade process ...'), dry_run,
                            action.capitalize(), cmdb_repo, version,
                            gen3_version)
                break
            elif compatibility == 'unknown':
                logger.warning((
                    '%s%s of repo "%s" to %s requires the GEN3 framework version to be defined. '
                    'Skipping upgrade process ...'), dry_run,
                            action.capitalize(), cmdb_repo, version)
                break
            else:
                logger.debug('%s%s of repo "%s" to %s is compatible', dry_run,
                             action.capitalize(), cmdb_repo, version)

            cmdb_action_func = globals()[
                f'{action.lower()}_cmdb_repo_to_{version.replace(".", "_")}']
            if cmdb_action_func(cmdb_repo, dry_run):
                if dry_run:
                    logger.debug('%sSkipping later versions', dry_run)
                    break
                logger.info('%s of repo "%s" to %s successful',
                            action.capitalize(), cmdb_repo, version)
                with open(cmdb_version_file, 'rt') as f:
                    cmdb_version_data = json.load(f)
                utils.deep_dict_update(
                    cmdb_version_data,
                    {'Version': {
                        action.capitalize(): version
                    }})
                with open(cmdb_version_file, 'wt') as f:
                    json.dump(cmdb_version_data, f)
                current_version = version

    return True
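
A minimal sketch of the version gating that drives the loop above, assuming utils.semver_compare behaves like a standard three-way comparator over "vX.Y.Z" strings:

def semver_compare(a, b):
    # Negative if a < b, zero if equal, positive if a > b.
    pa = [int(x) for x in a.lstrip('v').split('.')]
    pb = [int(x) for x in b.lstrip('v').split('.')]
    return (pa > pb) - (pa < pb)

versions = ['v1.0.0', 'v1.1.0', 'v2.0.0']
current_version = 'v1.0.0'
pending = [v for v in versions if semver_compare(current_version, v) < 0]
# pending == ['v1.1.0', 'v2.0.0']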