def upgrade_cmdb_repo_to_v1_1_0_settings(root_dir, dry_run, target_dir):
    """Migrate a v1.0.0 settings tree under root_dir into the v1.1.0 layout at target_dir.

    Files directly under root_dir become shared settings; each sub-directory
    of root_dir is treated as an environment and copied into a 'default'
    segment, after which stale '*.ref' and 'container.json' files are pruned.

    When dry_run is truthy the intended copies are logged but nothing is
    copied or deleted (directories are still created).  Returns True.
    """
    # Create the shared file location for default segment
    shared_dir = os.path.join(target_dir, 'shared')
    os.makedirs(shared_dir, exist_ok=True)

    # Top-level files become shared settings
    for filename in Search.list_files(root_dir):
        source_path = os.path.join(root_dir, filename)
        dest_path = os.path.join(shared_dir, filename)
        logger.debug('Copying %s to %s', source_path, dest_path)
        if not dry_run:
            shutil.copy2(source_path, dest_path)

    # Each sub-directory is an environment; its content lands in the
    # environment's 'default' segment
    for environment in Search.list_dirs(root_dir):
        environment_src = os.path.join(root_dir, environment)
        segment_dir = os.path.join(target_dir, environment, 'default')
        os.makedirs(segment_dir, exist_ok=True)
        logger.debug('Copying %s to %s', environment_src, segment_dir)
        if dry_run:
            continue
        for entry in Search.list_all(environment_src):
            entry_src = os.path.join(environment_src, entry)
            entry_dst = os.path.join(segment_dir, entry)
            copier = shutil.copytree if os.path.isdir(entry_src) else shutil.copy2
            copier(entry_src, entry_dst)

        # Remove anything unwanted
        unwanted_files = Search.match_files(
            os.path.join('**', '*.ref'),
            os.path.join('**', 'container.json'),
            root=segment_dir
        )
        for unwanted_file in unwanted_files:
            logger.debug('Deleting %s', unwanted_file)
            os.remove(unwanted_file)

    return True
def upgrade_cmdb_repo_to_v1_1_0_state(root_dir, dry_run, target_dir):
    """Migrate a v1.0.0 state ('cf') tree under root_dir into the v1.1.0 layout at target_dir.

    Files in the top-level 'cf' directory become shared state; each other
    sub-directory that contains a 'cf' directory is treated as an environment
    whose 'cf' content is copied into a 'default' segment.

    When dry_run is truthy the intended copies are logged but nothing is
    copied (the shared directory is still created).  Returns True, matching
    upgrade_cmdb_repo_to_v1_1_0_settings.
    """
    # Create the shared file location
    shared_dir = os.path.join(target_dir, 'shared')
    os.makedirs(shared_dir, exist_ok=True)

    # Copy across the shared files
    cf_dir = os.path.join(root_dir, 'cf')
    if os.path.isdir(cf_dir):
        sub_files = Search.list_files(cf_dir)
        for sub_file in sub_files:
            src = os.path.join(cf_dir, sub_file)
            dst = os.path.join(shared_dir, sub_file)
            logger.debug('Copying %s to %s', src, dst)
            if dry_run:
                continue
            shutil.copy2(src, dst)

    # Process each sub dir
    sub_dirs = Search.list_dirs(root_dir)
    for sub_dir in sub_dirs:
        # 'cf' already handled above as shared state
        if sub_dir == 'cf':
            continue

        environment = sub_dir
        segment_dir = os.path.join(target_dir, environment, 'default')
        cf_dir = os.path.join(root_dir, sub_dir, 'cf')

        # Only directories holding state are migrated
        if not os.path.isdir(cf_dir):
            continue

        os.makedirs(segment_dir, exist_ok=True)
        logger.debug('Copying %s to %s', cf_dir, segment_dir)
        if dry_run:
            continue
        for name in Search.list_all(cf_dir):
            src = os.path.join(cf_dir, name)
            dst = os.path.join(segment_dir, name)
            if os.path.isdir(src):
                shutil.copytree(src, dst)
            else:
                shutil.copy2(src, dst)

    # FIX: previously fell off the end returning None; return True for
    # consistency with upgrade_cmdb_repo_to_v1_1_0_settings
    return True
def upgrade_cmdb_repo_to_v1_1_0(root_dir, dry_run):
    """Upgrade every v1.0.0 source tree under root_dir to the v1.1.0 layout.

    For each source kind in UPGRADE_V1_1_0_SOURCES, matching directories are
    converted into their mapped target directory (skipped if the target
    already exists).  'aws' trees go through the state migration, all others
    through the settings migration.  'solutions' and 'credentials' sources
    then receive extra post-processing.  When dry_run is truthy conversions
    are logged but no post-processing occurs.  Returns True.

    FIX: log message 'Creaging %s' corrected to 'Creating %s'.
    """
    # Segment-level keys that moved to environment.json in v1.1.0
    # (hoisted out of the per-segment loop - it is constant)
    segment_legacy_keys = ['Id', 'Name', 'Title', 'Environment']

    for source in UPGRADE_V1_1_0_SOURCES:
        source_dirs = Search.match_dirs(os.path.join('**', source), root=root_dir)
        for source_dir in source_dirs:
            target_dir = os.path.join(
                os.path.dirname(source_dir),
                UPGRADE_V1_1_0_SOURCES[source]
            )

            logger.debug('Checking %s', source_dir)
            # Already converted - don't clobber it
            if os.path.isdir(target_dir):
                continue

            logger.info('Converting %s into %s', source_dir, target_dir)
            if source == 'aws':
                upgrade_cmdb_repo_to_v1_1_0_state(source_dir, dry_run, target_dir)
            else:
                upgrade_cmdb_repo_to_v1_1_0_settings(source_dir, dry_run, target_dir)

            if dry_run:
                continue

            # Special processing
            if source == 'solutions':
                # Shared solution files are specific to the default segment
                shared_default_dir = os.path.join(target_dir, 'shared', 'default')
                os.makedirs(shared_default_dir, exist_ok=True)

                target_shared_dir = os.path.join(target_dir, 'shared')
                solution_files = Search.list_files(target_shared_dir)
                for solution_file in solution_files:
                    src = os.path.join(target_shared_dir, solution_file)
                    dst = os.path.join(shared_default_dir, solution_file)
                    logger.debug('Moving %s to %s', src, dst)
                    shutil.move(src, dst)

                # Process environments
                segment_files = Search.match_files(
                    os.path.join('**', 'segment.json'),
                    root=target_dir
                )
                for segment_file in segment_files:
                    segment_dir = os.path.dirname(segment_file)
                    environment_dir = os.path.dirname(segment_dir)

                    # Add environment.json file carrying the environment id
                    with open(segment_file, 'rt') as f:
                        segment = json.load(f)
                    environment_id = segment.get('Segment', {}).get('Environment')

                    environment_file = os.path.join(environment_dir, 'environment.json')
                    logger.debug('Creating %s', environment_file)
                    with open(environment_file, 'wt+') as f:
                        json.dump({'Environment': {'Id': environment_id}}, f)

                    # Strip keys that now live at the environment level
                    logger.debug('Cleaning %s', segment_file)
                    for segment_legacy_key in segment_legacy_keys:
                        try:
                            del segment['Segment'][segment_legacy_key]
                        except KeyError:
                            pass
                    with open(segment_file, 'wt') as f:
                        json.dump(segment, f)

                # The shared default segment needs its own marker file
                shared_segment_file = os.path.join(shared_default_dir, 'segment.json')
                logger.debug('Creating %s', shared_segment_file)
                with open(shared_segment_file, 'wt+') as f:
                    json.dump({'Segment': {'Id': 'default'}}, f)

            elif source == 'credentials':
                pem_files = Search.match_files(
                    os.path.join('**', 'aws-ssh*.pem'),
                    root=target_dir
                )
                for pem_file in pem_files:
                    filename = os.path.basename(pem_file)
                    segment_dir = os.path.dirname(pem_file)

                    # Move the pem files to make them invisible to the generation process
                    src = pem_file
                    dst = os.path.join(segment_dir, '.' + filename)
                    logger.debug('Moving %s to %s', src, dst)
                    shutil.move(src, dst)

                    # Make sure sensitive material stays out of version control
                    segment_ignore_file = os.path.join(segment_dir, '.gitignore')
                    if not os.path.isfile(segment_ignore_file):
                        logger.debug('Creating %s', segment_ignore_file)
                        ignore_list = ['*.plaintext', '*.decrypted', '*.ppk']
                        with open(segment_ignore_file, 'wt+') as f:
                            f.write('\n'.join(ignore_list))

    return True