# Re-run the mapping phase for an existing import job and print a report.
def remap_cmd(import_job_id: Optional[int], dry_run: bool,
              service: Optional[str], gov_cloud: bool):
  import_spec = parse_import_spec(service)
  partition = 'aws-us-gov' if gov_cloud else 'aws'
  _log.info('Mapping an AWS import')
  if import_job_id is None:
    raise NotImplementedError('Need to query last import job')
  db = import_session()
  map_import(db, import_job_id, partition, import_spec)
  import_job = db.query(ImportJob).get(import_job_id)
  if import_job is None:
    raise RuntimeError('Lost import job')
  refresh_views(db, import_job.provider_account_id)
  if not dry_run:
    db.commit()
  report = report_for_import(db, import_job)
  print(f'Results - Remap of import #{import_job.id}')
  print_report(report)
def delete_acct(account_spec: str, dry_run: bool, force: bool):
  db = import_session()
  account = db.query(ProviderAccount).filter(
      ProviderAccount.provider == 'aws',
      ProviderAccount.name == account_spec).one_or_none()
  if account is None:
    raise GFInternal(f'Could not find AWS account {account_spec}')
  remove = force or query_yes_no(
      f'Remove AWS account {account.name} from GoldFig?', default='no')
  if remove:
    report = delete_account(db, account)
    print(f'Removed from AWS account {account.name}')
    for table, count in report.items():
      print(f'{table.ljust(36)}{str(count).rjust(6)} items')
    if not dry_run:
      db.commit()
  else:
    print('Aborting')
    db.rollback()
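# Illustrative sketch only (not this project's implementation): delete_acct and
# import_aws_cmd rely on a query_yes_no(question, default=...) helper that
# prompts on stdin and returns a bool. A minimal stand-in with that contract,
# under a hypothetical name, could look like this.
def _example_query_yes_no(question: str, default: str = 'yes') -> bool:
  valid = {'yes': True, 'y': True, 'no': False, 'n': False}
  prompt = ' [Y/n] ' if default == 'yes' else ' [y/N] '
  while True:
    answer = input(question + prompt).strip().lower()
    if answer == '' and default in valid:
      # Empty input falls back to the default answer.
      return valid[default]
    if answer in valid:
      return valid[answer]
    print("Please answer 'yes' or 'no' (or 'y'/'n').")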
# Per-service worker: opens its own DB session and boto proxy, runs one
# global (non-regional) import function, and commits what the writer recorded.
def _global_async_proxy(ps: PathStack, import_job_id: int,
                        provider_account_id: int, config: Dict, svc_name: str,
                        account_id: str, service_spec: ServiceSpec,
                        import_fn: GlobalResourceSpec):
  db = import_session()
  boto = load_boto_session_from_config(config)
  proxy = Proxy.build(boto)
  service_proxy = proxy.service(svc_name)
  writer = db_import_writer(db,
                            import_job_id,
                            provider_account_id,
                            svc_name,
                            phase=0,
                            source='base')
  try:
    import_fn(service_proxy, writer, account_id, ps, service_spec)
  except Exception as e:
    _log.error(f'Failed for svc {svc_name}', exc_info=e)
    raise
  db.commit()
# Per-service, per-region worker: scopes the path stack to the region and
# streams (resource_name, raw_resources) pairs from the import function into
# the DB writer, committing once the region is done.
def _async_proxy(ps: PathStack, import_job_id: int, provider_account_id: int,
                 region: str, config: Dict, svc_name: str,
                 service_spec: ServiceSpec, import_fn: RegionalImportFn):
  db = import_session()
  ps = ps.scope(region)
  boto = load_boto_session_from_config(config)
  proxy = Proxy.build(boto)
  service_proxy = proxy.service(svc_name, region)
  writer = db_import_writer(db,
                            import_job_id,
                            provider_account_id,
                            svc_name,
                            phase=0,
                            source='base')
  _log.debug(f'Starting {svc_name} - {region}')
  for resource_name, raw_resources in import_fn(service_proxy, region,
                                                service_spec):
    writer(ps, resource_name, raw_resources, {'region': region})
  db.commit()
  _log.debug(f'Committed {svc_name} - {region}')
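# Illustrative sketch only: the contract _async_proxy expects from a
# RegionalImportFn. The function and payload below are hypothetical; the only
# behaviour used above is that the callable accepts (service_proxy, region,
# service_spec) and yields (resource_name, raw_resources) pairs for the writer.
def _example_regional_import_fn(service_proxy, region: str, service_spec):
  # A real import function would page through the service proxy here; this
  # stand-in just yields one resource list in the expected shape.
  raw_resources = [{'Name': 'example-resource', 'Region': region}]
  yield 'ExampleResource', raw_resources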
def try_rds():
  import_session()
def reinstall_views(provider_spec: Optional[str]):
  db = import_session()
  provider_account_id = provider_for_spec(db, provider_spec)
  refresh_views(db, provider_account_id)
def reset_provider_account(provider_account: int):
  db = import_session()
  report = reset_account(db, provider_account)
  db.commit()
  pprint(report)
# Full AWS import: registers the import job, crawls the account
# (single-threaded in debug mode, otherwise via parallel worker sessions),
# maps the results, and prints a report. Exits with status 3 if any worker
# raised.
def import_aws_cmd(debug: bool, force: bool, external_id: Optional[int],
                   dry_run: bool, service: Optional[str], gov_cloud: bool):
  partition = 'aws-us-gov' if gov_cloud else 'aws'
  os.environ[
      'AWS_DEFAULT_REGION'] = 'us-gov-east-1' if gov_cloud else 'us-east-2'
  import_spec = parse_import_spec(service)
  db = import_session()
  boto = get_boto_session()
  if force:
    confirm = lambda _: True
  else:

    def _confirm(identity: Dict) -> bool:
      return query_yes_no(
          f'Add AWS account {identity["Account"]} using identity {identity["Arn"]}?',
          default='yes')

    confirm = _confirm
  import_job = build_aws_import_job(db, boto, confirm, external_id)
  db.add(import_job)
  db.flush()
  region_cache = RegionCache(boto, partition)
  if debug:
    run_single_session(db, import_job.id, region_cache, gov_cloud,
                       import_spec)
    db.flush()
    map_import(db, import_job.id, partition, import_spec)
    refresh_views(db, import_job.provider_account_id)
    if not dry_run:
      db.commit()
      print('done', import_job.id)
  else:
    accounts = account_paths_for_import(db, import_job)
    db.commit()
    # No db required for parallel invocation
    exceptions = run_parallel_session(region_cache, accounts, import_job,
                                      gov_cloud, import_spec)
    # Make certain we're using the current db session
    reloaded_import_job = db.query(ImportJob).get(import_job.id)
    if reloaded_import_job is None:
      raise RuntimeError('Lost import job')
    if len(exceptions) == 0:
      db.commit()
      try:
        map_import(db, reloaded_import_job.id, partition, import_spec)
        db.commit()
        refresh_views(db, reloaded_import_job.provider_account_id)
        reloaded_import_job.mark_complete(exceptions=[])
      except Exception as e:
        _log.error('exception caught in map', exc_info=True)
        exception = traceback.format_exc()
        reloaded_import_job.mark_complete(exceptions=[exception])
        exceptions.append(str(e))
    else:
      reloaded_import_job.mark_complete(exceptions)
    db.add(reloaded_import_job)
    db.commit()
    report = report_for_import(db, reloaded_import_job)
    print(f'Results - Import #{reloaded_import_job.id}')
    print_report(report)
    if len(exceptions) > 0:
      sys.exit(3)
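# Illustrative sketch only: build_aws_import_job receives a `confirm` callable
# that is passed the caller identity (a dict with at least 'Account' and 'Arn',
# as used above) and returns a bool. A hypothetical non-interactive variant
# that only approves a specific account id could look like this.
def _example_confirm_account(expected_account: str):
  def _confirm(identity: Dict) -> bool:
    return identity['Account'] == expected_account
  return _confirm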
def list_accounts():
  db = import_session()
  accounts = ProviderAccount.all(db, provider='aws')
  print(
      tabulate([(account.provider, account.name) for account in accounts],
               headers=['Type', 'Account']))