# Example 1
def _get_or_create_provider(db: Session, proxy: Proxy, identity: Dict,
                            confirm: ConfirmAcct,
                            external_id: Optional[int]) -> ProviderAccount:
    """Return the existing AWS ProviderAccount for this org, creating it if needed.

    Resolves the organization id via the Organizations API (falling back to a
    per-account placeholder when Organizations is not in use), then looks for
    a matching account record. When none exists, asks `confirm` before
    delegating creation to `_create_provider_and_credential`.

    Raises:
        GFError: if the user declines the confirmation prompt.
    """
    orgs = proxy.service('organizations')
    try:
        org_id = orgs.get('describe_organization')['Organization']['Id']
    except botocore.exceptions.ClientError as err:
        error_code = err.response.get('Error', {}).get('Code')
        if error_code != 'AWSOrganizationsNotInUseException':
            raise
        # Standalone account: synthesize a stable placeholder org id.
        org_id = f'OrgDummy:{identity["Account"]}'
    query = db.query(ProviderAccount).filter(
        ProviderAccount.provider == 'aws', ProviderAccount.name == org_id)
    if external_id is not None:
        query = query.filter(ProviderAccount.external_id == external_id)
    existing = query.one_or_none()
    if existing is not None:
        _require_credential(db, existing.id, identity)
        return existing
    if not confirm(identity):
        raise GFError('User cancelled')
    return _create_provider_and_credential(db, proxy, identity, external_id)
# Example 2
def build_aws_import_job(db: Session, session: boto.Session,
                         confirm: ConfirmAcct,
                         external_id: Optional[int]) -> ImportJob:
    """Create an ImportJob for the AWS account behind `session`.

    Identifies the caller via STS, ensures a ProviderAccount exists for the
    organization, and records the job with its org-graph description.
    """
    proxy = Proxy.build(session)
    caller = session.create_client('sts').get_caller_identity()
    account = _get_or_create_provider(db, proxy, caller, confirm, external_id)
    description = _build_import_job_desc(proxy, caller)
    return ImportJob.create(account, description,
                            description['aws_org']['Id'])
# Example 3
def _build_import_job_desc(proxy: Proxy, identity: Dict) -> Dict:
    """Assemble the descriptor dict stored on a new ImportJob.

    Combines the caller's identity with the organization description and
    account graph fetched from the Organizations API.
    """
    org, graph = _build_org_graph(proxy.service('organizations'),
                                  identity['Account'])
    account_section = {'account_id': org['Id'], 'provider': 'aws'}
    principal_section = {
        'provider_id': identity['UserId'],
        'provider_uri': identity['Arn']
    }
    return {
        'account': account_section,
        'principal': principal_section,
        'aws_org': org,
        'aws_graph': graph
    }
# Example 4
def add_amis_to_import_job(proxy: Proxy, writer: ImportWriter, ps: PathStack,
                           region: str, amis: List[str]) -> str:
    """Import the given AMIs in `region` and return the scoped path."""
    ps = ps.scope(region)
    ec2 = proxy.service('ec2', region)
    # Filters=[] overrides the proxy's default filters for this call.
    describe_result = ec2.list('describe_images', ImageIds=amis, Filters=[])
    _log.debug(f'describe images result {describe_result}')
    if describe_result is not None:
        name, resources = describe_result[0], describe_result[1]
        # Launch permissions are intentionally not fetched here: these
        # images are owned by other accounts.
        writer(ps, name, resources, {'region': region})
    return ps.path()
# Example 5
def add_logs_resource_policies(db: Session, proxy: Proxy,
                               region_cache: RegionCache, writer: ImportWriter,
                               import_job: ImportJob, ps: PathStack,
                               account_id: str):
    """Write synthesized per-log-group resource policies for each logs region.

    For every region, fetches the account's resource policies, expands each
    policy prefix to the concrete log-group URIs it covers, groups the
    statements by URI, and writes one ResourcePolicy record per log group.
    """
    for region in region_cache.regions_for_service('logs'):
        policies = _import_resource_policies(proxy.service('logs', region))
        statements_by_uri = defaultdict(list)
        for prefix, statements in policies.items():
            matching_uris = _log_group_uris_by_prefix(
                db, import_job.provider_account_id, account_id, region,
                prefix)
            for uri in matching_uris:
                statements_by_uri[uri].extend(statements)
        for uri, statements in statements_by_uri.items():
            writer(ps, 'ResourcePolicy', {
                'Policy': _make_policy(statements),
                'arn': uri
            }, {'region': region})
# Example 6
 def import_to_db(db: Session, import_job_id: int,
                  service_spec: ServiceSpec):
     # Run every resource import fn for each account in the job, writing
     # results through a phase-0 'base' writer for this service.
     #
     # NOTE(review): `self.name` and `resource_fns` are not defined in this
     # scope — this looks like a method body lifted out of a class (hence
     # the odd indentation); confirm the enclosing class provides both.
     job = db.query(ImportJob).get(import_job_id)
     if job is None:
         raise GFInternal('Lost ImportJob')
     writer = db_import_writer(db,
                               job.id,
                               job.provider_account_id,
                               self.name,
                               phase=0,
                               source='base')
     # One boto session/proxy per account; the PathStack is rebuilt from
     # the job for each account.
     for path, account in account_paths_for_import(db, job):
         boto = load_boto_session(account)
         proxy = Proxy.build(boto)
         ps = PathStack.from_import_job(job)
         service_proxy = proxy.service(self.name)
         for fn in resource_fns:
             fn(service_proxy, writer, account.scope, ps, service_spec)
# Example 7
def _global_async_proxy(ps: PathStack, import_job_id: int,
                        provider_account_id: int, config: Dict, svc_name: str,
                        account_id: str, service_spec: ServiceSpec,
                        import_fn: GlobalResourceSpec):
    """Worker entry point: run one global-resource import and commit.

    Builds its own db session and boto session from `config`, runs
    `import_fn` through a phase-0 'base' writer, and commits on success.
    """
    db = import_session()
    session = load_boto_session_from_config(config)
    service_proxy = Proxy.build(session).service(svc_name)
    writer = db_import_writer(db,
                              import_job_id,
                              provider_account_id,
                              svc_name,
                              phase=0,
                              source='base')
    try:
        import_fn(service_proxy, writer, account_id, ps, service_spec)
    except Exception as e:
        # Log with the traceback before propagating; the commit is skipped.
        _log.error(f'Failed for svc {svc_name}', exc_info=e)
        raise
    db.commit()
# Example 8
def _async_proxy(ps: PathStack, import_job_id: int, provider_account_id: int,
                 region: str, config: Dict, svc_name: str,
                 service_spec: ServiceSpec, import_fn: RegionalImportFn):
    """Worker entry point: import one service in one region and commit.

    Builds its own db session and boto session from `config`, scopes the
    path stack to `region`, and writes every (name, resources) pair the
    import fn yields through a phase-0 'base' writer.
    """
    db = import_session()
    scoped = ps.scope(region)
    session = load_boto_session_from_config(config)
    service_proxy = Proxy.build(session).service(svc_name, region)
    writer = db_import_writer(db,
                              import_job_id,
                              provider_account_id,
                              svc_name,
                              phase=0,
                              source='base')
    _log.debug(f'Starting {svc_name} - {region}')
    results = import_fn(service_proxy, region, service_spec)
    for resource_name, raw_resources in results:
        writer(scoped, resource_name, raw_resources, {'region': region})
    db.commit()
    _log.debug(f'Committed {svc_name} - {region}')
# Example 9
 def import_region_to_db(db: Session, import_job_id: int, region: str,
                         service_spec: ServiceSpec):
     # Run a regional import for every account in the job, writing results
     # through a phase-0 'base' writer scoped to account then region.
     #
     # NOTE(review): `svc_name` and `fn` are not defined in this scope —
     # like import_to_db above, this appears to be a method body extracted
     # from a class (hence the odd indentation); confirm the enclosing
     # scope supplies them.
     job = db.query(ImportJob).get(import_job_id)
     if job is None:
         raise GFInternal('Lost ImportJob')
     writer = db_import_writer(db,
                               job.id,
                               job.provider_account_id,
                               svc_name,
                               phase=0,
                               source='base')
     for path, account in account_paths_for_import(db, job):
         boto = load_boto_session(account)
         proxy = Proxy.build(boto)
         ps = PathStack.from_import_job(job).scope(account.scope)
         service_proxy = proxy.service(svc_name, region)
         ps = ps.scope(region)
         for resource_name, raw_resources in fn(service_proxy, region,
                                                service_spec):
             writer(ps, resource_name, raw_resources, {'region': region})
# Example 10
def _create_provider_and_credential(
        db: Session, proxy: Proxy, identity,
        external_id: Optional[int]) -> ProviderAccount:
    """Insert a new 'aws' ProviderAccount named after the org, plus credential.

    Uses the Organizations API to name the account by its org id; falls back
    to a per-account placeholder when Organizations is not in use.
    """
    account_id = identity['Account']
    orgs = proxy.service('organizations')
    try:
        org_id = orgs.get('describe_organization')['Organization']['Id']
    except botocore.exceptions.ClientError as err:
        error_code = err.response.get('Error', {}).get('Code')
        if error_code != 'AWSOrganizationsNotInUseException':
            raise
        # Standalone account: synthesize a stable placeholder org id.
        org_id = f'OrgDummy:{account_id}'
    provider = ProviderAccount(provider='aws',
                               name=org_id,
                               external_id=external_id)
    db.add(provider)
    # Flush so provider.id is populated before attaching the credential.
    db.flush()
    _require_credential(db, provider.id, identity)
    return provider
# Example 11
def synthesize_account_root(proxy: Proxy, db: Session, import_job: ImportJob,
                            path: str, account_id: str, partition: str):
    """Create a synthetic RootAccount resource for `account_id`.

    IAM has no API that returns the root user as a resource, so this
    fabricates a mapped resource for it, annotated with whether a virtual
    MFA device is attached to the root account.
    """
    service_proxy = proxy.service('iam')
    mfa_resp = service_proxy.list('list_virtual_mfa_devices')
    has_virtual_mfa = False
    if mfa_resp is not None:
        # Bug fix: the MFA serial previously hard-coded the 'aws' partition
        # while the root ARN below used `partition`; in aws-us-gov / aws-cn
        # the serial never matched and root MFA was always reported False.
        root_mfa_arn = (
            f'arn:{partition}:iam::{account_id}:mfa/root-account-mfa-device')
        mfas = mfa_resp[1]['VirtualMFADevices']
        has_virtual_mfa = any(
            mfa['SerialNumber'] == root_mfa_arn for mfa in mfas)

    arn = f'arn:{partition}:iam::{account_id}:root'
    mapped = MappedResource(name='<root account>',
                            uri=arn,
                            provider_type='RootAccount',
                            raw={
                                'Arn': arn,
                                'has_virtual_mfa': has_virtual_mfa
                            },
                            service='iam',
                            category=None)
    attrs: List[MappedAttribute] = [
        MappedAttribute(type='provider', name='Arn', value=arn),
        MappedAttribute(type='provider',
                        name='has_virtual_mfa',
                        value=has_virtual_mfa)
    ]
    apply_mapped_attrs(db,
                       import_job,
                       path,
                       mapped,
                       attrs,
                       source='base',
                       raw_import_id=None)
# Example 12
def map_import(db: Session, import_job_id: int, partition: str,
               spec: ImportSpec):
    """Map raw imported data into resources for every account in the job.

    Order-sensitive orchestration: per account it maps existing raw data,
    optionally synthesizes/imports phase-1 adjunct data (iam root, ec2
    images, logs resource policies) gated by `spec`, then re-maps so the
    adjunct data is picked up, and finally processes deletes and relations.
    """
    import_job = db.query(ImportJob).get(import_job_id)
    if import_job is None:
        raise GFInternal('Lost ImportJob')
    ps = PathStack.from_import_job(import_job)
    mapper = _get_mapper(import_job)
    gate = service_gate(spec)
    for path, account in account_paths_for_import(db, import_job):
        uri_fn = get_arn_fn(account.scope, partition)
        ps = PathStack.from_import_job(import_job).scope(account.scope)
        map_resource_prefix(db, import_job, ps.path(), mapper, uri_fn)
        # boto/proxy are built lazily: only when a gated service needs them.
        boto = None
        proxy = None
        if gate('iam') is not None:
            boto = load_boto_session(account)
            proxy = Proxy.build(boto)
            synthesize_account_root(proxy, db, import_job, ps.path(),
                                    account.scope, partition)
        ec2_spec = gate('ec2')
        if ec2_spec is not None and resource_gate(ec2_spec, 'Images'):
            # Additional ec2 work
            if boto is None or proxy is None:
                boto = load_boto_session(account)
                proxy = Proxy.build(boto)
            adjunct_writer = db_import_writer(db,
                                              import_job.id,
                                              import_job.provider_account_id,
                                              'ec2',
                                              phase=1,
                                              source='base')
            # NOTE(review): import_job is passed twice here — confirm
            # find_adjunct_data's signature really takes it in both slots.
            find_adjunct_data(db, proxy, adjunct_writer, import_job, ps,
                              import_job)

        logs_spec = gate('logs')
        if logs_spec is not None and resource_gate(logs_spec,
                                                   'ResourcePolicies'):
            if boto is None or proxy is None:
                boto = load_boto_session(account)
                proxy = Proxy.build(boto)
            region_cache = RegionCache(boto, partition)
            adjunct_writer = db_import_writer(db,
                                              import_job.id,
                                              import_job.provider_account_id,
                                              'logs',
                                              phase=1,
                                              source='logspolicies')
            add_logs_resource_policies(db, proxy, region_cache, adjunct_writer,
                                       import_job, ps, account.scope)

        # Map partial (source-specific) data, then its deletes.
        for source in AWS_SOURCES:
            map_partial_prefix(db, mapper, import_job, source, ps.path(),
                               uri_fn)
            map_partial_deletes(db, import_job, ps.path(), source, spec)
        # Re-map anything we've added
        map_resource_prefix(db, import_job, ps.path(), mapper, uri_fn)

        # Handle deletes
        map_resource_deletes(db, ps.path(), import_job, spec)

        # Relations are mapped last so they see the final resource set.
        found_relations = map_resource_relations(db, import_job, ps.path(),
                                                 mapper, uri_fn)

        map_relation_deletes(db, import_job, ps.path(), found_relations, spec)