Example #1
    def fetch_all(self, credentials, regions=None, partition_name='aws', targets=None):
        """
        Fetch all the configuration supported by Scout2 for a given service

        :param credentials:             Credentials to use when connecting to the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.

        """
        # Initialize targets
        # Tweak params
        regions = [] if regions is None else regions
        realtargets = ()
        if not targets:
            targets = self.targets
        for i, target in enumerate(targets['first_region']):
            params = self.tweak_params(target[3], credentials)
            realtargets = realtargets + ((target[0], target[1], target[2], params, target[4]),)
        targets['first_region'] = realtargets
        realtargets = ()
        for i, target in enumerate(targets['other_regions']):
            params = self.tweak_params(target[3], credentials)
            realtargets = realtargets + ((target[0], target[1], target[2], params, target[4]),)
        targets['other_regions'] = realtargets

        printInfo('Fetching %s config...' % format_service_name(self.service))
        self.fetchstatuslogger = FetchStatusLogger(targets['first_region'], True)
        api_service = 'ec2' if self.service.lower() == 'vpc' else self.service.lower()
        # Init regions
        regions = build_region_list(api_service, regions, partition_name) # TODO: move this code within this class
        self.fetchstatuslogger.counts['regions']['discovered'] = len(regions)
        # Threading to fetch & parse resources (queue consumer)
        q = self._init_threading(self._fetch_target, {}, self.thread_config['parse'])
        # Threading to list resources (queue feeder)
        qr = self._init_threading(self._fetch_region,
                                  {'api_service': api_service, 'credentials': credentials, 'q': q, 'targets': ()},
                                  self.thread_config['list'])
        # Go
        for i, region in enumerate(regions):
            qr.put((region, targets['first_region'] if i == 0 else targets['other_regions']))
        # Join
        qr.join()
        q.join()
        # Show completion and force newline
        self.fetchstatuslogger.show(True)
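
Each entry in targets['first_region'] and targets['other_regions'] is a 5-tuple; only target[3] (the params) is rewritten by tweak_params() above. A small illustrative sketch of the assumed layout, inferred from how the tuples are unpacked here and built in Example #5's __init__ (the EC2-flavoured values are made up):

# Illustrative only: the assumed shape of one target entry (values are made up).
example_target = (
    'instances',           # target[0]: resource type / counter key
    'Reservations',        # target[1]: attribute of the API response to read
    'describe_instances',  # target[2]: name of the list API call
    {},                    # target[3]: params, rewritten by tweak_params()
    False,                 # target[4]: ignore errors raised while listing
)
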
Example #2
    def fetch_all(self, credentials, regions=None, partition_name='aws', targets=None):
        """
        :param credentials:             Credentials to use when connecting to the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.
        :return:
        """
        regions = [] if regions is None else regions
        global status, formatted_string

        # Initialize targets
        if not targets:
            targets = type(self).targets
        print_info('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None

        # FIXME the below should be moved to each provider's code

        # Connect to the service
        if self._is_provider('aws'):
            if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
                api_clients = {}
                for region in build_region_list(self.service, regions, partition_name):
                    api_clients[region] = connect_service('s3', credentials, region, silent=True)
                api_client = api_clients[list(api_clients.keys())[0]]
            elif self.service == 'route53domains':
                api_client = connect_service(self.service, credentials, 'us-east-1',
                                             silent=True)  # TODO: use partition's default region
            else:
                api_client = connect_service(self.service, credentials, silent=True)

        elif self._is_provider('gcp'):
            api_client = gcp_connect_service(service=self.service, credentials=credentials)

        elif self._is_provider('azure'):
            api_client = azure_connect_service(service=self.service, credentials=credentials)

        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        # Threading to parse resources (queue consumer)
        target_queue = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': target_queue}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        service_queue = self._init_threading(self.__fetch_service, params, self.thread_config['list'])

        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)

        # Go
        for target in targets:
            service_queue.put(target)

        # Blocks until all items in the queue have been gotten and processed.
        service_queue.join()
        target_queue.join()

        # Show completion and force newline
        if self._is_provider('aws'):
            if self.service != 'iam':
                self.fetchstatuslogger.show(True)
        else:
            self.fetchstatuslogger.show(True)

        # Threads should stop running as queues are empty
        self.run_target_threads = False
        self.run_service_threads = False
        # Put x items in the queues to ensure threads run one last time (and exit)
        for i in range(self.thread_config['parse']):
            target_queue.put(None)
        for j in range(self.thread_config['list']):
            service_queue.put(None)
Example #3
class BaseConfig(object):

    def __init__(self, thread_config=4, **kwargs):
        """

        :param thread_config:
        """

        self.library_type = None if not hasattr(self, 'library_type') else self.library_type

        self.service = re.sub(r'Config$', "", type(self).__name__).lower()
        self.thread_config = thread_configs[thread_config]

        # Booleans that define if threads should keep running
        self.run_service_threads = True
        self.run_target_threads = True

    def _is_provider(self, provider_name):
        return False

    def get_non_provider_id(self, name):
        """
        Not all AWS resources have an ID and some services allow the use of "." in names, which breaks Scout2's
        recursion scheme if the name is used as an ID. Use SHA1(name) instead.

        :param name:                    Name of the resource
        :return:                        SHA1(name)
        """
        m = sha1()
        m.update(name.encode('utf-8'))
        return m.hexdigest()

    def fetch_all(self, credentials, regions=None, partition_name='aws', targets=None):
        """
        :param credentials:             Credentials to use when connecting to the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.
        :return:
        """
        regions = [] if regions is None else regions
        global status, formatted_string

        # Initialize targets
        if not targets:
            targets = type(self).targets
        print_info('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None

        # FIXME the below should be moved to each provider's code

        # Connect to the service
        if self._is_provider('aws'):
            if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
                api_clients = {}
                for region in build_region_list(self.service, regions, partition_name):
                    api_clients[region] = connect_service('s3', credentials, region, silent=True)
                api_client = api_clients[list(api_clients.keys())[0]]
            elif self.service == 'route53domains':
                api_client = connect_service(self.service, credentials, 'us-east-1',
                                             silent=True)  # TODO: use partition's default region
            else:
                api_client = connect_service(self.service, credentials, silent=True)

        elif self._is_provider('gcp'):
            api_client = gcp_connect_service(service=self.service, credentials=credentials)

        elif self._is_provider('azure'):
            api_client = azure_connect_service(service=self.service, credentials=credentials)

        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        # Threading to parse resources (queue consumer)
        target_queue = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': target_queue}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        service_queue = self._init_threading(self.__fetch_service, params, self.thread_config['list'])

        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)

        # Go
        for target in targets:
            service_queue.put(target)

        # Blocks until all items in the queue have been gotten and processed.
        service_queue.join()
        target_queue.join()

        # Show completion and force newline
        if self._is_provider('aws'):
            if self.service != 'iam':
                self.fetchstatuslogger.show(True)
        else:
            self.fetchstatuslogger.show(True)

        # Threads should stop running as queues are empty
        self.run_target_threads = False
        self.run_service_threads = False
        # Put x items in the queues to ensure threads run one last time (and exit)
        for i in range(self.thread_config['parse']):
            target_queue.put(None)
        for j in range(self.thread_config['list']):
            service_queue.put(None)

    def __fetch_target(self, q, params):
        global status
        try:
            while self.run_target_threads:
                try:
                    target_type, target = q.get() or (None, None)
                    if target_type and target:
                        # Make a full copy of the target in case we need to re-queue it
                        backup = copy.deepcopy(target)
                        method = getattr(self, 'parse_%s' %
                                         target_type.replace('.', '_'))  # TODO fix this, hack for GCP API Client libs
                        method(target, params)
                        self.fetchstatuslogger.counts[target_type]['fetched'] += 1
                        self.fetchstatuslogger.show()
                except Exception as e:
                    if hasattr(e, 'response') and \
                            'Error' in e.response and \
                            e.response['Error']['Code'] in ['Throttling']:
                        q.put((target_type, backup), )
                    else:
                        print_exception(e)
                finally:
                    q.task_done()
        except Exception as e:
            print_exception(e)
            pass

    def __fetch_service(self, q, params):
        api_client = params['api_client']
        try:
            while self.run_service_threads:
                try:
                    target_type, response_attribute, list_method_name, list_params, ignore_list_error = q.get() or (None, None, None, None, None)

                    if target_type:

                        if not list_method_name:
                            continue

                        try:
                            method = self._get_method(api_client, target_type, list_method_name)
                        except Exception as e:
                            print_exception(e)
                            continue

                        try:
                            targets = self._get_targets(response_attribute, api_client, method, list_params, ignore_list_error)
                        except Exception as e:
                            if not ignore_list_error:
                                print_exception(e)
                            targets = []

                        self.fetchstatuslogger.counts[target_type]['discovered'] += len(targets)

                        for target in targets:
                            params['q'].put((target_type, target), )

                except Exception as e:
                    print_exception(e)
                finally:
                    q.task_done()
        except Exception as e:
            print_exception(e)
            pass

    def _get_method(self, api_client, target_type, list_method_name):
        """
        Gets the appropriate method, required as each provider may have particularities

        :return:
        """
        return None

    def _get_targets(self, response_attribute, api_client, method, list_params, ignore_list_error):
        """
        Gets the targets, required as each provider may have particularities

        :return:
        """
        return None

    def finalize(self):
        for t in self.fetchstatuslogger.counts:
            setattr(self, '%s_count' % t, self.fetchstatuslogger.counts[t]['fetched'])
        delattr(self, 'fetchstatuslogger')

    def _init_threading(self, function, params=None, num_threads=10):
        params = {} if params is None else params
        # Init queue and threads
        q = Queue(maxsize=0)  # TODO: find something appropriate
        for i in range(num_threads):
            worker = Thread(target=function, args=(q, params))
            worker.daemon = True
            worker.start()
        return q
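
Taken together, _init_threading(), __fetch_service() and __fetch_target() implement a feeder/consumer pipeline: daemon workers block on unbounded queues, join() waits for the work to drain, and None sentinels are pushed at the end so blocked workers wake up, see the run flag cleared, and exit. A minimal, self-contained sketch of that shutdown pattern (toy names, not part of the original code):

from queue import Queue
from threading import Thread

run_workers = True  # stands in for run_service_threads / run_target_threads

def worker(q, params):
    # Consume items until the run flag is cleared; None is only a wake-up sentinel.
    while run_workers:
        item = q.get()
        try:
            if item is not None:
                pass  # a real worker would list or parse the item here
        finally:
            q.task_done()

def init_threading(function, params=None, num_threads=4):
    params = {} if params is None else params
    q = Queue(maxsize=0)
    for _ in range(num_threads):
        t = Thread(target=function, args=(q, params))
        t.daemon = True
        t.start()
    return q

q = init_threading(worker)
for item in ('a', 'b', 'c'):
    q.put(item)
q.join()             # blocks until every queued item has been processed
run_workers = False  # tell the workers to stop looping
for _ in range(4):   # one sentinel per worker thread
    q.put(None)
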
Example #4
    def fetch_all(self,
                  credentials,
                  regions=None,
                  partition_name='aws',
                  targets=None):
        """
        :param credentials:             Credentials to use when connecting to the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.
        :return:
        """
        regions = [] if regions is None else regions
        global status, formatted_string

        # Initialize targets
        if not targets:
            targets = type(self).targets
        printInfo('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None

        # Connect to the service
        if self._is_provider('aws'):
            if self.service in [
                    's3'
            ]:  # S3 namespace is global but APIs aren't....
                api_clients = {}
                for region in build_region_list(self.service, regions,
                                                partition_name):
                    api_clients[region] = connect_service('s3',
                                                          credentials,
                                                          region,
                                                          silent=True)
                api_client = api_clients[list(api_clients.keys())[0]]
            elif self.service == 'route53domains':
                api_client = connect_service(
                    self.service, credentials, 'us-east-1',
                    silent=True)  # TODO: use partition's default region
            else:
                api_client = connect_service(self.service,
                                             credentials,
                                             silent=True)

        elif self._is_provider('gcp'):
            api_client = gcp_connect_service(service=self.service,
                                             credentials=credentials)
        elif self._is_provider('azure'):
            api_client = azure_connect_service(service=self.service,
                                               credentials=credentials)

        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        # Threading to parse resources (queue consumer)
        target_queue = self._init_threading(self.__fetch_target, params,
                                            self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': target_queue}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        service_queue = self._init_threading(self.__fetch_service, params,
                                             self.thread_config['list'])

        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)

        # Go
        for target in targets:
            service_queue.put(target)

        # Join
        service_queue.join()
        target_queue.join()

        if self._is_provider('aws'):
            # Show completion and force newline
            if self.service != 'iam':
                self.fetchstatuslogger.show(True)
        else:
            self.fetchstatuslogger.show(True)
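
As shown in Example #3, BaseConfig._is_provider() always returns False, so every branch above relies on concrete config classes overriding it. A hedged sketch of what such an override could look like (AWSBaseConfig and _provider are hypothetical names, not taken from the source):

class AWSBaseConfig(BaseConfig):
    # Hypothetical subclass: each provider-specific config pins its provider name.
    _provider = 'aws'

    def _is_provider(self, provider_name):
        # Overrides BaseConfig._is_provider(), which always returns False.
        return provider_name == self._provider
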
Example #5
class RegionalServiceConfig(object):
    """
    Single service configuration for non-global services

    :ivar regions:                      Dictionary of regions
    :ivar service:                      Name of the service
    """
    def __init__(self, service_metadata=None, thread_config=4):
        service_metadata = {} if service_metadata is None else service_metadata

        self.regions = {}
        self.thread_config = thread_configs[thread_config]
        self.service = re.sub(r'Config$', "", type(self).__name__).lower()
        self.fetchstatuslogger = None

        # Booleans that define if threads should keep running
        self.run_q_threads = True
        self.run_qr_threads = True

        if service_metadata != {}:
            self.resource_types = {'global': [], 'region': [], 'vpc': []}
            self.targets = {'first_region': (), 'other_regions': ()}
            for resource in service_metadata['resources']:
                only_first_region = False
                if re.match(r'.*?\.vpcs\.id\..*?',
                            service_metadata['resources'][resource]['path']):
                    self.resource_types['vpc'].append(resource)
                elif re.match(r'.*?\.regions\.id\..*?',
                              service_metadata['resources'][resource]['path']):
                    self.resource_types['region'].append(resource)
                else:
                    only_first_region = True
                    self.resource_types['global'].append(resource)
                resource_metadata = service_metadata['resources'][resource]
                if 'api_call' not in resource_metadata:
                    continue
                params = resource_metadata.get('params', {})
                ignore_exceptions = resource_metadata.get('no_exceptions', False)
                if not only_first_region:
                    self.targets['other_regions'] += (
                        (resource, resource_metadata['response'],
                         resource_metadata['api_call'], params,
                         ignore_exceptions), )
                self.targets['first_region'] += (
                    (resource, resource_metadata['response'],
                     resource_metadata['api_call'], params,
                     ignore_exceptions), )

    def init_region_config(self, region):
        """
        Initialize the region's configuration

        :param region:                  Name of the region
        """
        self.regions[region] = self.region_config_class(
            region_name=region, resource_types=self.resource_types)

    def fetch_all(self,
                  credentials,
                  regions=None,
                  partition_name='aws',
                  targets=None):
        """
        Fetch all the configuration supported by Scout2 for a given service

        :param credentials:             Credentials to use when connecting to the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.

        """
        # Initialize targets
        # Tweak params
        regions = [] if regions is None else regions
        realtargets = ()
        if not targets:
            targets = self.targets
        for i, target in enumerate(targets['first_region']):
            params = self.tweak_params(target[3], credentials)
            realtargets = realtargets + (
                (target[0], target[1], target[2], params, target[4]), )
        targets['first_region'] = realtargets
        realtargets = ()
        for i, target in enumerate(targets['other_regions']):
            params = self.tweak_params(target[3], credentials)
            realtargets = realtargets + (
                (target[0], target[1], target[2], params, target[4]), )
        targets['other_regions'] = realtargets

        print_info('Fetching %s config...' % format_service_name(self.service))
        self.fetchstatuslogger = FetchStatusLogger(targets['first_region'],
                                                   True)
        api_service = 'ec2' if self.service.lower() == 'vpc' else self.service.lower()

        # Init regions
        regions = build_region_list(
            api_service, regions,
            partition_name)  # TODO: move this code within this class
        self.fetchstatuslogger.counts['regions']['discovered'] = len(regions)

        # Threading to fetch & parse resources (queue consumer)
        q = self._init_threading(self._fetch_target, {},
                                 self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        qr = self._init_threading(
            self._fetch_region, {
                'api_service': api_service,
                'credentials': credentials,
                'q': q,
                'targets': ()
            }, self.thread_config['list'])

        # Go
        for i, region in enumerate(regions):
            qr.put((region, targets['first_region']
                    if i == 0 else targets['other_regions']))

        # Blocks until all items in the queue have been gotten and processed.
        qr.join()
        q.join()

        # Show completion and force newline
        self.fetchstatuslogger.show(True)

        # Threads should stop running as queues are empty
        self.run_qr_threads = False
        self.run_q_threads = False
        # Put x items in the queues to ensure threads run one last time (and exit)
        for i in range(self.thread_config['parse']):
            q.put(None)
        for j in range(self.thread_config['list']):
            qr.put(None)

    @staticmethod
    def _init_threading(function, params=None, num_threads=10):
        """
        Initialize queue and threads

        :param function:
        :param params:
        :param num_threads:
        :return:
        """
        params = {} if params is None else params
        q = Queue(maxsize=0)  # TODO: find something appropriate
        for i in range(num_threads):
            worker = Thread(target=function, args=(q, params))
            worker.daemon = True
            worker.start()
        return q

    def _fetch_region(self, q, params):
        global api_clients
        try:
            while self.run_qr_threads:
                try:
                    region, targets = q.get() or (None, None)
                    if region:
                        self.init_region_config(region)
                        api_client = connect_service(params['api_service'],
                                                     params['credentials'],
                                                     region,
                                                     silent=True)
                        api_clients[region] = api_client
                        # TODO : something here for single_region stuff
                        self.regions[region].fetch_all(
                            api_client, self.fetchstatuslogger, params['q'],
                            targets)  # params['targets'])
                        self.fetchstatuslogger.counts['regions'][
                            'fetched'] += 1
                except Exception as e:
                    print_exception(e)
                finally:
                    q.task_done()
        except Exception as e:
            print_exception(e)
            pass

    def _fetch_target(self, q, params):
        try:
            while self.run_q_threads:
                try:
                    method, region, target = q.get() or (None, None, None)
                    if method:
                        backup = copy.deepcopy(target)

                        if method.__name__ == 'store_target':
                            target_type = target['scout2_target_type']
                        else:
                            target_type = method.__name__.replace(
                                'parse_', '') + 's'
                        method(params, region, target)
                        self.fetchstatuslogger.counts[target_type][
                            'fetched'] += 1
                        self.fetchstatuslogger.show()
                except Exception as e:
                    if is_throttled(e):
                        q.put((method, region, backup))
                    else:
                        print_exception(e)
                finally:
                    q.task_done()
        except Exception as e:
            print_exception(e)
            pass

    def finalize(self):
        for t in self.fetchstatuslogger.counts:
            setattr(self, '%s_count' % t,
                    self.fetchstatuslogger.counts[t]['fetched'])
        delattr(self, 'fetchstatuslogger')
        for r in self.regions:
            if hasattr(self.regions[r], 'fetchstatuslogger'):
                delattr(self.regions[r], 'fetchstatuslogger')

    def tweak_params(self, params, credentials):
        if type(params) == dict:
            for k in params:
                params[k] = self.tweak_params(params[k], credentials)
        elif type(params) == list:
            newparams = []
            for v in params:
                newparams.append(self.tweak_params(v, credentials))
            params = newparams
        else:
            if params == '_AWS_ACCOUNT_ID_':
                params = get_aws_account_id(credentials)
        return params
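
tweak_params() walks dicts and lists recursively and replaces every '_AWS_ACCOUNT_ID_' leaf with the account ID resolved from the credentials. A self-contained sketch of that substitution (a stub account ID instead of get_aws_account_id, made-up filter values, and new containers returned rather than mutated in place):

def resolve_placeholders(params, account_id):
    # Mirrors tweak_params(): recurse through dicts and lists, swap the sentinel string.
    if isinstance(params, dict):
        return {k: resolve_placeholders(v, account_id) for k, v in params.items()}
    if isinstance(params, list):
        return [resolve_placeholders(v, account_id) for v in params]
    return account_id if params == '_AWS_ACCOUNT_ID_' else params

params = {'Filters': [{'Name': 'owner-id', 'Values': ['_AWS_ACCOUNT_ID_']}]}
print(resolve_placeholders(params, '123456789012'))
# -> {'Filters': [{'Name': 'owner-id', 'Values': ['123456789012']}]}
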