Example #1
    async def is_api_enabled(self, project_id, service):
        """
        Given a project ID and service name, this method tries to determine if the service's API is enabled
        """

        serviceusage_client = self._build_arbitrary_client('serviceusage',
                                                           'v1',
                                                           force_new=True)
        services = serviceusage_client.services()
        try:
            request = services.list(parent=f'projects/{project_id}')
            services_response = await GCPFacadeUtils.get_all(
                'services', request, services)
        except Exception as e:
            print_exception(
                f'Could not fetch the state of services for project \"{project_id}\", '
                f'including {format_service_name(service.lower())} in the execution',
                {'exception': e})
            return True

        # Hardcoded service-to-endpoint correspondences, as there is no straightforward way to derive them programmatically.
        if service == 'IAM':
            endpoint = 'iam'
        elif service == 'KMS':
            endpoint = 'cloudkms'
        elif service == 'CloudStorage':
            endpoint = 'storage-component'
        elif service == 'CloudSQL':
            endpoint = 'sql-component'
        elif service == 'ComputeEngine':
            endpoint = 'compute'
        elif service == 'KubernetesEngine':
            endpoint = 'container'
        elif service == 'StackdriverLogging':
            endpoint = 'logging'
        elif service == 'StackdriverMonitoring':
            endpoint = 'monitoring'
        else:
            print_debug(
                'Could not validate the state of the {} API for project \"{}\", '
                'including it in the execution'.format(
                    format_service_name(service.lower()), project_id))
            return True

        for s in services_response:
            if endpoint in s.get('name'):
                if s.get('state') == 'ENABLED':
                    return True
                else:
                    print_info(
                        '{} API not enabled for project \"{}\", skipping'.
                        format(format_service_name(service.lower()),
                               project_id))
                    return False

        print_error(
            f'Could not validate the state of the {format_service_name(service.lower())} API '
            f'for project \"{project_id}\", including it in the execution')
        return True
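
The hardcoded service-to-endpoint correspondence above could also be kept in a single lookup table; a minimal sketch using the same pairs (this is an illustration, not part of the original method):

    # Hypothetical lookup table equivalent to the if/elif chain above
    SERVICE_API_ENDPOINTS = {
        'IAM': 'iam',
        'KMS': 'cloudkms',
        'CloudStorage': 'storage-component',
        'CloudSQL': 'sql-component',
        'ComputeEngine': 'compute',
        'KubernetesEngine': 'container',
        'StackdriverLogging': 'logging',
        'StackdriverMonitoring': 'monitoring',
    }

    print(SERVICE_API_ENDPOINTS.get('KubernetesEngine'))  # -> container
    print(SERVICE_API_ENDPOINTS.get('BigQuery'))          # -> None (unknown: include in the execution)
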
Example #2
    async def _fetch(self, service, regions=None, excluded_regions=None):
        try:
            print_info('Fetching resources for the {} service'.format(
                format_service_name(service)))
            service_config = getattr(self, service)
            # call fetch method for the service
            if 'fetch_all' in dir(service_config):
                method_args = {}

                if regions:
                    method_args['regions'] = regions
                if excluded_regions:
                    method_args['excluded_regions'] = excluded_regions

                if self._is_provider('aws'):
                    if service != 'iam':
                        method_args['partition_name'] = get_partition_name(
                            self.credentials.session)

                await service_config.fetch_all(**method_args)
                if hasattr(service_config, 'finalize'):
                    await service_config.finalize()
            else:
                print_debug('No method to fetch service %s.' % service)
        except Exception as e:
            print_exception(f'Could not fetch {service} configuration: {e}')
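
The `**method_args` expansion above means each service's `fetch_all` only receives the keyword arguments that were actually populated; a tiny self-contained illustration of that forwarding pattern (the function below is invented for the example):

    import asyncio

    # Invented stand-in for a service's fetch_all coroutine
    async def fetch_all(regions=None, excluded_regions=None, partition_name='aws'):
        # Keys missing from method_args simply fall back to these defaults
        print(regions, excluded_regions, partition_name)

    method_args = {'regions': ['us-east-1']}
    asyncio.run(fetch_all(**method_args))  # prints: ['us-east-1'] None aws
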
Example #3
    async def _fetch(self, service, regions):
        try:
            print_info('Fetching resources for the {} service'.format(
                format_service_name(service)))
            service_config = getattr(self, service)
            # call fetch method for the service
            if 'fetch_all' in dir(service_config):
                method_args = {
                    'credentials': self.credentials,
                    'regions': regions
                }

                if self._is_provider('aws'):
                    if service != 'iam':
                        method_args['partition_name'] = get_partition_name(
                            self.credentials)

                await service_config.fetch_all(**method_args)
                if hasattr(service_config, 'finalize'):
                    await service_config.finalize()
            else:
                print_debug('No method to fetch service %s.' % service)
        except Exception as e:
            print_error('Error: could not fetch %s configuration.' % service)
            print_exception(e)
Example #4
    def fetch_all(self, credentials, regions = None, partition_name = 'aws', targets = None):
        """
        Fetch all the configuration supported by Scout2 for a given service

        :param credentials:             F
        :param service:                 Name of the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.

        """
        # Initialize targets
        # Tweak params
        regions = [] if regions is None else regions
        realtargets = ()
        if not targets:
            targets = self.targets
        for i, target in enumerate(targets['first_region']):
            params = self.tweak_params(target[3], credentials)
            realtargets = realtargets + ((target[0], target[1], target[2], params, target[4]),)
        targets['first_region'] = realtargets
        realtargets = ()
        for i, target in enumerate(targets['other_regions']):
            params = self.tweak_params(target[3], credentials)
            realtargets = realtargets + ((target[0], target[1], target[2], params, target[4]),)
        targets['other_regions'] = realtargets

        printInfo('Fetching %s config...' % format_service_name(self.service))
        self.fetchstatuslogger = FetchStatusLogger(targets['first_region'], True)
        api_service = 'ec2' if self.service.lower() == 'vpc' else self.service.lower()
        # Init regions
        regions = build_region_list(api_service, regions, partition_name) # TODO: move this code within this class
        self.fetchstatuslogger.counts['regions']['discovered'] = len(regions)
        # Threading to fetch & parse resources (queue consumer)
        q = self._init_threading(self._fetch_target, {}, self.thread_config['parse'])
        # Threading to list resources (queue feeder)
        qr = self._init_threading(self._fetch_region,
                                  {'api_service': api_service, 'credentials': credentials, 'q': q, 'targets': ()},
                                  self.thread_config['list'])
        # Go
        for i, region in enumerate(regions):
            qr.put((region, targets['first_region'] if i == 0 else targets['other_regions']))
        # Join
        qr.join()
        q.join()
        # Show completion and force newline
        self.fetchstatuslogger.show(True)
Example #5
    async def fetch(self, services=None, regions=None):
        # If services is set to None, fetch all services:
        services = vars(self) if services is None else services
        regions = [] if regions is None else regions

        # First, print services that are going to get skipped:
        for service in vars(self):
            if service not in services:
                print_debug('Skipping the {} service'.format(
                    format_service_name(service)))

        # Then, fetch concurrently all services:
        tasks = {
            asyncio.ensure_future(self._fetch(service, regions))
            for service in services
        }
        await asyncio.wait(tasks)
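
For context, a coroutine like this is normally driven from a synchronous entry point; a minimal runnable sketch of the same fan-out pattern, with a stub provider standing in for the real one:

    import asyncio

    class StubProvider:
        """Hypothetical stand-in exposing a fetch() coroutine like the one above."""

        async def _fetch(self, service, regions):
            print(f'fetching {service} in regions {regions}')

        async def fetch(self, services, regions):
            # Schedule one task per service and wait for all of them to finish
            tasks = {asyncio.ensure_future(self._fetch(s, regions)) for s in services}
            await asyncio.wait(tasks)

    asyncio.run(StubProvider().fetch(['iam', 'ec2'], regions=['us-east-1']))
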
Example #6
    async def fetch(self, services: list, regions: list):

        if not services:
            print_debug('No services to scan')
        else:
            # Print services that are going to get skipped:
            for service in vars(self):
                if service not in services:
                    print_debug('Skipping the {} service'.format(
                        format_service_name(service)))

            # Remove "credentials" as it isn't a service
            if 'credentials' in services: services.remove('credentials')

            # Then, fetch concurrently all services:
            if services:
                tasks = {
                    asyncio.ensure_future(self._fetch(service, regions))
                    for service in services
                }
                await asyncio.wait(tasks)
Example #7
    def fetch_all(self, credentials, regions=None, partition_name='aws', targets=None):
        """
        :param credentials:             F
        :param service:                 Name of the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.
        :return:
        """
        regions = [] if regions is None else regions
        global status, formatted_string

        # Initialize targets
        if not targets:
            targets = type(self).targets
        print_info('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None

        # FIXME the code below should be moved into each provider's code

        # Connect to the service
        if self._is_provider('aws'):
            if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
                api_clients = {}
                for region in build_region_list(self.service, regions, partition_name):
                    api_clients[region] = connect_service('s3', credentials, region, silent=True)
                api_client = api_clients[list(api_clients.keys())[0]]
            elif self.service == 'route53domains':
                api_client = connect_service(self.service, credentials, 'us-east-1',
                                             silent=True)  # TODO: use partition's default region
            else:
                api_client = connect_service(self.service, credentials, silent=True)

        elif self._is_provider('gcp'):
            api_client = gcp_connect_service(service=self.service, credentials=credentials)

        elif self._is_provider('azure'):
            api_client = azure_connect_service(service=self.service, credentials=credentials)

        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        # Threading to parse resources (queue consumer)
        target_queue = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': target_queue}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        service_queue = self._init_threading(self.__fetch_service, params, self.thread_config['list'])

        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)

        # Go
        for target in targets:
            service_queue.put(target)

        # Blocks until all items in the queue have been gotten and processed.
        service_queue.join()
        target_queue.join()

        # Show completion and force newline
        if self._is_provider('aws'):
            # Show completion and force newline
            if self.service != 'iam':
                self.fetchstatuslogger.show(True)
        else:
            self.fetchstatuslogger.show(True)

        # Threads should stop running as queues are empty
        self.run_target_threads = False
        self.run_service_threads = False
        # Put sentinel items (None) in the queues so each thread runs one last time and exits
        for i in range(self.thread_config['parse']):
            target_queue.put(None)
        for j in range(self.thread_config['list']):
            service_queue.put(None)
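
The two queues above implement a standard producer/consumer pipeline with `queue.Queue`: one thread pool lists resources and feeds a second pool that parses each target, and `None` sentinels let the workers exit once both queues have been joined. A stripped-down sketch of that pattern (the worker functions and data are invented):

    import queue
    import threading

    def worker(q, handler):
        while True:
            item = q.get()
            if item is None:        # sentinel: stop this thread
                q.task_done()
                break
            handler(item)
            q.task_done()

    def start_pool(size, handler):
        q = queue.Queue()
        for _ in range(size):
            threading.Thread(target=worker, args=(q, handler), daemon=True).start()
        return q

    # Illustrative handlers: "parsing" just prints, "listing" feeds the parse queue
    parse_queue = start_pool(4, lambda target: print('parsed', target))
    list_queue = start_pool(2, lambda region: parse_queue.put(f'{region}/instance'))

    for region in ['us-east-1', 'eu-west-1']:
        list_queue.put(region)

    list_queue.join()               # wait until every region has been listed
    parse_queue.join()              # then until every discovered target is parsed

    # Sentinels so each worker thread runs one last time and exits cleanly
    for _ in range(2):
        list_queue.put(None)
    for _ in range(4):
        parse_queue.put(None)
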
Example #8
    def fetch_all(self,
                  credentials,
                  regions=[],
                  partition_name='aws',
                  targets=None):
        """
        :param credentials:             F
        :param service:                 Name of the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.
        :return:
        """
        global status, formatted_string

        # Initialize targets
        if not targets:
            targets = type(self).targets
        printInfo('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None

        # Connect to the service
        if self._is_provider('aws'):
            if self.service in [
                    's3'
            ]:  # S3 namespace is global but APIs aren't....
                api_clients = {}
                for region in build_region_list(self.service, regions,
                                                partition_name):
                    api_clients[region] = connect_service('s3',
                                                          credentials,
                                                          region,
                                                          silent=True)
                api_client = api_clients[list(api_clients.keys())[0]]
            elif self.service == 'route53domains':
                api_client = connect_service(
                    self.service, credentials, 'us-east-1',
                    silent=True)  # TODO: use partition's default region
            else:
                api_client = connect_service(self.service,
                                             credentials,
                                             silent=True)

        elif self._is_provider('gcp'):
            api_client = gcp_connect_service(service=self.service,
                                             credentials=credentials)
        elif self._is_provider('azure'):
            api_client = azure_connect_service(service=self.service,
                                               credentials=credentials)

        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        # Threading to parse resources (queue consumer)
        target_queue = self._init_threading(self.__fetch_target, params,
                                            self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': target_queue}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        service_queue = self._init_threading(self.__fetch_service, params,
                                             self.thread_config['list'])

        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)

        # Go
        for target in targets:
            service_queue.put(target)

        # Join
        service_queue.join()
        target_queue.join()

        if self._is_provider('aws'):
            # Show completion and force newline
            if self.service != 'iam':
                self.fetchstatuslogger.show(True)
        else:
            self.fetchstatuslogger.show(True)
Example #9
    def set_definition(self,
                       rule_definitions,
                       attributes=None,
                       ip_ranges=None,
                       params=None):
        """
        Update every attribute of the rule by setting the argument values as necessary

        :param rule_definitions:            TODO
        :param attributes:                  TODO
        :param ip_ranges:                   TODO
        :param params:                      TODO
        :return:
        """
        attributes = [] if attributes is None else attributes
        ip_ranges = [] if ip_ranges is None else ip_ranges
        params = {} if params is None else params
        try:
            string_definition = rule_definitions[
                self.filename].string_definition
            # Load condition dependencies
            definition = json.loads(string_definition)
            definition['conditions'] += self.conditions
            loaded_conditions = []
            for condition in definition['conditions']:
                if condition[0].startswith('_INCLUDE_('):
                    include = re.findall(r'_INCLUDE_\((.*?)\)',
                                         condition[0])[0]
                    # new_conditions = load_data(include, key_name = 'conditions')
                    rules_path = f'{self.data_path}/{include}'
                    with open(rules_path) as f:
                        new_conditions = f.read()
                        for (i, value) in enumerate(condition[1]):
                            new_conditions = re.sub(condition[1][i],
                                                    condition[2][i],
                                                    new_conditions)
                        new_conditions = json.loads(
                            new_conditions)['conditions']
                    loaded_conditions.append(new_conditions)
                else:
                    loaded_conditions.append(condition)
            definition['conditions'] = loaded_conditions
            string_definition = json.dumps(definition)
            # Set parameters
            parameters = re.findall(r'(_ARG_([a-zA-Z0-9]+)_)',
                                    string_definition)
            for param in parameters:
                index = int(param[1])
                if len(self.args) <= index:
                    string_definition = string_definition.replace(param[0], '')
                elif type(self.args[index]) == list:
                    value = '[ %s ]' % ', '.join('"%s"' % v
                                                 for v in self.args[index])
                    string_definition = string_definition.replace(
                        '"%s"' % param[0], value)
                else:
                    string_definition = string_definition.replace(
                        param[0], self.args[index])
            # Strip dots if necessary
            stripdots = re_strip_dots.findall(string_definition)
            for value in stripdots:
                string_definition = string_definition.replace(
                    value[0], value[1].replace('.', ''))
            definition = json.loads(string_definition)
            # Set special values (IP ranges, AWS account ID, ...)
            for condition in definition['conditions']:
                if type(condition) != list \
                        or len(condition) == 1 \
                        or type(condition[2]) == list \
                        or type(condition[2]) == dict:
                    continue
                for testcase in testcases:
                    result = testcase['regex'].match(condition[2])
                    if result and (testcase['name'] == 'ip_ranges_from_file'
                                   or testcase['name']
                                   == 'ip_ranges_from_local_file'):
                        filename = result.groups()[0]
                        conditions = result.groups()[1] if len(
                            result.groups()) > 1 else []
                        # TODO :: handle comma here...
                        if filename == ip_ranges_from_args:
                            prefixes = []
                            for filename in ip_ranges:
                                prefixes += read_ip_ranges(
                                    filename,
                                    local_file=True,
                                    ip_only=True,
                                    conditions=conditions)
                            condition[2] = prefixes
                            break
                        else:
                            local_file = True if testcase[
                                'name'] == 'ip_ranges_from_local_file' else False
                            condition[2] = read_ip_ranges(
                                filename,
                                local_file=local_file,
                                ip_only=True,
                                conditions=conditions)
                            break
                    elif result:
                        condition[2] = params[testcase['name']]
                        break

            if len(attributes) == 0:
                attributes = [attr for attr in definition]
            for attr in attributes:
                if attr in definition:
                    setattr(self, attr, definition[attr])
            if hasattr(self, 'path'):
                self.service = format_service_name(self.path.split('.')[0])
            if not hasattr(self, 'key'):
                setattr(self, 'key', self.filename)
            setattr(self, 'key', self.key.replace('.json', ''))
            if self.key_suffix:
                setattr(self, 'key', f'{self.key}-{self.key_suffix}')
        except Exception as e:
            print_exception(f'Failed to set definition {self.filename}: {e}')
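
The parameter-substitution step above relies on placeholders of the form `_ARG_0_`, `_ARG_1_`, … embedded in the rule's JSON string; a minimal standalone sketch of that mechanism (the rule string and arguments are made up for the example):

    import json
    import re

    # Made-up rule string and arguments for illustration
    string_definition = '{"description": "Port _ARG_0_ is open to _ARG_1_"}'
    args = ['22', '0.0.0.0/0']

    # Replace every _ARG_n_ placeholder with the n-th rule argument
    for placeholder, index in re.findall(r'(_ARG_([0-9]+)_)', string_definition):
        string_definition = string_definition.replace(placeholder, args[int(index)])

    definition = json.loads(string_definition)
    print(definition['description'])  # -> Port 22 is open to 0.0.0.0/0
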