def elapsed_wrapper(*arguments, **kwargs):
    """Call the wrapped ``function``, log its wall-clock duration, and
    return its result.

    Uses the enclosing decorator's ``message`` as the log template when
    provided; otherwise builds a default one naming the function. The
    template is ``str.format``-ed with the elapsed seconds.
    """
    start = datetime.now()
    # FIX: forward keyword arguments too — the original dropped **kwargs,
    # so wrapped functions could not be called with keyword arguments.
    return_val = function(*arguments, **kwargs)
    end = datetime.now()
    # FIX: the original concatenation was missing the space before "took",
    # producing log lines like "The function `foo`took 3.2 seconds".
    _message = message or ('The function ' + f'`{function.__name__}`' +
                           ' took {} seconds')
    logger.info(_message.format((end - start).total_seconds()))
    return return_val
def _get_and_save_data(output_file, handler, method_to_call, parameters,
                       checks, region, summary: Queue):
    """Invoke one AWS API call and persist the response to disk.

    The call is skipped entirely when ``output_file`` already exists.
    A summary of the call (including any failure recorded downstream by
    ``_get_data``) is pushed onto ``summary`` for later reporting.
    """
    # TODO: Decorate this with rate limiters from
    # https://github.com/Netflix-Skunkworks/cloudaux/blob/master/cloudaux/aws/decorators.py
    if os.path.isfile(output_file):
        # A previous run already fetched this response; do not refetch.
        logger.warning(" Response already present at {}".format(output_file))
        return
    call_summary = dict(service=handler.meta.service_model.service_name,
                        action=method_to_call,
                        parameters=parameters,
                        region=region)
    response_data = AwsScanner._get_data(output_file, handler, method_to_call,
                                         parameters, checks, call_summary)
    AwsScanner._remove_unused_values(response_data)
    AwsScanner._save_results_to_file(output_file, response_data)
    logger.info("finished call for {}".format(output_file))
    summary.put_nowait(call_summary)
def get_session_using_assume_role(role_arn: str, external_id: str, region: Optional[str] = None, session_duration: int = 3600):
    """Create a boto3 session by assuming the given IAM role through STS.

    The region, when supplied, is baked into the session; connectivity is
    verified before the session is returned.
    """
    role_session_name = "DragoneyeSession"
    logger.info(
        'Will try to assume role using ARN: {} and external id {}...'.format(
            role_arn, external_id))
    sts = boto3.client('sts')
    assume_role_response = sts.assume_role(RoleArn=role_arn,
                                           RoleSessionName=role_session_name,
                                           DurationSeconds=session_duration,
                                           ExternalId=external_id)
    creds = assume_role_response['Credentials']
    session_kwargs = {
        "aws_access_key_id": creds['AccessKeyId'],
        "aws_secret_access_key": creds['SecretAccessKey'],
        "aws_session_token": creds['SessionToken'],
    }
    if region:
        session_kwargs['region_name'] = region
    session = boto3.Session(**session_kwargs)
    AwsSessionFactory.test_connectivity(session)
    logger.info('Session was created successfully')
    return session
def _should_run_command_on_region(self, runner: dict, region_dict: dict) -> bool: if runner["Service"] in self.universal_services: if region_dict["RegionName"] != self.default_region: return False elif runner["Service"] != 'eks' and region_dict[ "RegionName"] not in self.session.get_available_regions( runner["Service"]): logger.info( "Skipping region {}, as {} does not exist there".format( region_dict["RegionName"], runner["Service"])) return False return True
def _create_regions_file_structure(self, base_path: str):
    """Write describe-regions.json under ``base_path`` and create one
    sub-directory per region; returns the list of region dicts."""
    region_list = self._get_region_list()
    with open(f"{base_path}/describe-regions.json", "w+") as out:
        out.write(json.dumps(region_list, indent=4, sort_keys=True))
    logger.info("* Creating directory for each region name")
    regions: List[dict] = region_list["Regions"]
    for region in regions:
        # Fall back to "Unknown" if a region record lacks its name.
        region_name = region.get("RegionName", "Unknown")
        make_directory(os.path.join(base_path, region_name))
    return regions
def get_session(profile_name: Optional[str] = None, region: Optional[str] = None):
    """Create a boto3 session from a named profile or the default auth chain.

    Connectivity is verified before the session is returned.
    """
    session_kwargs = {}
    if region:
        session_kwargs["region_name"] = region
    if profile_name:
        session_kwargs["profile_name"] = profile_name
        auth_suffix = f'using profile {profile_name}'
    else:
        auth_suffix = 'using AWS auth-chain'
    logger.info(f'Will try to create a session {auth_suffix}...')
    session = boto3.Session(**session_kwargs)
    AwsSessionFactory.test_connectivity(session)
    logger.info('Session was created successfully')
    return session
def _call_boto_function(output_file, handler, method_to_call, parameters): data = {} if handler.can_paginate(method_to_call): paginator = handler.get_paginator(method_to_call) page_iterator = paginator.paginate(**parameters) for response in page_iterator: if not data: data = response else: logger.info(" ...paginating {}".format(output_file)) for k in data: if isinstance(data[k], list): data[k].extend(response[k]) else: function = getattr(handler, method_to_call) data = function(**parameters) return data
def _get_region_list(self):
    """Fetch the region list from EC2, narrowed by the optional
    comma-separated regions filter from settings."""
    wanted = None
    if self.settings.regions_filter:
        wanted = self.settings.regions_filter.lower().split(",")
        # Force include of default region -- seems to be required
        if self.default_region not in wanted:
            wanted.append(self.default_region)
    logger.info("* Getting region names")
    ec2 = self.session.client("ec2", region_name=self.default_region)
    region_list = ec2.describe_regions()
    if wanted is not None:
        region_list["Regions"] = [
            region for region in region_list["Regions"]
            if region["RegionName"] in wanted
        ]
    return region_list
def _print_summary(summary: Queue):
    """Log a one-line tally of all API calls, then details of failures."""
    logger.info(
        "--------------------------------------------------------------------"
    )
    failures = [c for c in summary.queue if "exception" in c]
    logger.info("Summary: {} APIs called. {} errors".format(
        len(summary.queue), len(failures)))
    if failures:
        logger.warning("Failures:")
        for call_summary in failures:
            logger.warning(" {}.{}({}): {}".format(
                call_summary["service"],
                call_summary["action"],
                call_summary["parameters"],
                call_summary["exception"],
            ))
def get_authorization_token(tenant_id: str, client_id: str, client_secret: str) -> str:
    """Obtain an Azure AD JWT bearer token via the client-credentials flow.

    Raises DragoneyeException when the token endpoint does not return 200.
    """
    logger.info('Will try to generate JWT bearer token...')
    token_url = f'https://login.microsoftonline.com/{tenant_id}/oauth2/token'
    payload = {
        'grant_type': 'client_credentials',
        'client_id': client_id,
        'client_secret': client_secret,
        'resource': 'https://management.azure.com/'
    }
    response = requests.post(url=token_url, data=payload)
    if response.status_code != 200:
        raise DragoneyeException(
            f'Failed to authenticate. status code: {response.status_code}\n'
            f'Reason: {response.text}')
    access_token = json.loads(response.text)['access_token']
    logger.info('JWT bearer token generated successfully')
    return f'Bearer {access_token}'
def _on_backoff_giveup(details: dict) -> None:
    """Backoff hook: log that the request was abandoned after retries."""
    message = 'Given up on request for {args[0]}'.format(**details)
    logger.info(message)
def _on_backoff_predicate(details: dict) -> None:
    """Backoff hook: log a failed attempt together with its duration."""
    message = ('Attempt #{tries} failed. Invoked request took '
               '{elapsed:0.6f} seconds for call {args[0]}').format(**details)
    logger.info(message)
def _on_backoff_success(details: dict) -> None:
    """Backoff hook: log how long a successful request took."""
    message = ('Invoked request took {elapsed:0.6f} seconds '
               'for call {args[0]}').format(**details)
    logger.info(message)
def _run_scan_commands(self, region, runner, account_dir, summary: Queue):
    """Run one scan command (service/request pair) against one region.

    Builds a boto3 client with the configured retry/pool settings, works
    out the output file path(s), and hands the actual API calls to
    ``_get_and_save_data`` via a thread pool. Per-call outcomes are
    accumulated in ``summary``.
    """
    # Deep-copy the shared dicts so concurrent runners cannot mutate
    # each other's view of the region/runner definitions.
    region = copy.deepcopy(region)
    runner = copy.deepcopy(runner)
    region_name = region["RegionName"]
    logger.info("* Getting {}:{}:{} info".format(region["RegionName"],
                                                 runner["Service"],
                                                 runner["Request"]))
    if not self._should_run_command_on_region(runner, region):
        return
    handler = self.session.client(
        runner["Service"],
        region_name=region["RegionName"],
        config=Config(
            retries={
                'max_attempts': self.settings.max_attempts,
                'mode': 'standard'
            },
            max_pool_connections=self.settings.max_pool_connections))
    filepath = os.path.join(account_dir, region["RegionName"],
                            f'{runner["Service"]}-{runner["Request"]}')
    # boto3 method names are snake_case versions of the API request name.
    method_to_call = snakecase(runner["Request"])
    # _get_parameter_group fills parameter_keys with the expected key set
    # for each parameter group (out-parameter).
    parameter_keys = set()
    param_groups = self._get_parameter_group(runner, account_dir, region,
                                             parameter_keys)
    tasks: List[ThreadedFunctionData] = []
    if runner.get("Parameters"):
        # Parameterized call: one output file per parameter group, all
        # collected under a directory named after the request.
        make_directory(filepath)
        for param_group in param_groups:
            # Skip incomplete groups that are missing expected keys.
            if set(param_group.keys()) != parameter_keys:
                continue
            # Encode the parameter values into a filesystem-safe name;
            # list-valued parameters contribute only their key.
            unparsed_file_name = '_'.join([
                f'{k}-{v}' if not isinstance(v, list) else k
                for k, v in param_group.items()
            ])
            file_name = urllib.parse.quote_plus(unparsed_file_name)
            output_file = f"{filepath}/{file_name}.json"
            tasks.append(
                ThreadedFunctionData(
                    AwsScanner._get_and_save_data,
                    (output_file, handler, method_to_call, param_group,
                     runner.get("Check", None), region_name, summary),
                    'exception on command {}'.format(runner),
                    'timeout on command {}'.format(runner)))
    else:
        # Unparameterized call: a single output file for the request.
        output_file = filepath + ".json"
        tasks.append(
            ThreadedFunctionData(
                AwsScanner._get_and_save_data,
                (output_file, handler, method_to_call, {},
                 runner.get("Check", None), region_name, summary),
                'exception on command {}'.format(runner),
                'timeout on command {}'.format(runner)))
    deque_tasks: Deque[List[ThreadedFunctionData]] = collections.deque()
    deque_tasks.append(tasks)
    # Lambda is throttled harder, so use a smaller worker pool for it.
    max_workers = 10 if runner['Service'] == 'lambda' else 20
    execute_parallel_functions_in_threads(deque_tasks, max_workers,
                                          self.settings.command_timeout)
def _get_data(output_file, handler, method_to_call, parameters, checks,
              call_summary):
    """Call the AWS API (with retries) and return the response data.

    Retries up to MAX_RETRIES times while any of ``checks`` fails.
    Expected/benign ClientErrors (missing bucket policy, restricted KMS
    access, etc.) are logged and swallowed; any other exception is logged
    and recorded in ``call_summary["exception"]`` so the caller can report
    it. Returns the (possibly None) response data in every case.
    """
    logger.info(" Making call for {}".format(output_file))
    data = None
    try:
        for retries in range(MAX_RETRIES):
            data = AwsScanner._call_boto_function(output_file, handler,
                                                  method_to_call, parameters)
            if not checks or AwsScanner._is_data_passing_check(
                    data, checks):
                break
            elif retries == MAX_RETRIES - 1:
                # Out of retries with checks still failing: surface it.
                raise Exception(
                    "One of the following checks has repeatedly failed: {}"
                    .format(', '.join(f'{check["Name"]}={check["Value"]}'
                                      for check in checks)))
            else:
                logger.info(" Sleeping and retrying")
                time.sleep(3)
    except ClientError as ex:
        # Classify the botocore ClientError by matching known error codes
        # in its string form; benign cases are logged without recording a
        # failure in call_summary.
        if "NoSuchBucketPolicy" in str(ex):
            # This error occurs when you try to get the bucket policy for a bucket that has no bucket policy, so this can be ignored.
            logger.warning(" - No bucket policy")
        elif "NoSuchPublicAccessBlockConfiguration" in str(ex):
            # This error occurs when you try to get the account Public Access Block policy for an account that has none, so this can be ignored.
            logger.warning(" - No public access block set")
        elif ("ServerSideEncryptionConfigurationNotFoundError" in str(ex)
              and call_summary["service"] == "s3"
              and call_summary["action"] == "get_bucket_encryption"):
            logger.warning(" - No encryption set")
        elif ("NoSuchEntity" in str(ex)
              and call_summary["action"] == "get_account_password_policy"):
            logger.warning(" - No password policy set")
        elif ("AccessDeniedException" in str(ex)
              and call_summary["service"] == "organizations"
              and call_summary["action"] == "list_accounts"):
            logger.warning(
                " - Denied, which likely means this is not the organization root"
            )
        elif ("RepositoryPolicyNotFoundException" in str(ex)
              and call_summary["service"] == "ecr"
              and call_summary["action"] == "get_repository_policy"):
            logger.warning(" - No policy exists")
        elif ("ResourceNotFoundException" in str(ex)
              and call_summary["service"] == "lambda"
              and call_summary["action"] == "get_policy"):
            logger.warning(" - No policy exists")
        elif ("AccessDeniedException" in str(ex)
              and call_summary["service"] == "kms"
              and call_summary["action"] == "list_key_policies"):
            logger.warning(
                " - Denied, which should mean this KMS has restricted access"
            )
        elif ("AccessDeniedException" in str(ex)
              and call_summary["service"] == "kms"
              and call_summary["action"] == "list_grants"):
            logger.warning(
                " - Denied, which should mean this KMS has restricted access"
            )
        elif ("AccessDeniedException" in str(ex)
              and call_summary["service"] == "kms"
              and call_summary["action"] == "get_key_policy"):
            logger.warning(
                " - Denied, which should mean this KMS has restricted access"
            )
        elif ("AccessDeniedException" in str(ex)
              and call_summary["service"] == "kms"
              and call_summary["action"] == "get_key_rotation_status"):
            logger.warning(
                " - Denied, which should mean this KMS has restricted access"
            )
        elif "AWSOrganizationsNotInUseException" in str(ex):
            logger.warning(
                ' - Your account is not a member of an organization.')
        elif ("EntityNotFoundException" in str(ex)
              and call_summary["service"] == "glue"
              and call_summary["action"] == "get_resource_policy"):
            logger.warning(
                f' - Glue policy does not exist on region {call_summary["region"]}'
            )
        elif ("NoSuchEntity" in str(ex)):
            logger.warning(f" - {str(ex)}")
        elif ("NoSuchAccessPointPolicy" in str(ex)):
            logger.warning(f" - {str(ex)}")
        else:
            # Unrecognized client error: record it as a failure.
            logger.warning(f"ClientError {retries}: {ex}")
            call_summary["exception"] = ex
    except EndpointConnectionError as ex:
        # Service endpoint unreachable in this region.
        logger.warning("EndpointConnectionError: {}".format(ex))
        call_summary["exception"] = ex
    except Exception as ex:
        # Catch-all boundary: log and record so one bad call does not
        # abort the whole scan.
        logger.warning("Exception: {}".format(ex))
        call_summary["exception"] = ex
    return data