def get_bucket_acl(trace_id, check_history_id, organization_id, project_id,
                   aws_account, region_name, bucket_name, s3_client):
    """Return the ACL of an S3 bucket, preferring the S3-cached raw file.

    When the raw-resource JSON already exists in the check bucket it is
    read back; otherwise the ACL is fetched via the S3 API. The data is
    then saved back to S3 as the resource-info file.

    Returns the bucket ACL JSON structure.
    Raises PmError when retrieval or the S3 save fails.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")
    # Acquire resource info: reuse the cached file when it exists.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            bucket_acl = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                             s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            bucket_acl = S3Utils.get_bucket_acl(trace_id, s3_client,
                                                bucket_name, aws_account,
                                                region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Save the obtained ACL to S3 (access-control-list resource file).
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_acl,
                              s3_file_name)
    except PmError as e:
        pm_logger.error("[%s] S3バケットACL情報のS3保存に失敗しました。(%s)/(%s)",
                        aws_account, region_name, bucket_name)
        # Fix: propagate the failure like every sibling resource helper
        # does; previously the error was logged and silently swallowed.
        raise common_utils.write_log_pm_error(e, pm_logger)
    return bucket_acl
def get_account_password_policy(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path):
    """Return the IAM account password policy, preferring the S3 cache.

    If the raw-resource JSON already exists in the check bucket it is read
    back; otherwise the policy is fetched via IAM. The result is then
    saved to S3. Raises PmError on any retrieval or upload failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "IBP/IAM_AccountPasswordPolicy.json")
    cached = aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                             s3_file_name) is True
    if cached:
        try:
            password_policy = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                  s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            password_policy = IAMUtils.get_account_password_policy(
                trace_id, session, awsaccount)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Persist the password-policy info to S3 for later reuse.
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", password_policy,
                              s3_file_name)
    except PmError as e:
        pm_logger.error("[%s] アカウントパスワードポリシー情報のS3保存に失敗しました。", awsaccount)
        raise common_utils.write_log_pm_error(e, pm_logger)
    return password_policy
def get_trail_event_selectors(trace_id, check_history_id, organization_id,
                              project_id, awsaccount, region_name,
                              trail_client, cloud_trail):
    """Return the event selectors of one CloudTrail trail.

    A previously saved raw file on S3 is reused when present; otherwise
    the selectors are fetched through the CloudTrail API. The data is then
    written back to S3. Raises PmError when reading or saving fails.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    raw_file_key = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudTrail_EventSelectors_" + region_name + "_" +
        cloud_trail["Name"] + ".json")
    event_selectors = []
    # Reuse the resource file when it already exists on S3.
    if aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                       raw_file_key) is True:
        try:
            event_selectors = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                  raw_file_key)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        event_selectors = CloudTrailUtils.get_event_selectors(
            trace_id, awsaccount, trail_client, region_name,
            cloud_trail["TrailARN"])
    # Upload the resource file back to S3.
    try:
        FileUtils.upload_s3(trace_id, event_selectors, raw_file_key, True)
    except PmError as e:
        pm_logger.error("[%s/%s] CloudTrailのイベントセレクタ情報のS3保存に失敗しました。",
                        awsaccount, region_name)
        raise common_utils.write_log_pm_error(e, pm_logger)
    return event_selectors
def get_list_buckets(trace_id, check_history_id, organization_id, project_id,
                     s3_client, aws_account):
    """Return the account's S3 bucket list, preferring the S3-cached copy.

    The raw-resource JSON cached on S3 is reused when present; otherwise
    the bucket list is fetched via the S3 API. The result is then saved
    back to S3. Raises PmError on any retrieval or upload failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ListBuckets.json")
    # Acquire resource info: reuse the cached file when it exists.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            list_buckets = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                               s3_file_name)
        except PmError as e:
            # Consistency fix: PmError is logged/re-raised through
            # write_log_pm_error, matching every sibling resource helper
            # (write_log_exception is the generic-Exception path).
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Fetch the S3 bucket list from the API.
        try:
            list_buckets = S3Utils.list_buckets(trace_id, s3_client,
                                                aws_account)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Save the bucket-list info to S3.
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_buckets,
                              s3_file_name)
    except PmError as e:
        pm_logger.error("[%s] S3バケット一覧情報のS3保存に失敗しました。", aws_account)
        raise common_utils.write_log_pm_error(e, pm_logger)
    return list_buckets
def test_read_json_error_client(self):
    """read_json must raise PmError when the S3 client itself errors."""
    # Arrange: an S3 client stub that fails on use.
    self.create_mock_boto3_client_error()
    expected_error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
    expected_operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
    FileUtils.global_s3_client = None  # force the client to be re-created
    # Act / Assert: the call wraps the client error into PmError.
    with self.assertRaises(PmError) as ctx:
        FileUtils.read_json(trace_id, 'S3_CHECK_BUCKET', s3_file_name)
    cause = ctx.exception.cause_error
    self.assertEqual(expected_error_response['Error'],
                     cause.response['Error'])
    self.assertEqual(expected_operation_name, cause.operation_name)
def test_read_json_error_call_get_object(self):
    """read_json must wrap a get_object ClientError into PmError."""
    expected_error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
    expected_operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
    # Arrange: get_object raises a ClientError when invoked.
    with patch.object(client_s3, 'get_object') as mock_get_object:
        mock_get_object.side_effect = ClientError(
            error_response=expected_error_response,
            operation_name=expected_operation_name)
        # Act / Assert
        with self.assertRaises(PmError) as ctx:
            FileUtils.read_json(trace_id, 'S3_CHECK_BUCKET', s3_file_name)
        cause = ctx.exception.cause_error
        self.assertEqual(expected_error_response['Error'],
                         cause.response['Error'])
        self.assertEqual(expected_operation_name, cause.operation_name)
def check_asc_item_copy_cis_check(trace_id, check_history_id, organization_id,
                                  project_id, aws_account, session,
                                  result_json_path, check_cis_item):
    """Copy an already-computed CIS check result as this check's result.

    Looks up the CIS result record for ``check_cis_item``, reads its result
    JSON file from S3 (skipped for the "members management" state, which
    produces no file), and re-uploads that JSON to ``result_json_path``.

    Returns the copied CheckResult code, or CheckResult.Error on any
    failure (errors are logged, never raised to the caller).
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    # Human-readable item label used in the error log messages.
    error_cis = LIST_ERROR_CIS[check_cis_item]
    try:
        try:
            check_result_items = pm_checkResultItems.query_by_check_history_id_and_check_item_code_and_aws_account(
                trace_id, check_history_id, check_cis_item, aws_account,
                is_cw_logger=True)
        except Exception as e:
            cw_logger.error("[%s] %sのチェック結果を取得できませんでした。", aws_account,
                            error_cis)
            return CheckResult.Error
        # When the result is "managed-1" (members management) no result
        # file was created, so skip the file fetch and return as-is.
        check_result = check_result_items[0]["CheckResult"]
        if check_result == CheckResult.MembersManagement:
            return check_result
        s3_file_name = CommonConst.PATH_CHECK_RESULT.format(
            check_history_id, organization_id, project_id, aws_account,
            check_cis_item + ".json")
        try:
            check_results_s3 = FileUtils.read_json(trace_id,
                                                   CommonConst.S3_CHECK_BUCKET,
                                                   s3_file_name,
                                                   is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] %sのチェック結果ファイルを取得できませんでした。", aws_account,
                            error_cis)
            return CheckResult.Error
    except Exception as e:
        # Catch-all so an unexpected failure becomes an Error result.
        cw_logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
        return CheckResult.Error
    # Export the copied result JSON to this check's result path.
    try:
        FileUtils.upload_s3(trace_id, check_results_s3, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Return the original CIS check verdict.
    return check_result
def list_subscriptions_by_topic(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, region_name,
                                sns_client, topic_arn):
    """Return the subscriptions of one SNS topic.

    The raw-resource JSON cached on S3 is reused when it exists; otherwise
    the subscriptions are listed via the SNS API (attaching notification
    metadata to the PmError on failure). The data is then saved back to
    S3, and a warning is logged when no subscription was found.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    topic_name = topic_arn.split(":")[5]
    resource_key = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "SNS_Topic_Subscriptions_Info_" + region_name + "_" + topic_name +
        ".json")
    subscriptions = []
    # Prefer the cached resource file when available.
    if aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                       resource_key) is True:
        try:
            subscriptions = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                resource_key)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Fetch subscription info per region (GovCloud/Beijing excluded).
        try:
            subscriptions = SNSUtils.list_subscriptions_by_topic(
                trace_id, sns_client, awsaccount, region_name, topic_arn)
        except PmError as e:
            e.pm_notification_error = PmNotificationError(
                code_error=CommonConst.KEY_CODE_ERROR_GET_SUBSCRIPTIONS,
                data_body={'TopicArn': topic_arn})
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Save the subscription info back to S3 (resource info file).
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", subscriptions,
                              resource_key)
    except PmError as e:
        pm_logger.error("[%s/%s] SNS Topicサブスクリプション情報の情報のS3保存に失敗しました。",
                        awsaccount, region_name)
        raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when nothing was retrieved.
    if not subscriptions:
        pm_logger.warning("[%s/%s] SNS Topicサブスクリプション情報情報の取得件数が0でした。",
                          awsaccount, region_name)
    return subscriptions
def describe_metric_filters(trace_id, check_history_id, organization_id,
                            project_id, awsaccount, region_name, logs_client,
                            cloud_trail_log_group_name):
    """Return the CloudWatch Logs metric filters of a CloudTrail log group.

    Reuses the raw-resource JSON cached on S3 when present; otherwise
    calls the Logs API (attaching notification metadata to the PmError on
    failure). The result is saved back to S3, and a warning is logged when
    no metric filter was found.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    # Log-group names contain '/', which cannot appear in the object key.
    escaped_group = cloud_trail_log_group_name.replace(CommonConst.SLASH,
                                                       "-SLASH-")
    resource_key = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudWatch_Logs_Metric_Filters_Info_" + region_name + "_" +
        escaped_group + ".json")
    metric_filters = []
    # Prefer the cached resource file when available.
    if aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                       resource_key) is True:
        try:
            metric_filters = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                 resource_key)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            metric_filters = LogsUtils.describe_metric_filters(
                trace_id, logs_client, awsaccount, region_name,
                cloud_trail_log_group_name)
        except PmError as e:
            e.pm_notification_error = PmNotificationError(
                code_error=CommonConst.KEY_CODE_ERROR_GET_METRIC_FILTERS,
                data_body={
                    'CloudTrailLogGroupName': cloud_trail_log_group_name
                })
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Save the metric-filter info back to S3.
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", metric_filters,
                              resource_key)
    except PmError as e:
        pm_logger.error("[%s/%s] メトリクスフィルタ情報のS3保存に失敗しました。", awsaccount,
                        region_name)
        raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when nothing was retrieved.
    if not metric_filters:
        pm_logger.warning("[%s/%s] メトリクスフィルタ情報の取得件数が0でした。", awsaccount,
                          region_name)
    return metric_filters
def test_read_json_success(self):
    """read_json returns the stored JSON and logs a success message."""
    # Arrange: put a known JSON document into the check bucket.
    bucket_name = common_utils.get_environ("S3_CHECK_BUCKET")
    client_s3.create_bucket(Bucket=bucket_name)
    client_s3.put_object(Bucket=bucket_name, Key=s3_file_name,
                         Body=json.dumps(data_test_upload_s3))
    with patch.object(PmLogAdapter, 'info',
                      return_value=None) as mock_info, \
            patch.object(boto3, "client", return_value=client_s3):
        # Act
        actual_result = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                            s3_file_name)
    # Assert: the payload round-trips and the success log was written.
    self.assertEqual(data_test_upload_s3, actual_result)
    mock_info.assert_any_call('read json success')
def describe_alarms(trace_id, check_history_id, organization_id, project_id,
                    awsaccount, region_name, cloudwatch_client):
    """Return the CloudWatch metric alarms of one region.

    Reuses the raw-resource JSON cached on S3 when present; otherwise
    calls the CloudWatch API. The result is saved back to S3, and a
    warning is logged when no alarm was found. Raises PmError on failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    resource_key = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudWatch_Alarm_Info_" + region_name + ".json")
    metric_alarms = []
    # Prefer the cached resource file when available.
    if aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                       resource_key) is True:
        try:
            metric_alarms = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                resource_key)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            metric_alarms = CloudWatchlUtils.describe_alarms(
                trace_id, awsaccount, cloudwatch_client, region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Save the alarm info to S3 (resource info file).
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", metric_alarms,
                              resource_key)
    except PmError as e:
        pm_logger.error("[%s/%s] CloudWatchAlarmの情報のS3保存に失敗しました。", awsaccount,
                        region_name)
        raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when nothing was retrieved.
    if not metric_alarms:
        pm_logger.warning("[%s/%s] CloudWatchAlarm情報の取得件数が0でした。", awsaccount,
                          region_name)
    return metric_alarms
def get_cloud_trails(trace_id, check_history_id, organization_id, project_id,
                     awsaccount, region_name, session):
    """Return the CloudTrail trail descriptions for one region.

    Reads the cached raw-resource JSON from the check bucket when it
    exists; otherwise creates a regional CloudTrail client, describes the
    trails, warns when none were found, and saves the result to S3.

    Raises PmError when the API fetch or the S3 upload fails.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudTrail_" + region_name + ".json")
    cloud_trails = []
    # Check whether CloudTrail_{region_name}.json already exists on S3.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            cloud_trails = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                               s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Fetch CloudTrail info per region (GovCloud and Beijing excluded).
        trail_client = CloudTrailUtils.get_trail_client(
            trace_id, session, region_name, awsaccount)
        try:
            cloud_trails = CloudTrailUtils.describe_cloud_trails(
                trace_id, awsaccount, trail_client, region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        if (len(cloud_trails) == 0):
            pm_logger.warning("[%s/%s] CloudTrail情報の取得件数が0でした。", awsaccount,
                              region_name)
        # Save the fetched CloudTrail info to S3 (resource info file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", cloud_trails,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s/%s] CloudTrailの情報のS3保存に失敗しました。", awsaccount,
                            region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    return cloud_trails
def get_security_check_resource(trace_id, coop_id, project_id,
                                organization_id, check_item_code):
    """Return the security-check result files for one AWS account coop.

    Validates the coop record, resolves the latest CheckHistoryID, loads
    the matching check-result items, reads each item's result JSON from
    S3, and builds the HTTP response body.

    Returns an HTTP response object built by common_utils (success,
    error_common, or error_exception).
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    # Validate that the AWS account coop record exists.
    try:
        awscoop_item = pm_awsAccountCoops.query_awscoop_coop_key(
            trace_id, coop_id)
    except Exception as e:
        return common_utils.error_exception(MsgConst.ERR_402,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True)
    if not awscoop_item:
        return common_utils.error_common(MsgConst.ERR_AWS_401,
                                         HTTPStatus.UNPROCESSABLE_ENTITY,
                                         pm_logger)
    # Resolve the CheckHistoryID from the latest-check-result table.
    try:
        latest_check_result = pm_latestCheckResult.query_key(
            trace_id, project_id, organization_id)
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_402,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True)
    if not latest_check_result:
        # No check has run yet: respond 200 with an empty list.
        response = common_utils.get_response_by_response_body(
            HTTPStatus.OK, [])
        return common_utils.response(response, pm_logger)
    check_history_id = latest_check_result['CheckHistoryID']
    # Load the check-result detail records for this item and account.
    try:
        check_result_items = pm_checkResultItems.get_security_check_detail_by_check_result_and_check_item_code(
            trace_id, check_history_id, check_item_code,
            awscoop_item['AWSAccount'])
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_402,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True)
    if not check_result_items:
        response = common_utils.get_response_by_response_body(
            HTTPStatus.OK, [])
        return common_utils.response(response, pm_logger)
    # For each result record, read the result JSON file created for it.
    response_body = []
    for check_result_item in check_result_items:
        try:
            result_json_check_result_item = FileUtils.read_json(
                trace_id, "S3_CHECK_BUCKET",
                check_result_item["ResultJsonPath"])
        except PmError as e:
            # Distinguish a missing result file from other S3 failures.
            if e.cause_error.response['Error'][
                    'Code'] == CommonConst.NO_SUCH_KEY:
                return common_utils.error_exception(
                    MsgConst.ERR_S3_702, HTTPStatus.INTERNAL_SERVER_ERROR, e,
                    pm_logger, True)
            else:
                return common_utils.error_exception(
                    MsgConst.ERR_S3_709, HTTPStatus.INTERNAL_SERVER_ERROR, e,
                    pm_logger, True)
        response_body.append(
            get_response_body(check_result_item,
                              result_json_check_result_item))
    # Return the assembled response data.
    response = common_utils.get_response_by_response_body(
        HTTPStatus.OK, response_body)
    return common_utils.response(response, pm_logger)
def execute_check_results_assessment(trace_id, check_item_code,
                                     check_history_id, organization_id,
                                     project_id, aws_account,
                                     result_json_path, level):
    """Evaluate a self-assessment check item and write its result JSON.

    Loads the account's assessment records (from the S3 raw cache when it
    exists, otherwise from the datastore, caching the query result back to
    S3), looks for a record matching ``check_item_code``, and builds the
    check result: no detection when a matching assessment exists, or a
    single "NoEvaluation" detection whose level is derived from ``level``.
    The result JSON is uploaded to ``result_json_path``.

    Returns CheckResult.Normal / CriticalDefect / MinorInadequacies.
    Raises PmError (carrying notification metadata) on failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(check_history_id,
                                                     organization_id,
                                                     project_id, aws_account,
                                                     "Assessment_Result.json")
    try:
        # Acquire resource info: reuse the cached file when it exists.
        if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                            s3_file_name)) is True:
            try:
                assessment_items = FileUtils.read_json(trace_id,
                                                       "S3_CHECK_BUCKET",
                                                       s3_file_name)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
        else:
            try:
                assessment_items = pm_assessmentItems.query_organization_index_filter_awsaccount(
                    trace_id, organization_id, project_id, aws_account)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
            # Cache the query result on S3 (resource info file).
            try:
                FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                      assessment_items, s3_file_name)
            except PmError as e:
                # Best-effort cache: the failure is logged but the check
                # continues with the in-memory data.
                pm_logger.error("[%s] リソース情報ファイのS3保存に失敗しました。", aws_account)
        # Check processing.
        try:
            check_results = []
            assessment_result = None
            for assessment_item in assessment_items:
                if (assessment_item['CheckItemCode'] == check_item_code):
                    assessment_result = {
                        'AssessmentComment':
                        common_utils.get_value('AssessmentComment',
                                               assessment_item),
                        'UserID':
                        common_utils.get_value('UserID', assessment_item),
                        'MailAddress':
                        common_utils.get_value('MailAddress',
                                               assessment_item),
                        'CreatedAt': assessment_item['CreatedAt']
                    }
                    break
            if (assessment_result is None):
                # No self-assessment recorded: emit a "not evaluated"
                # detection whose severity depends on the item level.
                LEVEL_DIVISION = {
                    "1": CommonConst.LEVEL_CODE_21,  # critical defect
                    "2": CommonConst.LEVEL_CODE_11  # minor defect
                }
                result = {
                    'Region': 'Global',
                    'Level': LEVEL_DIVISION[level],
                    'DetectionItem': {
                        'NoEvaluation': True
                    }
                }
                check_results.append(result)
        except Exception as e:
            pm_logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
        # Build and upload the check-result JSON file.
        try:
            current_date = date_utils.get_current_date_by_format(
                date_utils.PATTERN_YYYYMMDDHHMMSS)
            check_result_json = {
                'AWSAccount': aws_account,
                'CheckResults': check_results,
                'DateTime': current_date
            }
            if assessment_result is not None:
                check_result_json['AssessmentResult'] = assessment_result
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  check_result_json, result_json_path)
        except Exception as e:
            pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
    except Exception as e:
        # Wrap any failure with default notification metadata.
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Final verdict derived from the detections and the item level.
    if len(check_results) == 0:
        return CheckResult.Normal
    elif (level == CommonConst.LEVEL_CODE_1):
        return CheckResult.CriticalDefect
    return CheckResult.MinorInadequacies
def execute_security_group_port(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path, port, check_item_code,
                                excluded_resources):
    """Check all regions for security groups exposing ``port`` to 0.0.0.0/0.

    Per region (ignore list excluded) the security-group list is loaded
    from the S3 raw cache or fetched via EC2 and uploaded. Groups whose
    rules allow ``port`` (TCP, or protocol '-1' meaning all) from the
    open CIDR are collected as detections, and the detection list is
    written as the check-result JSON to ``result_json_path``.

    Returns CheckResult.CriticalDefect when anything was detected,
    otherwise CheckResult.Normal. Raises PmError (with notification
    metadata) on failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    # Export File VPC_SecurityGroups_{region}.json
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            # Load security groups from the cache, or fetch via the API.
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(security_groups) == 0):
                # Nothing to check in this region.
                pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。", awsaccount,
                               region_name)
                continue
            # Save the security-group info to S3 (resource info file).
            try:
                FileUtils.upload_s3(trace_id, security_groups, s3_file_name,
                                    True)
            except PmError as e:
                pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            try:
                for security_group in security_groups:
                    # Skip resources the caller explicitly excluded.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    for ip_permission in security_group['IpPermissions']:
                        # Protocol '-1' means "all traffic"; otherwise only
                        # TCP rules whose FromPort..ToPort range covers
                        # `port` are considered.
                        if ip_permission['IpProtocol'] != '-1':
                            if ip_permission['IpProtocol'] != CommonConst.TCP:
                                continue
                            if common_utils.check_key(
                                    'FromPort', ip_permission
                            ) is False or ip_permission['FromPort'] > port:
                                continue
                            if common_utils.check_key(
                                    'ToPort', ip_permission
                            ) is False or ip_permission['ToPort'] < port:
                                continue
                        for ip_range in ip_permission['IpRanges']:
                            if common_utils.check_key('CidrIp', ip_range):
                                # 0.0.0.0/0 exposes the port to the world.
                                if (CommonConst.CIDR_IP_NOT_SECURITY ==
                                        ip_range['CidrIp']):
                                    check_result = get_check_result(
                                        security_group, ip_permission,
                                        ip_range, region_name)
                                    check_results.append(check_result)
                                    break
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。", awsaccount,
                                region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            # Attach region-scoped notification metadata and abort.
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    result_security_group = CheckResult.Normal
    if (len(check_results) > 0):
        result_security_group = CheckResult.CriticalDefect
    # Save all detections into a single check-result JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_rule_security_group = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_rule_security_group,
                            result_json_path, True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    return result_security_group
def check_cis_item_4_03(trace_id, check_history_id, organization_id,
                        project_id, awsaccount, session, result_json_path,
                        check_item_code, excluded_resources):
    """CIS 4.3: the 'default' security group should restrict all traffic.

    Per region (ignore list excluded) this loads security groups and EC2
    instances (from the S3 raw cache when available, otherwise the EC2
    API, caching fetched data back to S3), then detects:
      * check1 — default security groups defining both inbound and
        outbound rules;
      * check2 — EC2 instances attached to a default security group.
    Combined detections are written to ``result_json_path``.

    Returns CheckResult.MinorInadequacies when anything was detected,
    otherwise CheckResult.Normal. Raises PmError on failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            # Fetch security-group info per region (GovCloud/Beijing excluded).
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(security_groups) == 0):
                pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。", awsaccount,
                               region_name)
            # Cache the fetched security groups (only when non-empty).
            try:
                if (len(security_groups) > 0):
                    FileUtils.upload_s3(trace_id, security_groups,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            # Fetch EC2 instance info per region (GovCloud/Beijing excluded).
            s3_file_name_iam_instances = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "IAM_Instances_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(
                    trace_id, "S3_CHECK_BUCKET",
                    s3_file_name_iam_instances)) is True:
                try:
                    reservation_instances = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET",
                        s3_file_name_iam_instances)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    reservation_instances = Ec2Utils.describe_instances(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(reservation_instances) == 0):
                pm_logger.info("[%s/%s] EC2インスタンス情報の取得件数が0でした。", awsaccount,
                               region_name)
            # Cache the fetched instances (only when non-empty).
            try:
                if (len(reservation_instances) > 0):
                    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                        check_history_id, organization_id, project_id,
                        awsaccount,
                        "VPC_SG_Instances_" + region_name + ".json")
                    FileUtils.upload_s3(trace_id, reservation_instances,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] EC2インスタンス情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            check1 = []
            check2 = []
            try:
                # Detect default security groups that define both inbound
                # and outbound rules (from the security-group resource file).
                for security_group in security_groups:
                    # Skip resources the caller explicitly excluded.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    if (security_group['GroupName'] == CommonConst.DEFAULT
                            and len(security_group['IpPermissions']) > 0 and
                            len(security_group['IpPermissionsEgress']) > 0):
                        check1.append(security_group['GroupId'])
                # Detect EC2 instances that attach a default security group
                # (from the EC2 instance resource file).
                for reservation_instance in reservation_instances:
                    for instance in reservation_instance['Instances']:
                        for security_group in instance['SecurityGroups']:
                            if security_group[
                                    'GroupName'] == CommonConst.DEFAULT:
                                if common_utils.check_key('Tags',
                                                          instance) is True:
                                    # Resolve the display name from the
                                    # 'Name' tag when one is present.
                                    name_tag = next(
                                        filter(
                                            lambda tag: tag['Key'] == 'Name',
                                            instance['Tags']), None)
                                    instance[
                                        'InstanceName'] = None if name_tag is None else name_tag[
                                            'Value']
                                check2.append(instance)
                if (len(check1) > 0 or len(check2) > 0):
                    check_results.append(
                        get_check_cis_item_4_03_result(
                            check1, check2, region_name))
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。", awsaccount,
                                region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            # Attach region-scoped notification metadata and abort.
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Export File CHECK_CIS12_ITEM_4_03.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_cis_item_4_03 = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_cis_item_4_03, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Verdict: any detection counts as a minor inadequacy.
    if len(check_results) > 0:
        return CheckResult.MinorInadequacies
    return CheckResult.Normal
def get_security_check_detail(trace_id, organization_id, project_id,
                              check_history_id=None, group_filter=None):
    """Return the detailed security-check results of a project.

    Validates the project, resolves the check history (the latest one when
    ``check_history_id`` is None, otherwise the given ID), loads the
    check-result items (optionally restricted by ``group_filter``), reads
    each item's result JSON from S3, and builds the HTTP response.

    Returns an HTTP response object built by common_utils (success,
    error_common, or error_exception).
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    # Validate that the project belongs to the organization.
    try:
        project = pm_projects.get_projects_by_organization_id(
            trace_id, project_id, organization_id)
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_402,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True)
    if (not project):
        return common_utils.error_common(MsgConst.ERR_AWS_401,
                                         HTTPStatus.UNPROCESSABLE_ENTITY,
                                         pm_logger)
    # Resolve the CheckHistoryID of the latest check result.
    if check_history_id is None:
        try:
            lastest_check_result = pm_latestCheckResult.query_key(
                trace_id, project_id, organization_id)
        except PmError as e:
            return common_utils.error_exception(
                MsgConst.ERR_402, HTTPStatus.INTERNAL_SERVER_ERROR, e,
                pm_logger, True)
        if (not lastest_check_result):
            # No check has run yet: respond 200 with an empty list.
            response = common_utils.get_response_by_response_body(
                HTTPStatus.OK, [])
            return common_utils.response(response, pm_logger)
        check_history_id = lastest_check_result["CheckHistoryID"]
    else:
        # An explicit history ID must exist, otherwise 404.
        try:
            lastest_check_result = pm_latestCheckResult.query_key_by_check_history_id(
                trace_id, project_id, organization_id, check_history_id)
        except PmError as e:
            return common_utils.error_exception(
                MsgConst.ERR_402, HTTPStatus.INTERNAL_SERVER_ERROR, e,
                pm_logger, True)
        if (not lastest_check_result):
            return common_utils.error_common(MsgConst.ERR_301,
                                             HTTPStatus.NOT_FOUND, pm_logger)
    # Load the check-result detail records (optionally group-filtered).
    try:
        if common_utils.is_null(group_filter) is True:
            check_result_items = pm_checkResultItems.get_security_check_detail(
                trace_id, check_history_id)
        else:
            check_result_items = pm_checkResultItems.get_security_check_detail(
                trace_id, check_history_id,
                CommonConst.GROUP_FILTER_TEMPLATE.format(group_filter))
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_402,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True)
    if (not check_result_items):
        response = common_utils.get_response_by_response_body(
            HTTPStatus.OK, [])
        return common_utils.response(response, pm_logger)
    # For each result record, read the result JSON file created for it.
    response_body = []
    for check_result_item in check_result_items:
        try:
            data = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                       check_result_item["ResultJsonPath"])
        except PmError as e:
            # Distinguish a missing result file from other S3 failures.
            if e.cause_error.response['Error'][
                    'Code'] == CommonConst.NO_SUCH_KEY:
                return common_utils.error_exception(
                    MsgConst.ERR_S3_702, HTTPStatus.INTERNAL_SERVER_ERROR, e,
                    pm_logger, True)
            else:
                return common_utils.error_exception(
                    MsgConst.ERR_S3_709, HTTPStatus.INTERNAL_SERVER_ERROR, e,
                    pm_logger, True)
        response_body.append(get_response_body(check_result_item, data))
    # Return the assembled response data.
    response = common_utils.get_response_by_response_body(
        HTTPStatus.OK, response_body)
    return common_utils.response(response, pm_logger)