Example #1
    def test_describe_flow_log_error(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()

        with patch.object(PmLogAdapter, 'error',
                          return_value=None) as mock_error:
            with patch.object(ec2_client, 'describe_flow_logs') as mock_method:
                mock_method.side_effect = ClientError(
                    {
                        'Error': {
                            'Code': data_client_error['service_error_code'],
                            'Message': data_client_error['service_message']
                        }
                    }, 'EXCEPTION')
                with self.assertRaises(PmError) as exception:
                    # Call function test
                    ec2_utils.describe_flow_logs(trace_id, aws_account,
                                                 ec2_client, region_name)

                cause_error = exception.exception.cause_error.response['Error']
                # Check result
                self.assertEqual(data_client_error['service_error_code'],
                                 cause_error['Code'])
                self.assertEqual(data_client_error['service_message'],
                                 cause_error['Message'])
        mock_error.assert_any_call("[%s/%s] VPCフローログ情報の取得に失敗しました。",
                                   aws_account, region_name)
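For context: the ec2_utils.describe_flow_logs wrapper under test is not part of this excerpt. Below is a minimal sketch of the error path this test exercises, with a stand-in PmError and a plain logging logger in place of the project's PmLogAdapter (both stand-ins are assumptions, and NextToken pagination, exercised in Example #9, is omitted):

import logging

from botocore.exceptions import ClientError

logger = logging.getLogger(__name__)  # stand-in for the project's PmLogAdapter


class PmError(Exception):
    # stand-in: the real class lives in the project; it carries the
    # original botocore error as cause_error, which the test inspects
    def __init__(self, cause_error=None):
        super().__init__()
        self.cause_error = cause_error


def describe_flow_logs(trace_id, aws_account, ec2_client, region_name):
    try:
        return ec2_client.describe_flow_logs().get('FlowLogs', [])
    except ClientError as e:
        # Example #1 asserts this exact message via PmLogAdapter.error
        logger.error("[%s/%s] VPCフローログ情報の取得に失敗しました。",
                     aws_account, region_name)
        raise PmError(cause_error=e)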
Example #2
    def test_describe_instances_not_exist(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()
        # Call function test
        actual_instances = ec2_utils.describe_instances(
            trace_id, aws_account, ec2_client, region_name)
        # Check result
        self.assertListEqual([], actual_instances)
Example #3
    def test_describe_vpcs_empty(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()

        with patch.object(ec2_client, 'describe_vpcs') as mock_method:
            mock_method.return_value = {}
            actual_describe_vpcs = ec2_utils.describe_vpcs(
                trace_id, aws_account, ec2_client, region_name)
            self.assertListEqual([], actual_describe_vpcs)
Example #4
    def test_describe_vpcs_success(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()
        # Call function test
        expect_list_vpc = ec2_client.describe_vpcs()['Vpcs']

        actual_describe_vpcs = ec2_utils.describe_vpcs(trace_id, aws_account,
                                                       ec2_client, region_name)
        # Check result
        self.assertListEqual(expect_list_vpc, actual_describe_vpcs)
Example #5
    def test_describe_volumes_not_exist(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()
        # Call function test
        with patch.object(ec2_client, 'describe_volumes') as mock_obj:
            mock_obj.return_value = {}
            actual_volumes = ec2_utils.describe_volumes(
                trace_id, aws_account, ec2_client, region_name)
        # Check result
        self.assertListEqual([], actual_volumes)
Example #6
    def test_describe_not_exist_flow_logs(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()
        expect_result = copy.deepcopy(
            DataTestEC2.DATA_DESCRIBE_FLOW_LOGS_EMPTY)
        with patch.object(ec2_client, 'describe_flow_logs') as mock_obj:
            mock_obj.return_value = expect_result
            # Call function test
            actual_flow_logs = ec2_utils.describe_flow_logs(
                trace_id, aws_account, ec2_client, region_name)
        # Check result
        self.assertListEqual([], actual_flow_logs)
Example #7
    def test_describe_instances_exist_next_token(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()
        with patch.object(ec2_client, 'describe_instances') as mock_obj:
            mock_obj.side_effect = (
                ec2_utils_mock.side_effect_describe_instances)
            # Call function test
            actual_instances = ec2_utils.describe_instances(
                trace_id, aws_account, ec2_client, region_name)
        # Check result
        expect_result = copy.deepcopy(
            DataTestEC2.DATA_CHECK_ALL_DESCRIBE_INSTANCES['Reservations'])
        self.assertListEqual(expect_result, actual_instances)
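The mock helper ec2_utils_mock.side_effect_describe_instances is not shown either. One plausible shape for it, returning a first page with a NextToken and a final page without one so the wrapper's pagination loop runs exactly twice (data values are illustrative only):

def side_effect_describe_instances(**kwargs):
    # first call: no NextToken kwarg yet -> page 1 plus a continuation token
    if 'NextToken' not in kwargs:
        return {
            'Reservations': [{'ReservationId': 'r-001', 'Instances': []}],
            'NextToken': 'page-2'
        }
    # second call: the wrapper passed NextToken='page-2' -> final page
    return {'Reservations': [{'ReservationId': 'r-002', 'Instances': []}]}

Under this shape, DataTestEC2.DATA_CHECK_ALL_DESCRIBE_INSTANCES['Reservations'] would hold the Reservations of both pages concatenated.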
Example #8
    def test_describe_security_groups_not_exist(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()

        with patch.object(ec2_client,
                          'describe_security_groups') as mock_method:
            mock_method.return_value = {}
            # Call function test
            actual_security_groups = ec2_utils.describe_security_groups(
                trace_id, aws_account, ec2_client, region_name)
        # Check result
        self.assertListEqual([], actual_security_groups)
Example #9
    def test_describe_flow_log_exist_next_token(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()

        with patch.object(ec2_client, 'describe_flow_logs') as mock_obj:
            mock_obj.side_effect = ec2_utils_mock.side_effect_describe_flow_log
            # Call function test
            actual_flow_logs = ec2_utils.describe_flow_logs(
                trace_id, aws_account, ec2_client, region_name)
        # Check result
        expect_result = copy.deepcopy(
            DataTestEC2.DATA_CHECK_ALL_DESCRIBE_FLOW_LOGS['FlowLogs'])
        self.assertListEqual(expect_result, actual_flow_logs)
Example #10
    def test_describe_instances_success(self):
        # create data mock
        ec2_resource_connect = ec2_utils_mock.resource_connect()
        ec2_client = ec2_utils_mock.client_connect()

        ec2_resource_connect.create_instances(ImageId='',
                                              MinCount=1,
                                              MaxCount=1)
        expect_response = ec2_client.describe_instances()
        # Call function test
        actual_instances = ec2_utils.describe_instances(
            trace_id, aws_account, ec2_client, region_name)
        # Check result
        self.assertListEqual(expect_response['Reservations'], actual_instances)
Example #11
    def test_describe_security_groups_exist_next_token(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()

        with patch.object(ec2_client, 'describe_security_groups') as mock_obj:
            mock_obj.side_effect = ec2_utils_mock.side_effect_describe_security_groups
            # Call function test
            actual_security_groups = ec2_utils.describe_security_groups(
                trace_id, aws_account, ec2_client, region_name)
        # Check result
        expect_data = copy.deepcopy(
            DataTestEC2.
            DATA_CHECK_ALL_DESCRIBE_SECURITY_GROUPS['SecurityGroups'])
        self.assertListEqual(expect_data, actual_security_groups)
Example #12
    def test_describe_security_groups_success(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()
        group_name_test = "group_name_test"
        ec2_client.create_security_group(GroupName=group_name_test,
                                         Description='description_test')

        expect_describe = ec2_client.describe_security_groups()
        # Call function test
        actual_security_groups = ec2_utils.describe_security_groups(
            trace_id, aws_account, ec2_client, region_name)

        # Check result
        self.assertListEqual(expect_describe['SecurityGroups'],
                             actual_security_groups)
Example #13
    def test_describe_volumes_success(self):
        # create data mock
        ec2_client = ec2_utils_mock.client_connect()

        ec2_client.create_volume(
            AvailabilityZone=volum_test['AvailabilityZone'],
            Encrypted=volum_test['Encrypted'],
            Size=volum_test['Size'],
            TagSpecifications=volum_test['TagSpecifications'])

        # Call function test
        actual_volumes = ec2_utils.describe_volumes(trace_id, aws_account,
                                                    ec2_client, region_name)
        # Check result
        self.assertEqual(1, len(actual_volumes))
        self.assertEqual(volum_test['Encrypted'],
                         actual_volumes[0]['Encrypted'])

        self.assertEqual(volum_test['Size'], actual_volumes[0]['Size'])

        self.assertDictEqual(
            volum_test['TagSpecifications'][0]['Tags'][0],
            actual_volumes[0]['Tags'][0],
        )
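volum_test is a module-level fixture defined outside this excerpt. A plausible shape, consistent with the create_volume call and the assertions above (all values are illustrative):

volum_test = {
    'AvailabilityZone': 'ap-northeast-1a',
    'Encrypted': True,
    'Size': 8,
    'TagSpecifications': [{
        'ResourceType': 'volume',
        'Tags': [{'Key': 'Name', 'Value': 'test-volume'}]
    }]
}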
Example #14
def check_asc_item_07_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session, is_cw_logger=True)
    except PmError as e:
        cw_logger.error("Regionの情報の取得に失敗しました。")
        raise common_utils.write_log_pm_error(e, cw_logger)

    for region in regions:
        region_name = region["RegionName"]
        if region_name in REGION_IGNORE:
            continue
        ec2_client = Ec2Utils.get_ec2_client(trace_id, session, region_name,
                                             aws_account, is_cw_logger=True)
        try:
            # Retrieve EBS volume information.
            ebs_volumes = Ec2Utils.describe_volumes(trace_id, aws_account,
                                                    ec2_client, region_name,
                                                    is_cw_logger=True)
        except PmError:
            return CheckResult.Error

        # If no volumes were retrieved, log it and move on to the next region.
        if (len(ebs_volumes) == 0):
            cw_logger.info("[%s/%s] EBSボリューム情報の取得件数が0でした。", aws_account,
                           region_name)
            continue
        try:
            # Save the retrieved EBS information to S3 (EBS information file).
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/EBS_Volumes_" + region_name + ".json")
            FileUtils.upload_s3(trace_id, ebs_volumes, s3_file_name, True,
                                is_cw_logger=True)
        except PmError:
            cw_logger.error("[%s/%s] EBSボリューム情報のS3保存に失敗しました。", aws_account,
                            region_name)
            return CheckResult.Error

        # Check rules
        try:
            for ebs_volume in ebs_volumes:
                if (ebs_volume['Encrypted'] is False):
                    check_result = get_check_asc_item_07_01_result(
                        ebs_volume, region_name)
                    check_results.append(check_result)
        except PmError:
            cw_logger.error("[%s/%s] チェック処理中にエラーが発生しました。", aws_account,
                            region_name)
            return CheckResult.Error

    # Export File ASC/CHECK_ASC_ITEM_07_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_7_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_7_01, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error

    # Check result
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
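CheckResult is imported from the project and is not shown here. A stand-in consistent with how Examples #14-#16 use it (the member values are assumptions):

from enum import Enum


class CheckResult(Enum):
    Normal = 0
    MinorInadequacies = 1
    CriticalDefect = 2
    Error = -1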
Example #15
def execute_security_group_port(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path, port, check_item_code,
                                excluded_resources):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    # Export File VPC_SecurityGroups_{region}.json
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
                if (len(security_groups) == 0):
                    pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。",
                                   awsaccount, region_name)
                    continue
                try:
                    FileUtils.upload_s3(trace_id, security_groups,
                                        s3_file_name, True)
                except PmError as e:
                    pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                    awsaccount, region_name)
                    raise common_utils.write_log_pm_error(e, pm_logger)
            try:
                for security_group in security_groups:
                    # check excluded resources
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name, ResourceType.GroupId,
                            resource_name, excluded_resources):
                        continue

                    for ip_permission in security_group['IpPermissions']:
                        if ip_permission['IpProtocol'] != '-1':
                            if ip_permission['IpProtocol'] != CommonConst.TCP:
                                continue
                            if common_utils.check_key(
                                    'FromPort', ip_permission
                            ) is False or ip_permission['FromPort'] > port:
                                continue
                            if common_utils.check_key(
                                    'ToPort', ip_permission
                            ) is False or ip_permission['ToPort'] < port:
                                continue
                        for ip_range in ip_permission['IpRanges']:
                            if common_utils.check_key('CidrIp', ip_range):
                                if (CommonConst.CIDR_IP_NOT_SECURITY ==
                                        ip_range['CidrIp']):
                                    check_result = get_check_result(
                                        security_group, ip_permission,
                                        ip_range, region_name)
                                    check_results.append(check_result)
                                    break
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。", awsaccount,
                                region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)

    result_security_group = CheckResult.Normal
    if (len(check_results) > 0):
        result_security_group = CheckResult.CriticalDefect

    # Save the findings to a single check-result JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_rule_security_group = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_rule_security_group,
                            result_json_path, True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)

    return result_security_group
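The nested conditions above reduce to a single predicate: a permission is flagged when it can reach port (IpProtocol '-1' means all traffic; otherwise it must be TCP with FromPort <= port <= ToPort) and at least one of its ranges is open to the world. A standalone restatement, assuming CommonConst.TCP == 'tcp' and CommonConst.CIDR_IP_NOT_SECURITY == '0.0.0.0/0':

def permission_opens_port_to_world(ip_permission, port):
    # IpProtocol '-1' means "all traffic", so the port check is skipped
    if ip_permission['IpProtocol'] != '-1':
        if ip_permission['IpProtocol'] != 'tcp':
            return False
        # both bounds must exist and bracket the port
        if ip_permission.get('FromPort', port + 1) > port:
            return False
        if ip_permission.get('ToPort', port - 1) < port:
            return False
    return any(ip_range.get('CidrIp') == '0.0.0.0/0'
               for ip_range in ip_permission['IpRanges'])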
Example #16
def check_cis_item_4_03(trace_id, check_history_id, organization_id,
                        project_id, awsaccount, session, result_json_path,
                        check_item_code, excluded_resources):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            # Retrieve security group information for each region (excluding
            # GovCloud and Beijing) of the target AWS account.
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
                if (len(security_groups) == 0):
                    pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。",
                                   awsaccount, region_name)
                try:
                    if (len(security_groups) > 0):
                        FileUtils.upload_s3(trace_id, security_groups,
                                            s3_file_name, True)
                except PmError as e:
                    pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                    awsaccount, region_name)
                    raise common_utils.write_log_pm_error(e, pm_logger)

            # Retrieve EC2 instance information for each region (excluding
            # GovCloud and Beijing) of the target AWS account.
            s3_file_name_iam_instances = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "IAM_Instances_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(
                    trace_id, "S3_CHECK_BUCKET",
                    s3_file_name_iam_instances)) is True:
                try:
                    reservation_instances = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET",
                        s3_file_name_iam_instances)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    reservation_instances = Ec2Utils.describe_instances(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
                if (len(reservation_instances) == 0):
                    pm_logger.info("[%s/%s] EC2インスタンス情報の取得件数が0でした。",
                                   awsaccount, region_name)
            try:
                if (len(reservation_instances) > 0):
                    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                        check_history_id, organization_id, project_id,
                        awsaccount,
                        "VPC_SG_Instances_" + region_name + ".json")
                    FileUtils.upload_s3(trace_id, reservation_instances,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] EC2インスタンス情報のS3保存に失敗しました。", awsaccount,
                                region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)

            check1 = []
            check2 = []
            try:
                # From the security group info in the resource file, detect
                # default security groups with inbound and outbound rules set.
                for security_group in security_groups:
                    # check excluded resources
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name, ResourceType.GroupId,
                            resource_name, excluded_resources):
                        continue

                    if (security_group['GroupName'] == CommonConst.DEFAULT
                            and len(security_group['IpPermissions']) > 0 and
                            len(security_group['IpPermissionsEgress']) > 0):
                        check1.append(security_group['GroupId'])

                # From the EC2 instance info in the resource file, detect
                # instances that have the default security group attached.
                for reservation_instance in reservation_instances:
                    for instance in reservation_instance['Instances']:
                        for security_group in instance['SecurityGroups']:
                            if (security_group['GroupName'] !=
                                    CommonConst.DEFAULT):
                                continue
                            if common_utils.check_key('Tags', instance):
                                name_tag = next(
                                    filter(
                                        lambda tag: tag['Key'] == 'Name',
                                        instance['Tags']), None)
                                instance['InstanceName'] = (
                                    None if name_tag is None
                                    else name_tag['Value'])
                            check2.append(instance)

                if (len(check1) > 0 or len(check2) > 0):
                    check_results.append(
                        get_check_cis_item_4_03_result(check1, check2,
                                                       region_name))
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。", awsaccount,
                                region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)

    # Export File CHECK_CIS12_ITEM_4_03.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_cis_item_4_03 = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_cis_item_4_03, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)

    # Check result
    if len(check_results) > 0:
        return CheckResult.MinorInadequacies
    return CheckResult.Normal
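For reference, the two detections above distilled into predicates, assuming CommonConst.DEFAULT == 'default' (note that, as written, check1 flags a default group only when it has both inbound and outbound rules):

def is_flagged_default_sg(security_group):
    # check1: a default group with both inbound and outbound rules configured
    return (security_group['GroupName'] == 'default'
            and len(security_group['IpPermissions']) > 0
            and len(security_group['IpPermissionsEgress']) > 0)


def uses_default_sg(instance):
    # check2: an instance with the default security group attached
    return any(sg['GroupName'] == 'default'
               for sg in instance['SecurityGroups'])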