Example #1
    def _output_results_to_db(self, resource_name, violations):
        """Output scanner results to DB.

        Args:
            resource_name (str): Resource name.
            violations (list): A list of violations.

        Returns:
            list: Violations that encountered an error during insert.
        """
        resource_name = 'violations'
        (inserted_row_count, violation_errors) = (0, [])
        try:
            vdao = violation_dao.ViolationDao(self.global_configs)
            (inserted_row_count, violation_errors) = vdao.insert_violations(
                violations,
                resource_name=resource_name,
                snapshot_timestamp=self.snapshot_timestamp)
        except db_errors.MySQLError as err:
            LOGGER.error('Error importing violations to database: %s', err)

        # TODO: figure out what to do with the errors. For now, just log it.
        LOGGER.debug('Inserted %s rows with %s errors', inserted_row_count,
                     len(violation_errors))

        return violation_errors
Example #2
    def __init__(self, resource, cycle_timestamp,
                 violations, notifier_config, pipeline_config):
        """Constructor for the base pipeline.

        Args:
            resource: Violation resource name.
            cycle_timestamp: String of timestamp, formatted as YYYYMMDDTHHMMSSZ.
            violations: Dictionary of violations.
            notifier_config: Dictionary of notifier configurations.
            pipeline_config: Dictionary of pipeline configurations.

        Returns:
            None
        """
        self.cycle_timestamp = cycle_timestamp
        self.resource = resource
        self.notifier_config = notifier_config
        self.pipeline_config = pipeline_config
        # TODO: import api_client
        # self.api_client = api_client

        # Initializing DAOs
        self.dao = dao.Dao()
        self.project_dao = project_dao.ProjectDao()
        self.violation_dao = violation_dao.ViolationDao()

        # Get violations
        self.violations = violations
Example #3
    def __init__(self, resource, cycle_timestamp, violations, global_configs,
                 notifier_config, pipeline_config):
        """Constructor for the base pipeline.

        Args:
            resource (str): Violation resource name.
            cycle_timestamp (str): Snapshot timestamp,
               formatted as YYYYMMDDTHHMMSSZ.
            violations (dict): Violations.
            global_configs (dict): Global configurations.
            notifier_config (dict): Notifier configurations.
            pipeline_config (dict): Pipeline configurations.
        """
        self.cycle_timestamp = cycle_timestamp
        self.resource = resource
        self.global_configs = global_configs
        self.notifier_config = notifier_config
        self.pipeline_config = pipeline_config
        # TODO: import api_client
        # self.api_client = api_client

        # Initializing DAOs
        self.dao = dao.Dao(global_configs)
        self.project_dao = project_dao.ProjectDao(global_configs)
        self.violation_dao = violation_dao.ViolationDao(global_configs)

        # Get violations
        self.violations = violations
Example #4
def main(_):
    """Main function.

    Args:
        _ (list): Unused command-line args.
    """
    if FLAGS.timestamp is not None:
        timestamp = FLAGS.timestamp
    else:
        timestamp = _get_timestamp()

    if FLAGS.config is None:
        LOGGER.error('You must specify a notification pipeline')
        sys.exit()

    notifier_configs = FLAGS.FlagValuesDict()
    configs = file_loader.read_and_parse_file(FLAGS.config)

    # get violations
    v_dao = violation_dao.ViolationDao()
    violations = {}
    for resource in RESOURCE_MAP:
        try:
            violations[resource] = v_dao.get_all_violations(
                timestamp, RESOURCE_MAP[resource])
        except db_errors.MySQLError as e:
            # Continue on error: if there are no violations, the MySQL
            # table does not exist and the query raises an error.
            LOGGER.error('get_all_violations error: %s', e)
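main() above iterates a module-level RESOURCE_MAP from resource names to violation table names. A minimal sketch of its shape, with entries inferred from Example #7 below (illustrative, not the full map):

# Hypothetical sketch of RESOURCE_MAP: resource name -> violations table name.
# Entries are inferred from Example #7; the real map lives in the scanner code.
RESOURCE_MAP = {
    'violations': 'violations',
    'bucket_acl_violations': 'buckets_acl_violations',
}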
Example #5
def _output_results(all_violations, snapshot_timestamp, **kwargs):
    """Send the output results.

    Args:
        all_violations: The list of violations to report.
        snapshot_timestamp: The snapshot timestamp associated with this scan.
        **kwargs: The rest of the args.
    """

    # Write violations to database.
    flattening_scheme = kwargs.get('flattening_scheme')
    resource_name = sm.RESOURCE_MAP[flattening_scheme]
    (inserted_row_count, violation_errors) = (0, [])
    try:
        vdao = violation_dao.ViolationDao()
        (inserted_row_count, violation_errors) = vdao.insert_violations(
            all_violations,
            resource_name=resource_name,
            snapshot_timestamp=snapshot_timestamp)
    except db_errors.MySQLError as err:
        LOGGER.error('Error importing violations to database: %s', err)

    # TODO: figure out what to do with the errors. For now, just log it.
    LOGGER.debug('Inserted %s rows with %s errors', inserted_row_count,
                 len(violation_errors))

    # Write the CSV for all the violations.
    if FLAGS.output_path:
        output_csv_name = None
        with csv_writer.write_csv(resource_name=flattening_scheme,
                                  data=_flatten_violations(
                                      all_violations, flattening_scheme),
                                  write_header=True) as csv_file:
            output_csv_name = csv_file.name
            LOGGER.info('CSV filename: %s', output_csv_name)

            # Scanner timestamp for output file and email.
            now_utc = datetime.utcnow()

            output_path = FLAGS.output_path
            if not output_path.startswith('gs://'):
                if not os.path.exists(output_path):
                    os.makedirs(output_path)
                output_path = os.path.abspath(output_path)
            _upload_csv(output_path, now_utc, output_csv_name)

            # Send summary email.
            if FLAGS.email_recipient is not None:
                email_pipeline = (
                    email_scanner_summary_pipeline.EmailScannerSummaryPipeline(
                        FLAGS.sendgrid_api_key))
                email_pipeline.run(output_csv_name,
                                   _get_output_filename(now_utc), now_utc,
                                   all_violations,
                                   kwargs.get('resource_counts',
                                              {}), violation_errors,
                                   FLAGS.email_sender, FLAGS.email_recipient)
Example #6
    def _get_violation_dao(self):
        """Init or get violation dao.

        Returns:
            violation_dao: ViolationDao instance
        """
        if not self.violation_dao:
            self.violation_dao = violation_dao.ViolationDao(
                self.global_configs)
        return self.violation_dao
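Note the lazy-initialization pattern: the ViolationDao is created only on first use, deferring any database setup until a violation actually needs to be stored or read.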
Example #7
def main(_):
    """main function"""
    if FLAGS.timestamp is not None:
        timestamp = FLAGS.timestamp
    else:
        timestamp = _get_timestamp()

    if FLAGS.config is None:
        LOGGER.error('You must specify a notification pipeline')
        sys.exit()

    notifier_configs = FLAGS.FlagValuesDict()
    configs = file_loader.read_and_parse_file(FLAGS.config)

    # get violations
    v_dao = violation_dao.ViolationDao()
    violations = {
        'violations':
        v_dao.get_all_violations(timestamp, 'violations'),
        'bucket_acl_violations':
        v_dao.get_all_violations(timestamp, 'buckets_acl_violations')
    }
    for retrieved_v in violations:
        LOGGER.info('retrieved %d violations for resource \'%s\'',
                    len(violations[retrieved_v]), retrieved_v)

    # build notification pipelines
    pipelines = []
    for resource in configs['resources']:
        if violations.get(resource['resource']) is None:
            LOGGER.error('The resource name \'%s\' is invalid, skipping',
                         resource['resource'])
            continue
        if resource['should_notify'] is False:
            continue
        for pipeline in resource['pipelines']:
            LOGGER.info('Running \'%s\' pipeline for resource \'%s\'',
                        pipeline['name'], resource['resource'])
            chosen_pipeline = find_pipelines(pipeline['name'])
            pipelines.append(
                chosen_pipeline(resource['resource'], timestamp,
                                violations[resource['resource']],
                                notifier_configs, pipeline['configuration']))

    # run the pipelines
    for pipeline in pipelines:
        pipeline.run()
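main() only reads a few keys from the parsed config. A minimal sketch of the structure it expects after file_loader.read_and_parse_file, with illustrative values (the pipeline name here is hypothetical):

# Hypothetical parsed-config sketch; only the keys main() accesses are shown.
configs = {
    'resources': [
        {
            'resource': 'violations',   # must match a key in the violations dict
            'should_notify': True,
            'pipelines': [
                # 'name' is resolved via find_pipelines()
                {'name': 'email_violations_pipeline', 'configuration': {}},
            ],
        },
    ],
}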
Example #8
def main(_):
    """Main function.

    Args:
        _ (list): Unused command-line args.
    """
    notifier_flags = FLAGS.FlagValuesDict()

    forseti_config = notifier_flags.get('forseti_config')

    if forseti_config is None:
        LOGGER.error('Path to Forseti Security config needs to be specified.')
        sys.exit()

    try:
        configs = file_loader.read_and_parse_file(forseti_config)
    except IOError:
        LOGGER.error('Unable to open Forseti Security config file. '
                     'Please check your path and filename and try again.')
        sys.exit()
    global_configs = configs.get('global')
    notifier_configs = configs.get('notifier')

    timestamp = notifier_configs.get('timestamp')
    if timestamp is None:
        timestamp = _get_timestamp(global_configs)

    # get violations
    v_dao = violation_dao.ViolationDao(global_configs)
    violations_as_dict = v_dao.get_all_violations(timestamp)

    for violation in violations_as_dict:
        violation['created_at_datetime'] = (
            violation.get('created_at_datetime').strftime(
                '%Y-%m-%dT%H:%M:%SZ'))

    violations = {}
    try:
        violations = violation_dao.map_by_resource(violations_as_dict)
    except db_errors.MySQLError as e:
        # Continue on error: if there are no violations, the MySQL
        # table does not exist and the query raises an error.
        LOGGER.error('get_all_violations error: %s', e)
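violation_dao.map_by_resource groups the flat list of violation dicts by resource. A hedged sketch of such a grouping helper, assuming each violation dict carries a 'resource_type' key (an assumption; the real helper may key on a different field):

def map_by_resource(violations_as_dict):
    # Group violation dicts by resource type (assumes a 'resource_type'
    # key on every violation; the real implementation may differ).
    mapped = {}
    for violation in violations_as_dict:
        mapped.setdefault(violation['resource_type'], []).append(violation)
    return mapped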
Example #9
    def setUp(self, mock_db_connector):
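        """Set up test fixtures, with the DB connector stubbed out."""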
        mock_db_connector.return_value = None
        self.resource_name = 'violations'
        self.dao = violation_dao.ViolationDao()
        self.fake_snapshot_timestamp = '12345'
        self.fake_table_name = (
            '%s_%s' % (self.resource_name, self.fake_snapshot_timestamp))
        self.fake_violations = [
            rules.RuleViolation(
                resource_type='x',
                resource_id='1',
                rule_name='rule name',
                rule_index=0,
                violation_type='ADDED',
                role='roles/editor',
                members=[
                    iam.IamPolicyMember.create_from(m)
                    for m in ['user:[email protected]', 'user:[email protected]']
                ],
            ),
            rules.RuleViolation(
                resource_type='%sb' % ('a' * 300),
                resource_id='1',
                rule_name='%sd' % ('c' * 300),
                rule_index=1,
                violation_type='REMOVED',
                role='%s' % ('e' * 300),
                members=[
                    iam.IamPolicyMember.create_from('user:%sh' % ('g' * 300))
                ],
            ),
        ]

        self.expected_fake_violations = [
            ('x', '1', 'rule name', 0, 'ADDED', 'roles/editor',
             'user:[email protected]'),
            ('x', '1', 'rule name', 0, 'ADDED', 'roles/editor',
             'user:[email protected]'),
            ('a' * 255, '1', 'c' * 255, 1, 'REMOVED', 'e' * 255,
             ('user:%s' % ('g' * 300))[:255]),
        ]
Example #10
    def setUp(self, mock_db_connector):
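        """Set up test fixtures, with the DB connector stubbed out."""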
        mock_db_connector.return_value = None
        self.resource_name = 'violations'
        self.dao = violation_dao.ViolationDao()
        self.fake_snapshot_timestamp = '12345'
        self.fake_table_name = (
            '%s_%s' % (self.resource_name, self.fake_snapshot_timestamp))
        self.fake_violations = [
            rules.RuleViolation(
                resource_type='x',
                resource_id='1',
                rule_name='rule name',
                rule_index=0,
                violation_type='ADDED',
                role='roles/editor',
                members=[
                    iam.IamPolicyMember.create_from(m)
                    for m in ['user:[email protected]', 'user:[email protected]']
                ],
            ),
            rules.RuleViolation(
                resource_type='%se' % ('a' * 300),
                resource_id='1',
                rule_name='%sh' % ('b' * 300),
                rule_index=1,
                violation_type='REMOVED',
                role='%s' % ('c' * 300),
                members=[
                    iam.IamPolicyMember.create_from('user:%s' % ('d' * 300))
                ],
            ),
        ]
        long_string = '{"member": "user:%s", "role": "%s"}' % (('d' * 300),
                                                               ('c' * 300))

        self.fake_flattened_violations = [
            {
                'resource_id': '1',
                'resource_type': self.fake_violations[0].resource_type,
                'rule_index': 0,
                'rule_name': self.fake_violations[0].rule_name,
                'violation_type': self.fake_violations[0].violation_type,
                'violation_data': {
                    'role': self.fake_violations[0].role,
                    'member': 'user:[email protected]'
                }
            },
            {
                'resource_id': '1',
                'resource_type': self.fake_violations[0].resource_type,
                'rule_index': 0,
                'rule_name': self.fake_violations[0].rule_name,
                'violation_type': self.fake_violations[0].violation_type,
                'violation_data': {
                    'role': self.fake_violations[0].role,
                    'member': 'user:[email protected]'
                }
            },
            {
                'resource_id': '1',
                'resource_type': self.fake_violations[1].resource_type,
                'rule_index': 1,
                'rule_name': self.fake_violations[1].rule_name,
                'violation_type': self.fake_violations[1].violation_type,
                'violation_data': {
                    'role': self.fake_violations[1].role,
                    'member': 'user:%s' % ('d' * 300)
                }
            },
        ]

        self.expected_fake_violations = [
            ('x', '1', 'rule name', 0, 'ADDED',
             '{"member": "user:[email protected]", "role": "roles/editor"}'),
            ('x', '1', 'rule name', 0, 'ADDED',
             '{"member": "user:[email protected]", "role": "roles/editor"}'),
            ('a' * 255, '1', 'b' * 255, 1, 'REMOVED', long_string),
        ]
Example #11
def _output_results(all_violations, snapshot_timestamp, **kwargs):
    """Send the output results.

    Args:
        all_violations: The list of violations to report.
        snapshot_timestamp: The snapshot timestamp associated with this scan.
        **kwargs: The rest of the args.
    """

    # Write violations to database.
    flattening_scheme = kwargs.get('flattening_scheme')
    resource_name = sm.RESOURCE_MAP[flattening_scheme]
    (inserted_row_count, violation_errors) = (0, [])
    all_violations = _flatten_violations(all_violations, flattening_scheme)
    try:
        vdao = violation_dao.ViolationDao()
        (inserted_row_count, violation_errors) = vdao.insert_violations(
            all_violations,
            resource_name=resource_name,
            snapshot_timestamp=snapshot_timestamp)
    except db_errors.MySQLError as err:
        LOGGER.error('Error importing violations to database: %s', err)

    # TODO: figure out what to do with the errors. For now, just log it.
    LOGGER.debug('Inserted %s rows with %s errors', inserted_row_count,
                 len(violation_errors))

    # Write the CSV for all the violations.
    if FLAGS.output_path:
        LOGGER.info('Writing violations to csv...')
        output_csv_name = None
        with csv_writer.write_csv(resource_name=resource_name,
                                  data=all_violations,
                                  write_header=True) as csv_file:
            output_csv_name = csv_file.name
            LOGGER.info('CSV filename: %s', output_csv_name)

            # Scanner timestamp for output file and email.
            now_utc = datetime.utcnow()

            output_path = FLAGS.output_path
            if not output_path.startswith('gs://'):
                if not os.path.exists(output_path):
                    os.makedirs(output_path)
                output_path = os.path.abspath(output_path)
            _upload_csv(output_path, now_utc, output_csv_name)

            # Send summary email.
            if FLAGS.email_recipient is not None:
                payload = {
                    'email_sender': FLAGS.email_sender,
                    'email_recipient': FLAGS.email_recipient,
                    'sendgrid_api_key': FLAGS.sendgrid_api_key,
                    'output_csv_name': output_csv_name,
                    'output_filename': _get_output_filename(now_utc),
                    'now_utc': now_utc,
                    'all_violations': all_violations,
                    'resource_counts': kwargs.get('resource_counts', {}),
                    'violation_errors': violation_errors
                }
                message = {'status': 'scanner_done', 'payload': payload}
                notifier.process(message)
Example #12
    def setUp(self, mock_db_connector):
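        """Set up test fixtures, with the DB connector stubbed out."""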
        mock_db_connector.return_value = None
        self.resource_name = 'violations'
        self.dao = violation_dao.ViolationDao()
        self.fake_snapshot_timestamp = '12345'
        self.fake_table_name = (
            '%s_%s' % (self.resource_name, self.fake_snapshot_timestamp))
        self.fake_violations = [
            rules.RuleViolation(
                resource_type='x',
                resource_id='1',
                rule_name='rule name',
                rule_index=0,
                violation_type='ADDED',
                role='roles/editor',
                members=[
                    iam.IamPolicyMember.create_from(m)
                    for m in ['user:[email protected]', 'user:[email protected]']
                ],
            ),
            rules.RuleViolation(
                resource_type='%se' % ('a' * 300),
                resource_id='1',
                rule_name='%sh' % ('b' * 300),
                rule_index=1,
                violation_type='REMOVED',
                role='%s' % ('c' * 300),
                members=[
                    iam.IamPolicyMember.create_from('user:%s' % ('d' * 300))
                ],
            ),
        ]
        long_string = ('{"member": "user:%s", "role": "%s"}'
                       % (('d' * 300), ('c' * 300)))

        self.fake_flattened_violations = [
            {
                'resource_id': '1',
                'resource_type': self.fake_violations[0].resource_type,
                'rule_index': 0,
                'rule_name': self.fake_violations[0].rule_name,
                'violation_type': self.fake_violations[0].violation_type,
                'violation_data': {
                    'role': self.fake_violations[0].role,
                    'member': 'user:[email protected]'
                }
            },
            {
                'resource_id': '1',
                'resource_type': self.fake_violations[0].resource_type,
                'rule_index': 0,
                'rule_name': self.fake_violations[0].rule_name,
                'violation_type': self.fake_violations[0].violation_type,
                'violation_data': {
                    'role': self.fake_violations[0].role,
                    'member': 'user:[email protected]'
                }
            },
            {
                'resource_id': '1',
                'resource_type': self.fake_violations[1].resource_type,
                'rule_index': 1,
                'rule_name': self.fake_violations[1].rule_name,
                'violation_type': self.fake_violations[1].violation_type,
                'violation_data': {
                    'role': self.fake_violations[1].role,
                    'member': 'user:%s' % ('d'*300)
                }
            },
        ]

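        # Each expected row carries a leading 128-char hex digest and a
        # trailing created_at timestamp in addition to the flattened fields.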
        self.expected_fake_violations = [
            ('2e598a34bf36b5ad577f6e8eec47acf0ffe71143e82bc78c9cfaac8f553a4fa4f09078ebe0769639d9dc9fffc2dffb94345ae6696fbe4e646e8b9bb723fd3ab0',
             'x', '1', 'rule name', 0, 'ADDED',
             '{"member": "user:[email protected]", "role": "roles/editor"}',
             '2020-08-28 10:20:30'),
            ('5420235c547006300e7842c45c9b0419bc1c5590fd4200607e9925010123f5905f374c2b182420f6ecb161146c8d18c6683045026c810009154b03a56f5a94f5',
             'x', '1', 'rule name', 0, 'ADDED',
             '{"member": "user:[email protected]", "role": "roles/editor"}',
             '2010-08-28 10:20:30'),
            ('i'*255,
             'a'*255, '1', 'b'*255, 1, 'REMOVED', long_string,
             '2030-08-28 10:20:30'),
        ]