# Exemplo n.º 1 — 0
def main(_):
    """Runs the Inventory Loader.

    Loads flags, builds the inventory pipelines, runs them, records the
    snapshot cycle outcome, and optionally emails a summary.

    Args:
        _ (list): args that aren't used
    """
    del _
    cli_flags = FLAGS.FlagValuesDict()

    if cli_flags.get('list_resources'):
        inventory_util.list_resource_pipelines()
        sys.exit()

    _configure_logging(cli_flags.get('loglevel'))

    config_path = cli_flags.get('config_path')
    if config_path is None:
        LOGGER.error('Path to pipeline config needs to be specified.')
        sys.exit()

    dao_map = _create_dao_map()
    dao = dao_map.get('dao')
    cycle_time, cycle_timestamp = _start_snapshot_cycle(dao)

    # NOTE(review): 'flags' below is not defined in this block — presumably a
    # module-level name; verify it is not meant to be the flags dict above.
    pipeline_builder = builder.PipelineBuilder(
        cycle_timestamp, config_path, flags, api_map.API_MAP, dao_map)
    pipelines = pipeline_builder.build()

    run_statuses = _run_pipelines(pipelines)
    snapshot_cycle_status = (
        'SUCCESS' if all(run_statuses)
        else 'PARTIAL_SUCCESS' if any(run_statuses)
        else 'FAILURE')

    _complete_snapshot_cycle(dao, cycle_timestamp, snapshot_cycle_status)

    # Email a run summary only when a recipient was configured.
    if cli_flags.get('email_recipient') is not None:
        notifier.process({
            'status': 'inventory_done',
            'payload': {
                'email_sender': cli_flags.get('email_sender'),
                'email_recipient': cli_flags.get('email_recipient'),
                'sendgrid_api_key': cli_flags.get('sendgrid_api_key'),
                'cycle_time': cycle_time,
                'cycle_timestamp': cycle_timestamp,
                'snapshot_cycle_status': snapshot_cycle_status,
                'pipelines': pipelines,
            },
        })
# Exemplo n.º 2 — 0
    def _output_results(self, all_violations, resource_counts):
        """Output results.

        Flattens the violations, writes them to the database, and — when an
        output path is configured — writes a CSV, uploads it, and optionally
        emails a summary.

        Args:
            all_violations (list): A list of violations
            resource_counts (dict): Resource count map.
        """
        resource_name = 'violations'

        all_violations = list(self._flatten_violations(all_violations))
        violation_errors = self._output_results_to_db(resource_name,
                                                      all_violations)

        # Write the CSV for all the violations.
        # TODO: Move this into the base class? The IAP scanner version of this
        # is a wholesale copy.
        # Fetch the configured path once instead of re-reading the config map
        # at each use site (was previously looked up three times).
        output_path = self.scanner_configs.get('output_path')
        if output_path:
            LOGGER.info('Writing violations to csv...')
            output_csv_name = None
            with csv_writer.write_csv(resource_name=resource_name,
                                      data=all_violations,
                                      write_header=True) as csv_file:
                output_csv_name = csv_file.name
                LOGGER.info('CSV filename: %s', output_csv_name)

                # Scanner timestamp for output file and email.
                now_utc = datetime.utcnow()

                if not output_path.startswith('gs://'):
                    # Local destination: make sure the directory exists,
                    # then normalize to an absolute path before upload.
                    if not os.path.exists(output_path):
                        os.makedirs(output_path)
                    output_path = os.path.abspath(output_path)
                self._upload_csv(output_path, now_utc, output_csv_name)

                # Send summary email.
                # TODO: Untangle this email by looking for the csv content
                # from the saved copy.
                if self.global_configs.get('email_recipient') is not None:
                    payload = {
                        'email_description': 'Policy Scan',
                        'email_sender':
                            self.global_configs.get('email_sender'),
                        'email_recipient':
                            self.global_configs.get('email_recipient'),
                        'sendgrid_api_key':
                            self.global_configs.get('sendgrid_api_key'),
                        'output_csv_name': output_csv_name,
                        'output_filename': self._get_output_filename(now_utc),
                        'now_utc': now_utc,
                        'all_violations': all_violations,
                        'resource_counts': resource_counts,
                        'violation_errors': violation_errors,
                    }
                    message = {'status': 'scanner_done', 'payload': payload}
                    notifier.process(message)
# Exemplo n.º 3 — 0
def _output_results(all_violations, snapshot_timestamp, **kwargs):
    """Send the output results.

    Args:
        all_violations: The list of violations to report.
        snapshot_timestamp: The snapshot timestamp associated with this scan.
        **kwargs: The rest of the args.
    """

    # Write violations to database.
    flattening_scheme = kwargs.get('flattening_scheme')
    resource_name = sm.RESOURCE_MAP[flattening_scheme]
    (inserted_row_count, violation_errors) = (0, [])
    all_violations = _flatten_violations(all_violations, flattening_scheme)
    try:
        vdao = violation_dao.ViolationDao()
        (inserted_row_count, violation_errors) = vdao.insert_violations(
            all_violations,
            resource_name=resource_name,
            snapshot_timestamp=snapshot_timestamp)
    except db_errors.MySQLError as err:
        # A database failure must not abort the run; CSV/email output below
        # can still proceed with violation_errors left empty.
        LOGGER.error('Error importing violations to database: %s', err)

    # TODO: figure out what to do with the errors. For now, just log it.
    LOGGER.debug('Inserted %s rows with %s errors', inserted_row_count,
                 len(violation_errors))

    # Write the CSV for all the violations.
    if FLAGS.output_path:
        LOGGER.info('Writing violations to csv...')
        output_csv_name = None
        with csv_writer.write_csv(resource_name=resource_name,
                                  data=all_violations,
                                  write_header=True) as csv_file:
            output_csv_name = csv_file.name
            LOGGER.info('CSV filename: %s', output_csv_name)

            # Scanner timestamp for output file and email.
            now_utc = datetime.utcnow()

            output_path = FLAGS.output_path
            if not output_path.startswith('gs://'):
                # Local destination: use the already-bound local instead of
                # re-reading FLAGS; ensure the directory exists, then
                # normalize to an absolute path before upload.
                if not os.path.exists(output_path):
                    os.makedirs(output_path)
                output_path = os.path.abspath(output_path)
            _upload_csv(output_path, now_utc, output_csv_name)

            # Send summary email.
            if FLAGS.email_recipient is not None:
                payload = {
                    'email_sender': FLAGS.email_sender,
                    'email_recipient': FLAGS.email_recipient,
                    'sendgrid_api_key': FLAGS.sendgrid_api_key,
                    'output_csv_name': output_csv_name,
                    'output_filename': _get_output_filename(now_utc),
                    'now_utc': now_utc,
                    'all_violations': all_violations,
                    'resource_counts': kwargs.get('resource_counts', {}),
                    'violation_errors': violation_errors
                }
                message = {'status': 'scanner_done', 'payload': payload}
                notifier.process(message)
# Exemplo n.º 4 — 0
def main(_):
    """Runs the Inventory Loader.

    Reads the Forseti config, builds and runs the inventory pipelines,
    records the snapshot cycle outcome, and optionally emails a summary.

    Args:
        _ (list): args that aren't used
    """
    del _
    inventory_flags = FLAGS.FlagValuesDict()

    if inventory_flags.get('list_resources'):
        inventory_util.list_resource_pipelines()
        sys.exit()

    forseti_config = inventory_flags.get('forseti_config')
    if forseti_config is None:
        LOGGER.error('Path to Forseti Security config needs to be specified.')
        sys.exit()

    try:
        configs = file_loader.read_and_parse_file(forseti_config)
    except IOError:
        LOGGER.error('Unable to open Forseti Security config file. '
                     'Please check your path and filename and try again.')
        sys.exit()

    global_configs = configs.get('global')
    inventory_configs = configs.get('inventory')
    log_util.set_logger_level_from_config(inventory_configs.get('loglevel'))

    dao_map = _create_dao_map(global_configs)
    dao = dao_map.get('dao')
    cycle_time, cycle_timestamp = _start_snapshot_cycle(dao)

    pipelines = builder.PipelineBuilder(
        cycle_timestamp,
        inventory_configs,
        global_configs,
        api_map.API_MAP,
        dao_map).build()

    # Classify the overall cycle from the per-pipeline run statuses.
    statuses = _run_pipelines(pipelines)
    snapshot_cycle_status = 'FAILURE'
    if all(statuses):
        snapshot_cycle_status = 'SUCCESS'
    elif any(statuses):
        snapshot_cycle_status = 'PARTIAL_SUCCESS'

    _complete_snapshot_cycle(dao, cycle_timestamp, snapshot_cycle_status)

    # Email a run summary only when a recipient was configured.
    if global_configs.get('email_recipient') is not None:
        payload = {
            'email_sender': global_configs.get('email_sender'),
            'email_recipient': global_configs.get('email_recipient'),
            'sendgrid_api_key': global_configs.get('sendgrid_api_key'),
            'cycle_time': cycle_time,
            'cycle_timestamp': cycle_timestamp,
            'snapshot_cycle_status': snapshot_cycle_status,
            'pipelines': pipelines,
        }
        notifier.process({'status': 'inventory_done', 'payload': payload})