Code Example #1
File: sqs_test.py Project: threathive/amira
    def test_get_created_objects(self, mock_sqs_queue):
        s3_event_notification_message_mocks = mock_s3_event_notifications(
            mock_sqs_queue, 's3_event_notifications.json',
        )
        sqs_handler = SqsHandler('us-west-1', 'godzilla')
        created_objects = sqs_handler.get_created_objects()
        actual_key_names = [
            created_object.key_name
            for created_object in created_objects
        ]

        expected_key_names = [
            'AMIRA-1561-2016_01_11-10_54_07.tar.gz',
            'AMIRA-1562-2016_01_11-10_54_47.tar.gz',
            'AMIRA-1563-2016_01_11-10_54_58.tar.gz',
            'AMIRA-1564-2016_01_11-10_55_12.tar.gz',
            'AMIRA-1565-2016_01_11-10_55_32.tar.gz',
            'AMIRA-1566-2016_01_11-10_55_49.tar.gz',
            'AMIRA-1567-2016_01_11-10_56_09.tar.gz',
        ]
        assert expected_key_names == actual_key_names

        mock_sqs_queue.delete_message_batch.assert_called_once_with(
            s3_event_notification_message_mocks,
        )
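
The `mock_s3_event_notifications` helper used throughout these tests is not
shown on this page. A minimal sketch of what it might look like, assuming the
fixture file holds a JSON list of notification bodies and that the handler
polls `get_messages()` until it receives an empty batch (both assumptions):

# Hypothetical reconstruction of the helper these tests rely on; the
# real implementation lives in the amira test suite and may differ.
import json

from mock import MagicMock


def mock_s3_event_notifications(mock_sqs_queue, fixture_file_name):
    """Loads S3 event notification fixtures from a JSON file and wires
    them into the mocked SQS queue as message mocks."""
    with open(fixture_file_name) as f:
        notification_bodies = json.load(f)

    message_mocks = []
    for notification_body in notification_bodies:
        message_mock = MagicMock()
        # legacy boto messages expose their payload via get_body()
        message_mock.get_body.return_value = json.dumps(notification_body)
        message_mocks.append(message_mock)

    # the first poll returns the mocked messages, the second poll returns
    # an empty batch so that get_created_objects() stops iterating
    mock_sqs_queue.get_messages.side_effect = [message_mocks, []]
    return message_mocks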
Code Example #2
File: sqs_test.py Project: threathive/amira
    def test_get_created_objects_no_created_objects(self, mock_sqs_queue):
        mock_sqs_queue.get_messages.side_effect = [[]]

        sqs_handler = SqsHandler('us-west-1', 'godzilla')
        created_objects = sqs_handler.get_created_objects()
        assert 0 == len(list(created_objects))

        assert mock_sqs_queue.delete_message_batch.called is False
Code Example #3
File: sqs_test.py Project: Bijaye/amira
    def test_get_created_objects_no_records(self, mock_sqs_queue):
        """Tests the behavior of `get_created_objects()` method in case
        the message received from SQS does not contain the "Records"
        field in the message body.
        """
        mock_s3_event_notifications(
            mock_sqs_queue, 's3_test_event_notification.json')

        sqs_handler = SqsHandler('us-west-2', 'godzilla')
        created_objects = sqs_handler.get_created_objects()
        created_objects = list(created_objects)
        assert [] == created_objects
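
The fixture name points at the `s3:TestEvent` message that S3 publishes when
event notifications are first configured on a bucket; unlike a real
`ObjectCreated` notification, it carries no "Records" key, which is exactly
the case this test covers. Its body looks roughly like this (values
illustrative):

# Approximate shape of an s3:TestEvent notification body; note the
# absence of the "Records" key that get_created_objects() would read.
S3_TEST_EVENT_BODY = {
    'Service': 'Amazon S3',
    'Event': 's3:TestEvent',
    'Time': '2016-01-11T10:54:07.000Z',
    'Bucket': 'godzilla',
    'RequestId': '...',
    'HostId': '...',
}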
Code Example #4
File: sqs_test.py Project: ekmixon/amira
def sqs_handler():
    with patch('amira.sqs.boto3') as mock_boto3:
        handler = SqsHandler('us-west-1', 'godzilla')
        mock_boto3.resource.assert_called_once_with('sqs', region_name='us-west-1')
        mock_boto3.resource.return_value.get_queue_by_name.assert_called_once_with(
            QueueName='godzilla',
        )
        yield handler
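
Because the fixture yields inside the `with patch(...)` block, `boto3` stays
patched for the whole duration of any test that requests it, and the
constructor assertions run once per test. An illustrative use:

# Illustrative only: any test naming `sqs_handler` as a parameter
# receives the handler built by the fixture above.
def test_sqs_handler_fixture(sqs_handler):
    assert isinstance(sqs_handler, SqsHandler)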
Code Example #5
File: sqs_test.py Project: threathive/amira
    def test_queue_not_found(self):
        boto.sqs.connect_to_region = MagicMock()
        sqs_connection_mock = boto.sqs.connect_to_region.return_value
        sqs_connection_mock.get_queue.return_value = None

        with pytest.raises(SqsQueueNotFoundException) as e:
            SqsHandler('us-west-1', 'godzilla')

        assert 'SQS queue godzilla not found.' == str(e.value)
        boto.sqs.connect_to_region.assert_called_once_with('us-west-1')
        sqs_connection_mock.get_queue.assert_called_once_with('godzilla')
Code Example #6
class AMIRA(object):
    """Runs the automated analysis based on the new elements in an S3
    bucket:
        1. Receives the messages from the SQS queue about the new
           objects in the S3 bucket.
        2. Retrieves the objects (OSXCollector output files) from the
           bucket.
        3. Runs the Analyze Filter on the retrieved OSXCollector
           output.
        4. Uploads the analysis results.

    JIRA integration is optional. If any of the JIRA parameters
    (`jira_server`, `jira_user`, `jira_password` or `jira_project`)
    is not supplied or `None`, attaching the analysis results to a JIRA
    issue will be skipped.

    :param region_name: The AWS region name where the SQS queue
                        containing the S3 event notifications is
                        configured.
    :type region_name: string
    :param queue_name: The name of the SQS queue containing the S3
                       event notifications.
    :type queue_name: string
    """

    def __init__(self, region_name, queue_name):
        self._sqs_handler = SqsHandler(region_name, queue_name)
        self._s3_handler = S3Handler()
        self._results_uploader = []

    def register_results_uploader(self, results_uploader):
        """Registers results uploader.

        Results uploader will upload the analysis results and the
        summary to a specific destination after the analysis is
        finished.
        """
        self._results_uploader.append(results_uploader)

    def run(self):
        """Fetches the OSXCollector output from an S3 bucket based on
        the S3 ObjectCreated event notifications and runs the Analyze
        Filter on the output file.
        Once the analysis is finished, the output and the "very
        readable output" files are uploaded to the target S3 bucket.
        """
        created_objects = self._sqs_handler.get_created_objects()

        for created_object in created_objects:
            if created_object.key_name.endswith('.tar.gz'):
                self._process_created_object(created_object)
            else:
                logging.warning(
                    'S3 object {0} name should end with ".tar.gz"'
                    .format(created_object.key_name))

    def _process_created_object(self, created_object):
        """Fetches the object from an S3 bucket and runs the analysis.
        Then it sends the results to the target S3 bucket and attaches
        them to the JIRA ticket.
        """
        # fetch the OSXCollector output from the S3 bucket
        self._osxcollector_output = self._s3_handler.get_contents_as_string(
            created_object.bucket_name, created_object.key_name)
        self._extract_osxcollector_output_json_file()

        try:
            self._run_analyze_filter()
            self._upload_analysis_results(created_object.key_name)
        except Exception:
            # Log the exception and do not try any recovery.
            # The message that caused the exception will be deleted from the
            # SQS queue to prevent the same exception from happening in the
            # future.
            logging.exception(
                'Unexpected error while running the Analyze Filter')

    def _extract_osxcollector_output_json_file(self):
        """Extracts JSON file containing the OSXCollector output from
        tar.gz archive. It will look in the archive contents for the
        file with the extension ".json". If no file with this extension
        is found in the archive or more than one JSON file is found, it
        will raise `OSXCollectorOutputExtractionError`.
        """
        # create a file-like object based on the S3 object contents as string
        fileobj = StringIO(self._osxcollector_output)
        tar = tarfile.open(mode='r:gz', fileobj=fileobj)
        json_tarinfo = [t for t in tar if t.name.endswith('.json')]

        if 1 != len(json_tarinfo):
            raise OSXCollectorOutputExtractionError(
                'Expected 1 JSON file inside the OSXCollector output archive, '
                'but found {0} instead.'.format(len(json_tarinfo)))

        tarinfo = json_tarinfo[0]
        self._osxcollector_output_json_file = tar.extractfile(tarinfo)
        logging.info(
            'Extracted OSXCollector output JSON file {0}'.format(tarinfo.name))

    def _run_analyze_filter(self):
        """Runs Analyze Filter on the OSXCollector output retrieved
        from an S3 bucket.
        """
        self._analysis_output = StringIO()
        self._text_analysis_summary = StringIO()
        self._html_analysis_summary = StringIO()

        analyze_filter = AnalyzeFilter(
            monochrome=True,
            text_output_file=self._text_analysis_summary,
            html_output_file=self._html_analysis_summary)

        _run_filter(
            analyze_filter,
            input_stream=self._osxcollector_output_json_file,
            output_stream=self._analysis_output)

        # rewind the output files
        self._analysis_output.seek(0)
        self._text_analysis_summary.seek(0)
        self._html_analysis_summary.seek(0)

    def _upload_analysis_results(self, osxcollector_output_filename):
        # drop the file extension (".tar.gz")
        filename_without_extension = osxcollector_output_filename[:-7]

        analysis_output_filename = '{0}_analysis.json'.format(
            filename_without_extension)
        text_analysis_summary_filename = '{0}_summary.txt'.format(
            filename_without_extension)
        html_analysis_summary_filename = '{0}_summary.html'.format(
            filename_without_extension)

        results = [
            FileMetaInfo(
                osxcollector_output_filename,
                StringIO(self._osxcollector_output), 'application/gzip'),
            FileMetaInfo(
                analysis_output_filename, self._analysis_output,
                'application/json'),
            FileMetaInfo(
                text_analysis_summary_filename, self._text_analysis_summary,
                'text/plain'),
            FileMetaInfo(
                html_analysis_summary_filename, self._html_analysis_summary,
                'text/html; charset=UTF-8'),
        ]

        for results_uploader in self._results_uploader:
            results_uploader.upload_results(results)
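
Wiring the class together takes three steps: construct it with the queue
location, register at least one results uploader, and call `run()`. A minimal
sketch, where `S3ResultsUploader` stands in for any object exposing
`upload_results(results)` (the name and its constructor argument are
assumptions):

# Hypothetical wiring; any object with an upload_results(results)
# method can serve as a results uploader.
amira = AMIRA('us-west-1', 'godzilla')
amira.register_results_uploader(S3ResultsUploader('analysis-results-bucket'))

# polls the SQS queue for S3 ObjectCreated notifications and processes
# every ".tar.gz" object it finds
amira.run()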
Code Example #7
File: amira.py Project: royaflash/amira
class AMIRA(object):
    """Runs the automated analysis based on the new elements in an S3
    bucket:
        1. Receives the messages from the SQS queue about the new
           objects in the S3 bucket.
        2. Retrieves the objects (OSXCollector output files) from the
           bucket.
        3. Runs the Analyze Filter on the retrieved OSXCollector
           output.
        4. Uploads the analysis results.

    JIRA integration is optional. If any of the JIRA parameters
    (`jira_server`, `jira_user`, `jira_password` or `jira_project`)
    is not supplied or `None`, attaching the analysis results to a JIRA
    issue will be skipped.

    :param region_name: The AWS region name where the SQS queue
                        containing the S3 event notifications is
                        configured.
    :type region_name: string
    :param queue_name: The name of the SQS queue containing the S3
                       event notifications.
    :type queue_name: string
    """
    def __init__(self, region_name, queue_name):
        self._sqs_handler = SqsHandler(region_name, queue_name)
        self._s3_handler = S3Handler()
        self._results_uploader = []
        self._data_feeds = {}
        self._data_processor = OSXCollectorDataProcessor()

    def register_results_uploader(self, results_uploader):
        """Registers results uploader.

        Results uploader will upload the analysis results and the
        summary to a specific destination after the analysis is
        finished.
        """
        self._results_uploader.append(results_uploader)

    def register_data_feed(self, feed_name, generator):
        """Register data input which to be used by the OsXCollector filters

        :param feed_name: Name of the data feed
        :param generator: Generator function providing the data
        """
        self._data_feeds[feed_name] = generator

    def register_data_processor(self, processor):
        """Registers DataProcessor object to process and analyze input data from S3.
        If no processor is registered Amira will fall back using the default
        OSXCollector result processor.

        :param processor: DataProcessor object instance
        """
        self._data_processor = processor

    def run(self):
        """Fetches the OSXCollector output from an S3 bucket based on
        the S3 ObjectCreated event notifications and runs the Analyze
        Filter on the output file.
        Once the analysis is finished, the output and the "very
        readable output" files are uploaded to the target S3 bucket.
        """
        created_objects = self._sqs_handler.get_created_objects()

        for created_object in created_objects:
            if created_object.key_name.endswith('.tar.gz'):
                self._process_created_object(created_object)
            else:
                logging.warning(
                    'S3 object {0} name should end with ".tar.gz"'.format(
                        created_object.key_name))

    def _process_created_object(self, created_object):
        """Fetches the object from an S3 bucket and runs the analysis.
        Then it sends the results to the target S3 bucket and attaches
        them to the JIRA ticket.
        """
        # fetch forensic data from the S3 bucket
        forensic_output = self._s3_handler.get_contents_as_string(
            created_object.bucket_name,
            created_object.key_name,
        )
        processed_input = self._data_processor.process_input(forensic_output)

        try:
            self._data_processor.perform_analysis(processed_input,
                                                  self._data_feeds)
        except Exception as exc:
            # Log the exception and do not try any recovery.
            # The message that caused the exception will be deleted from the
            # SQS queue to prevent the same exception from happening in the
            # future.
            logging.warning(
                'Unexpected error while running the Analyze Filter for the '
                'object {0}: {1}'.format(created_object.key_name, exc))
        try:
            self._data_processor.upload_results(
                created_object.key_name[:-7],
                self._results_uploader,
            )
        except Exception:
            logging.exception(
                'Unexpected error while uploading results for the '
                'object: {0}'.format(created_object.key_name))
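
In this revision the analysis pipeline is pluggable: judging by the calls in
`_process_created_object()`, a processor needs `process_input()`,
`perform_analysis()`, and `upload_results()`. A hedged sketch of a minimal
processor and a data feed registration (all names below are illustrative, not
part of amira):

# Illustrative only: the method names follow the calls made by
# _process_created_object() above; this class is not part of amira.
class PassthroughDataProcessor(object):

    def __init__(self):
        self._results = []

    def process_input(self, forensic_output):
        # no preprocessing; hand the raw S3 object contents through
        return forensic_output

    def perform_analysis(self, processed_input, data_feeds):
        # a real processor would run its filters here, optionally
        # consuming the registered data feeds
        pass

    def upload_results(self, filename_prefix, results_uploaders):
        # filename_prefix is the S3 key name without the ".tar.gz"
        for results_uploader in results_uploaders:
            results_uploader.upload_results(self._results)


def blacklisted_domains():
    # a data feed is any generator function yielding the feed's items
    yield 'evil.example.com'


amira = AMIRA('us-west-1', 'godzilla')
amira.register_data_feed('blacklisted_domains', blacklisted_domains)
amira.register_data_processor(PassthroughDataProcessor())
amira.run()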