Example #1
    def test_writing_event_defaults(self):
        query = SearchQuery()
        default_event = {}
        self.populate_test_event(default_event)
        self.refresh(self.event_index_name)

        query.add_must(ExistsMatch('summary'))
        results = query.execute(self.es_client)
        assert len(results['hits']) == 1
        assert sorted(results['hits'][0].keys()) == [
            '_id', '_index', '_score', '_source', '_type'
        ]
        saved_event = results['hits'][0]['_source']
        assert 'category' in saved_event
        assert 'details' in saved_event
        assert 'hostname' in saved_event
        assert 'mozdefhostname' in saved_event
        assert 'processid' in saved_event
        assert 'processname' in saved_event
        assert 'receivedtimestamp' in saved_event
        assert 'severity' in saved_event
        assert 'source' in saved_event
        assert 'summary' in saved_event
        assert 'tags' in saved_event
        assert 'timestamp' in saved_event
        assert 'utctimestamp' in saved_event
        assert 'category' in saved_event
Example #2
    def main(self):
        self.parse_config("proxy_drop_executable.conf", ["extensions"])

        search_query = SearchQuery(minutes=20)

        search_query.add_must(
            [
                TermMatch("category", "proxy"),
                TermMatch("details.proxyaction", "TCP_DENIED"),
            ]
        )

        # Only notify on certain file extensions from config
        filename_regex = "/.*\.({0})/".format(self.config.extensions.replace(",", "|"))
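        # e.g. extensions = "exe,zip" produces the pattern "/.*\.(exe|zip)/"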
        search_query.add_must(
            [QueryStringMatch("details.destination: {}".format(filename_regex))]
        )

        self.filtersManual(search_query)

        # Search aggregations on field 'details.sourceipaddress', keep X
        # samples of events at most
        self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
        # alert when >= X matching events in an aggregation
        # I think it makes sense to alert every time here
        self.walkAggregations(threshold=1)
Example #3
    def test_writing_with_type(self):
        query = SearchQuery()
        default_event = {
            "_type": "example",
            "_source": {
                "receivedtimestamp": UnitTestSuite.current_timestamp(),
                "summary": "Test summary",
                "details": {
                    "note": "Example note",
                }
            }
        }
        self.populate_test_event(default_event)
        self.refresh(self.event_index_name)

        query.add_must(ExistsMatch('summary'))
        results = query.execute(self.es_client)
        assert len(results['hits']) == 1
        assert sorted(results['hits'][0].keys()) == [
            '_id', '_index', '_score', '_source', '_type'
        ]
        assert results['hits'][0]['_type'] == 'example'
        assert results['hits'][0]['_source']['summary'] == 'Test summary'
        assert results['hits'][0]['_source']['details'] == {
            "note": "Example note"
        }
Example #4
    def test_beginning_time_seconds_received_timestamp(self):
        query = SearchQuery(seconds=10)
        query.add_must(ExistsMatch('summary'))
        assert query.date_timedelta == {'seconds': 10}

        default_event = {
            "receivedtimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }
        self.populate_test_event(default_event)

        too_old_event = default_event
        too_old_event[
            'receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
                {'seconds': 11})
        too_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
            {'seconds': 11})
        self.populate_test_event(too_old_event)

        not_old_event = default_event
        not_old_event[
            'receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
                {'seconds': 9})
        not_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
            {'seconds': 9})
        self.populate_test_event(not_old_event)

        self.refresh(self.event_index_name)

        results = query.execute(self.es_client)
        assert len(results['hits']) == 2
Example #5
 def process_alert(self):
     search_query = SearchQuery(minutes=20)
     content = QueryStringMatch(str(self.watchterm))
     search_query.add_must(content)
     self.filtersManual(search_query)
     self.searchEventsSimple()
     self.walkEvents()
Example #6
def kibanaDashboards():
    resultsList = []
    try:
        es_client = ElasticsearchClient(
            (list('{0}'.format(s) for s in options.esservers)))
        search_query = SearchQuery()
        search_query.add_must(TermMatch('type', 'dashboard'))
        results = search_query.execute(es_client, indices=['.kibana'])

        for dashboard in results['hits']:
            dashboard_id = dashboard['_id']
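            # some Kibana versions store saved-object ids as 'dashboard:<id>'; strip the prefix if present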
            if dashboard_id.startswith('dashboard:'):
                dashboard_id = dashboard_id.replace('dashboard:', '')

            resultsList.append({
                'name':
                dashboard['_source']['dashboard']['title'],
                'id':
                dashboard_id
            })

    except ElasticsearchInvalidIndex as e:
        logger.error('Kibana dashboard index not found: {0}\n'.format(e))

    except Exception as e:
        logger.error('Kibana dashboard received error: {0}\n'.format(e))

    return json.dumps(resultsList)
Example #7
    def test_writing_event_defaults(self):
        query = SearchQuery()
        default_event = {}
        self.populate_test_event(default_event)
        self.refresh(self.event_index_name)

        query.add_must(ExistsMatch('summary'))
        results = query.execute(self.es_client)
        assert len(results['hits']) == 1
        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source', '_type']
        saved_event = results['hits'][0]['_source']
        assert 'category' in saved_event
        assert 'details' in saved_event
        assert 'hostname' in saved_event
        assert 'mozdefhostname' in saved_event
        assert 'processid' in saved_event
        assert 'processname' in saved_event
        assert 'receivedtimestamp' in saved_event
        assert 'severity' in saved_event
        assert 'source' in saved_event
        assert 'summary' in saved_event
        assert 'tags' in saved_event
        assert 'timestamp' in saved_event
        assert 'utctimestamp' in saved_event
        assert 'category' in saved_event
Example #8
    def test_simple_query_execute(self):
        query = SearchQuery()
        query.add_must(ExistsMatch('note'))
        assert query.date_timedelta == {}

        self.populate_example_event()
        self.refresh(self.event_index_name)

        results = query.execute(self.es_client)

        assert results.keys() == ['hits', 'meta']
        assert results['meta'].keys() == ['timed_out']
        assert results['meta']['timed_out'] is False
        assert len(results['hits']) == 1

        assert results['hits'][0].keys() == ['_score', '_type', '_id', '_source', '_index']
        assert type(results['hits'][0]['_id']) == unicode
        assert results['hits'][0]['_type'] == 'event'

        assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d")

        assert results['hits'][0]['_source']['note'] == 'Example note'
        assert results['hits'][0]['_source']['summary'] == 'Test Summary'

        assert results['hits'][0]['_source']['details'].keys() == ['information']
        assert results['hits'][0]['_source']['details']['information'] == 'Example information'

        with pytest.raises(KeyError):
            results['abcdefg']

        with pytest.raises(KeyError):
            results['abcdefg']['test']
Example #9
    def main(self):
        search_query = SearchQuery(minutes=15)

        self.config = self.parse_json_alert_config('ssh_access_signreleng.json')

        if self.config['ircchannel'] == '':
            self.config['ircchannel'] = None

        search_query.add_must([
            TermMatch('tags', 'releng'),
            TermMatch('details.program', 'sshd'),
            QueryStringMatch('hostname: /{}/'.format(self.config['hostfilter'])),
            PhraseMatch('summary', 'Accepted publickey for ')
        ])

        for exclusion in self.config['exclusions']:
            exclusion_query = None
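            # chain every key/value pair of this exclusion into one combined match
            # so the whole exclusion is applied as a single must_not clause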
            for key, value in exclusion.iteritems():
                phrase_exclusion = PhraseMatch(key, value)
                if exclusion_query is None:
                    exclusion_query = phrase_exclusion
                else:
                    exclusion_query = exclusion_query + phrase_exclusion

            search_query.add_must_not(exclusion_query)

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #10
    def test_simple_query_execute(self):
        query = SearchQuery()
        query.add_must(ExistsMatch('note'))
        assert query.date_timedelta == {}

        self.populate_example_event()
        self.refresh(self.event_index_name)

        results = query.execute(self.es_client)

        assert sorted(results.keys()) == ['hits', 'meta']
        assert list(results['meta'].keys()) == ['timed_out']
        assert results['meta']['timed_out'] is False
        assert len(results['hits']) == 1

        assert sorted(results['hits'][0].keys()) == ['_id', '_index', '_score', '_source']
        assert type(results['hits'][0]['_id']) == str

        assert results['hits'][0]['_index'] == datetime.now().strftime("events-%Y%m%d")

        assert results['hits'][0]['_source']['note'] == 'Example note'
        assert results['hits'][0]['_source']['summary'] == 'Test Summary'
        assert results['hits'][0]['_source']['type'] == 'event'

        assert list(results['hits'][0]['_source']['details'].keys()) == ['information']
        assert results['hits'][0]['_source']['details']['information'] == 'Example information'

        with pytest.raises(KeyError):
            results['abcdefg']

        with pytest.raises(KeyError):
            results['abcdefg']['test']
Example #11
 def process_alert(self, term):
     search_query = SearchQuery(minutes=20)
     content = QueryStringMatch(str(term))
     search_query.add_must(content)
     self.filtersManual(search_query)
     self.searchEventsSimple()
     self.walkEvents()
Example #12
def kibanaDashboards():
    resultsList = []
    try:
        es_client = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        search_query = SearchQuery()
        search_query.add_must(TermMatch('_type', 'dashboard'))
        results = search_query.execute(es_client, indices=['.kibana'])

        for dashboard in results['hits']:
            resultsList.append({
                'name': dashboard['_source']['title'],
                'url': "%s#/%s/%s" % (
                    options.kibanaurl,
                    "dashboard",
                    dashboard['_id']
                )
            })

    except ElasticsearchInvalidIndex as e:
        sys.stderr.write('Kibana dashboard index not found: {0}\n'.format(e))

    except Exception as e:
        sys.stderr.write('Kibana dashboard received error: {0}\n'.format(e))

    return json.dumps(resultsList)
Example #13
    def test_without_time_defined(self):
        query = SearchQuery()
        query.add_must(ExistsMatch('summary'))
        assert query.date_timedelta == {}

        default_event = {
            "utctimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }

        self.populate_test_event(default_event)
        default_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp({'days': 11})
        default_event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp({'days': 11})
        self.populate_test_event(default_event)

        not_old_event = default_event
        not_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp({'days': 9})
        not_old_event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp({'days': 9})
        self.populate_test_event(not_old_event)

        self.refresh(self.event_index_name)

        results = query.execute(self.es_client)
        assert len(results['hits']) == 3
Example #14
    def test_without_time_defined(self):
        query = SearchQuery()
        query.add_must(ExistsMatch('summary'))
        assert query.date_timedelta == {}

        default_event = {
            "utctimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }

        self.populate_test_event(default_event)
        default_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
            {'days': 11})
        default_event[
            'receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
                {'days': 11})
        self.populate_test_event(default_event)

        not_old_event = default_event
        not_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp(
            {'days': 9})
        not_old_event[
            'receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp(
                {'days': 9})
        self.populate_test_event(not_old_event)

        self.refresh(self.event_index_name)

        results = query.execute(self.es_client)
        assert len(results['hits']) == 3
Example #15
    def test_beginning_time_seconds_received_timestamp(self):
        query = SearchQuery(seconds=10)
        query.add_must(ExistsMatch('summary'))
        assert query.date_timedelta == {'seconds': 10}

        default_event = {
            "receivedtimestamp": UnitTestSuite.current_timestamp(),
            "summary": "Test summary",
            "details": {
                "note": "Example note",
            }
        }
        self.populate_test_event(default_event)

        too_old_event = default_event
        too_old_event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp({'seconds': 11})
        too_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp({'seconds': 11})
        self.populate_test_event(too_old_event)

        not_old_event = default_event
        not_old_event['receivedtimestamp'] = UnitTestSuite.subtract_from_timestamp({'seconds': 9})
        not_old_event['utctimestamp'] = UnitTestSuite.subtract_from_timestamp({'seconds': 9})
        self.populate_test_event(not_old_event)

        self.refresh(self.event_index_name)

        results = query.execute(self.es_client)
        assert len(results['hits']) == 2
Example #16
def getESAlerts(es):
    search_query = SearchQuery(minutes=50)
    # We use an ExistsMatch here just to satisfy the
    # requirement that a search query must have at least one matcher
    search_query.add_must(ExistsMatch('summary'))
    results = search_query.execute(es, indices=['alerts'], size=10000)
    return results
Example #17
def getESAlerts(es):
    search_query = SearchQuery(minutes=50)
    # We use an ExistsMatch here just to satisfy the
    # requirement that a search query must have at least one matcher
    search_query.add_must(ExistsMatch('summary'))
    results = search_query.execute(es, indices=['alerts'], size=10000)
    return results
Example #18
    def main(self):
        search_query = SearchQuery(hours=1)

        search_query.add_must([TermMatch('source', 'cloudtrail')])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #19
 def search_and_verify_event(self, expected_event):
     self.refresh('events')
     search_query = SearchQuery(minutes=5)
     search_query.add_must(ExistsMatch('tags'))
     results = search_query.execute(self.es_client)
     assert len(results['hits']) == 1
     saved_event = results['hits'][0]['_source']
     self.verify_event(saved_event, expected_event)
Example #20
 def test_execute_without_size(self):
     for num in range(0, 1200):
         self.populate_example_event()
     self.refresh(self.event_index_name)
     query = SearchQuery()
     query.add_must(ExistsMatch('summary'))
     results = query.execute(self.es_client)
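     # with no explicit size, the query returns at most 1000 hits (1200 were indexed above)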
     assert len(results['hits']) == 1000
Example #21
 def search_and_verify_event(self, expected_event):
     self.refresh('events')
     search_query = SearchQuery(minutes=5)
     search_query.add_must(ExistsMatch('tags'))
     results = search_query.execute(self.es_client)
     assert len(results['hits']) == 1
     saved_event = results['hits'][0]['_source']
     self.verify_event(saved_event, expected_event)
Example #22
 def test_execute_without_size(self):
     for num in range(0, 1200):
         self.populate_example_event()
     self.refresh(self.event_index_name)
     query = SearchQuery()
     query.add_must(ExistsMatch('summary'))
     results = query.execute(self.es_client)
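     # with no explicit size, the query returns at most 1000 hits (1200 were indexed above)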
     assert len(results['hits']) == 1000
Example #23
    def main(self):
        search_query = SearchQuery(hours=6)

        day_old_date = toUTC(datetime.now() - timedelta(days=1)).isoformat()
        search_query.add_must(LessThanMatch('utctimestamp', day_old_date))
        self.filtersManual(search_query)

        self.searchEventsAggregated('mozdefhostname', samplesLimit=1000)
        self.walkAggregations(threshold=1)
Example #24
 def process_alert(self, alert_config):
     self.current_alert_time_window = int(alert_config['time_window'])
     self.current_alert_time_type = alert_config['time_window_type']
     search_query_time_window = {self.current_alert_time_type: self.current_alert_time_window}
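     # e.g. {'minutes': 30}, unpacked below as SearchQuery(minutes=30)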
     search_query = SearchQuery(**search_query_time_window)
     search_query.add_must(QueryStringMatch(str(alert_config['search_query'])))
     self.filtersManual(search_query)
     self.searchEventsSimple()
     self.walkEvents(description=alert_config['description'])
Example #25
 def process_alert(self, alert_config):
     self.current_alert_time_window = int(alert_config['time_window'])
     self.current_alert_time_type = alert_config['time_window_type']
     search_query_time_window = {self.current_alert_time_type: self.current_alert_time_window}
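     # e.g. {'minutes': 30}, unpacked below as SearchQuery(minutes=30)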
     search_query = SearchQuery(**search_query_time_window)
     search_query.add_must(QueryStringMatch(str(alert_config['search_query'])))
     self.filtersManual(search_query)
     self.searchEventsSimple()
     self.walkEvents(description=alert_config['description'])
Example #26
    def main(self):
        search_query = SearchQuery(hours=6)

        day_old_date = toUTC(datetime.now() - timedelta(days=1)).isoformat()
        search_query.add_must(LessThanMatch('utctimestamp', day_old_date))
        self.filtersManual(search_query)

        self.searchEventsAggregated('mozdefhostname', samplesLimit=1000)
        self.walkAggregations(threshold=1)
Example #27
def find(qes: QueryInterface, username: str, index: str) -> Optional[Entry]:
    '''Retrieve the locality state for one user from ElasticSearch.
    '''

    search = SearchQuery()
    search.add_must(
        [TermMatch('type_', 'locality'),
         TermMatch('username', username)])

    return qes(search, index)
Example #28
 def main(self):
     self.parse_config('ldap_password_spray.conf', ['threshold_count', 'search_depth_min'])
     search_query = SearchQuery(minutes=int(self.config.search_depth_min))
     search_query.add_must([
         TermMatch('category', 'ldap'),
         TermMatch('details.response.error', 'LDAP_INVALID_CREDENTIALS')
     ])
     self.filtersManual(search_query)
     self.searchEventsAggregated('details.client', samplesLimit=10)
     self.walkAggregations(threshold=int(self.config.threshold_count))
Example #29
def getSqsStats(es):
    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('type', 'mozdefhealth'),
        TermMatch('category', 'mozdef'),
        TermMatch('tags', 'sqs-latest'),
    ])
    results = search_query.execute(es, indices=['mozdefstate'])

    return results['hits']
Example #30
def getSqsStats(es):
    search_query = SearchQuery(minutes=15)
    search_query.add_must([
        TermMatch('_type', 'mozdefhealth'),
        TermMatch('category', 'mozdef'),
        TermMatch('tags', 'sqs-latest'),
    ])
    results = search_query.execute(es, indices=['mozdefstate'])

    return results['hits']
Example #31
    def main(self):
        search_query = SearchQuery(minutes=2)
        search_query.add_must([
            TermMatch('category', 'syslog'),
            TermMatch('details.program', 'sshd'),
            PhraseMatch('summary', 'Accepted publickey')
        ])

        self.filtersManual(search_query)
        self.searchEventsAggregated('hostname', samplesLimit=10)
        self.walkAggregations(threshold=1)
Example #32
    def main(self):
        search_query = SearchQuery(minutes=15)
        search_query.add_must([
            TermMatch('category', 'syslog'),
            TermMatch('details.program', 'sshd'),
            PhraseMatch('summary', 'Accepted publickey')
        ])

        self.filtersManual(search_query)
        self.searchEventsAggregated('hostname', samplesLimit=10)
        self.walkAggregations(threshold=1)
Example #33
    def main(self):
        search_query = SearchQuery(minutes=30)

        search_query.add_must([
            TermMatch('tags', 'mig-runner-sshioc'),
        ])

        self.filtersManual(search_query)

        self.searchEventsSimple()
        self.walkEvents()
Example #34
    def test_aggregation_without_must_fields(self):
        event = self.generate_default_event()
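        # the generated default event stores its timestamps as callables;
        # call them here to get concrete values before indexing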
        event['_source']['utctimestamp'] = event['_source']['utctimestamp']()
        event['_source']['receivedtimestamp'] = event['_source']['receivedtimestamp']()
        self.populate_test_event(event)
        self.refresh(self.event_index_name)

        search_query = SearchQuery(minutes=10)

        search_query.add_aggregation(Aggregation('source'))
        results = search_query.execute(self.es_client)
        assert results['aggregations']['source']['terms'][0]['count'] == 1
Example #35
    def main(self):
        search_query = SearchQuery(minutes=20)

        search_query.add_must([
            TermMatch('source', 'cloudtrail'),
            TermMatch('details.eventname', 'PutBucketPolicy'),
            ExistsMatch('details.requestparameters.bucketpolicy.statement.principal')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #36
    def test_aggregation_without_must_fields(self):
        event = self.generate_default_event()
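        # the generated default event stores its timestamps as callables;
        # call them here to get concrete values before indexing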
        event['_source']['utctimestamp'] = event['_source']['utctimestamp']()
        event['_source']['receivedtimestamp'] = event['_source']['receivedtimestamp']()
        self.populate_test_event(event)
        self.refresh(self.event_index_name)

        search_query = SearchQuery(minutes=10)

        search_query.add_aggregation(Aggregation('source'))
        results = search_query.execute(self.es_client)
        assert results['aggregations']['source']['terms'][0]['count'] == 1
Example #37
    def main(self):
        self._config = self.parse_json_alert_config('feedback_events.json')
        search_query = SearchQuery(minutes=30)

        search_query.add_must([
            TermMatch('category', 'user_feedback'),
            TermMatch('details.action', 'escalate')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #38
 def process_alert(self, alert_config):
     search_query = SearchQuery(minutes=int(alert_config.time_window))
     terms = []
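     # each configured filter is a (field, value) pair turned into a TermMatch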
     for i in alert_config.filters:
         terms.append(TermMatch(i[0], i[1]))
     terms.append(QueryStringMatch(str(alert_config.search_string)))
     search_query.add_must(terms)
     self.filtersManual(search_query)
     self.searchEventsAggregated(alert_config.aggregation_key,
                                 samplesLimit=int(alert_config.num_samples))
     self.walkAggregations(threshold=int(alert_config.num_aggregations),
                           config=alert_config)
Example #39
    def main(self):
        query = SearchQuery(minutes=15)

        # Search for events from the session invalidation app wherein
        # an authenticated user terminated a user's sessions.
        query.add_must([
            TermMatch('category', 'sessioninvalidation'),
        ])

        self.filtersManual(query)
        self.searchEventsAggregated('details.actor', samplesLimit=1000)
        self.walkAggregations(threshold=1, config=None)
Example #40
    def main(self):
        self._config = self.parse_json_alert_config('feedback_events.json')
        search_query = SearchQuery(minutes=30)

        search_query.add_must([
            TermMatch('category', 'user_feedback'),
            TermMatch('details.action', 'escalate')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #41
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'ldapChange'),
            TermMatch("details.actor", "cn=admin,dc=mozilla"),
            PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime')
        ])
        self.filtersManual(search_query)

        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #42
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'ldapChange'),
            TermMatch('details.changetype', 'delete')
        ])

        self.filtersManual(search_query)

        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #43
    def main(self):
        search_query = SearchQuery(minutes=30)

        search_query.add_must([
            TermMatch('source', 'cloudtrail'),
            TermMatch('details.eventname', 'StopLogging')
        ])

        search_query.add_must_not(TermMatch('errorcode', 'AccessDenied'))

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #44
    def main(self):
        search_query = SearchQuery(minutes=20)

        search_query.add_must([
            TermMatch('source', 'cloudtrail'),
            TermMatch('details.eventname', 'CreateBucket'),
            TermMatch('details.requestparameters.x-amz-acl',
                      'public-read-write'),
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #45
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'ldapChange'),
            TermMatch('details.changetype', 'modify'),
            PhraseMatch("summary", "groups")
        ])

        self.filtersManual(search_query)
        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #46
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must(PhraseMatch('summary', 'Failsafe Duo login'))

        self.filtersManual(search_query)

        # Search aggregations on field 'hostname', keep X samples of
        # events at most
        self.searchEventsAggregated('hostname', samplesLimit=10)
        # alert when >= X matching events in an aggregation
        # in this case, always
        self.walkAggregations(threshold=1)
Example #47
    def main(self):
        search_query = SearchQuery(minutes=5)

        search_query.add_must([
            TermMatch('category', 'execve'),
            TermMatch('processname', 'audisp-json'),
            TermMatch('details.processname', 'ssh'),
            PhraseMatch('details.parentprocess', 'sftp')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #48
    def main(self):
        search_query = SearchQuery(minutes=20)

        search_query.add_must([
            TermMatch('source', 'cloudtrail'),
            TermMatch('details.eventname', 'PutBucketPolicy'),
            ExistsMatch(
                'details.requestparameters.bucketpolicy.statement.principal')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #49
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'ldapChange'),
            TermMatch('details.changetype', 'modify'),
            PhraseMatch("summary", "groups")
        ])

        self.filtersManual(search_query)
        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #50
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'ldapChange'),
            TermMatch("details.actor", "cn=admin,dc=mozilla"),
            PhraseMatch('details.changepairs', 'replace:pwdAccountLockedTime')
        ])
        self.filtersManual(search_query)

        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #51
    def main(self):
        search_query = SearchQuery(minutes=5)

        search_query.add_must([
            TermMatch('category', 'execve'),
            TermMatch('processname', 'audisp-json'),
            TermMatch('details.processname', 'ssh'),
            PhraseMatch('details.parentprocess', 'sftp')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #52
    def main(self):
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'ldapChange'),
            TermMatch('details.changetype', 'add')
        ])

        self.filtersManual(search_query)

        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #53
    def main(self):
        self.parse_config('duo_authfail.conf', ['url'])
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'authentication'),
            ExistsMatch('details.sourceipaddress'),
            ExistsMatch('details.username'),
            PhraseMatch('details.result', 'fraud')
        ])

        self.filtersManual(search_query)
        self.searchEventsSimple()
        self.walkEvents()
Example #54
    def main(self):
        self.parse_config('trace_audit.conf', ['hostfilter'])
        search_query = SearchQuery(minutes=5)

        search_query.add_must([
            TermMatch('details.processname', 'strace'),
        ])

        for host in self.config.hostfilter.split():
            search_query.add_must_not(PhraseMatch('hostname', host))

        self.filtersManual(search_query)
        self.searchEventsAggregated('details.originaluser', samplesLimit=10)
        self.walkAggregations(threshold=1)
Example #55
    def main(self):
        self.parse_config('http_auth_bruteforce.conf', ['url'])
        search_query = SearchQuery(minutes=15)

        search_query.add_must([
            TermMatch('category', 'bro'),
            TermMatch('source', 'notice'),
            PhraseMatch('details.note', 'AuthBruteforcing::HTTP_AuthBruteforcing_Attacker')
        ])

        self.filtersManual(search_query)
        # Search events
        self.searchEventsSimple()
        self.walkEvents()
Example #56
    def main(self):
        search_query = SearchQuery(hours=4)

        search_query.add_must([
            TermMatch('category', 'open_port_policy_violation'),
            PhraseMatch('tags', 'open_port_policy_violation')
        ])

        self.filtersManual(search_query)

        # Search aggregations on field 'details.destinationipaddress', keep X
        # samples of events at most
        self.searchEventsAggregated('details.destinationipaddress', samplesLimit=100)
        # alert when >= X matching events in an aggregation
        self.walkAggregations(threshold=1)
Example #57
    def main(self):
        # Create a query to look back the last 20 minutes
        search_query = SearchQuery(minutes=20)

        # Add search terms to our query
        search_query.add_must([
            TermMatch('source', 'cloudtrail'),
            TermMatch('details.eventverb', 'Describe'),
            ExistsMatch('details.source')
        ])

        self.filtersManual(search_query)
        # We aggregate on details.source, which is the AWS service name
        self.searchEventsAggregated('details.source', samplesLimit=2)
        self.walkAggregations(threshold=50)
Example #58
    def main(self):
        search_query = SearchQuery(minutes=1)
        search_query.add_must([
            TermMatch('category', 'bro'),
            TermMatch('source', 'notice'),
            PhraseMatch('details.note', 'Scan::Address_Scan'),
            QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustmatch']))
        ])
        search_query.add_must_not([
            QueryStringMatch('details.sourceipaddress: {}'.format(self._config['sourcemustnotmatch']))
        ])

        self.filtersManual(search_query)
        self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
        self.walkAggregations(threshold=1)