def test_file_event_query_str_with_page_size_gives_correct_json_representation(
    event_filter_group,
):
    """Setting a custom page size is reflected in the serialized query."""
    query = FileEventQuery(event_filter_group)
    query.page_size = 500
    expected_json = JSON_QUERY_BASE.format(
        "AND", event_filter_group, 1, 500, "asc", "eventId"
    )
    assert str(query) == expected_json
def test_file_event_query_str_with_sort_direction_gives_correct_json_representation(
    event_filter_group,
):
    """Setting the sort direction is reflected in the serialized query."""
    query = FileEventQuery(event_filter_group)
    query.sort_direction = "desc"
    expected_json = JSON_QUERY_BASE.format(
        "AND", event_filter_group, 1, 10000, "desc", "eventId"
    )
    assert str(query) == expected_json
def test_file_event_query_str_with_sort_key_gives_correct_json_representation(
    event_filter_group,
):
    """Setting the sort key is reflected in the serialized query."""
    query = FileEventQuery(event_filter_group)
    query.sort_key = "some_field_to_sort_by"
    expected_json = JSON_QUERY_BASE.format(
        "AND", event_filter_group, 1, 10000, "asc", "some_field_to_sort_by"
    )
    assert str(query) == expected_json
def test_file_event_str_gives_correct_json_representation_when_pg_token_is_set(
    event_filter_group,
):
    """Setting a page token replaces ``pgNum`` with ``pgToken`` in the output."""
    query = FileEventQuery()
    assert query.page_token is None
    # Default serialization uses numeric paging (pgNum).
    expected_without_token = (
        '{"groupClause":"AND", "groups":[], "srtDir":"asc", '
        '"srtKey":"eventId", "pgNum":1, "pgSize":10000}'
    )
    assert str(query) == expected_without_token
    query.page_token = "abc"
    # Once a token is present, token paging (pgToken) is used instead.
    expected_with_token = (
        '{"groupClause":"AND", "groups":[], "srtDir":"asc", '
        '"srtKey":"eventId", "pgToken":"abc", "pgSize":10000}'
    )
    assert str(query) == expected_with_token
def test_file_event_query_repr_does_not_throw_type_error():
    """``repr`` of a query must produce a ``str`` without raising.

    On Python 3, ``repr()`` raises ``TypeError`` if ``__repr__`` returns a
    non-str.  The original try/except with ``assert False`` was redundant (an
    uncaught exception already fails the test) and fragile (``assert`` is
    stripped under ``python -O``); asserting the return type directly covers
    both the no-raise and the correct-type requirements.
    """
    assert isinstance(repr(FileEventQuery()), str)
def test_search_all_file_events_handles_escaped_quote_chars_in_token(
    self,
    connection,
    preservation_data_service,
    saved_search_service,
    storage_service_factory,
):
    """Escaped quote characters in a page token pass through to the POST body unchanged."""
    client = SecurityDataClient(
        FileEventService(connection),
        preservation_data_service,
        saved_search_service,
        storage_service_factory,
    )
    token_with_escapes = r"1234_\"abcde\""
    client.search_all_file_events(FileEventQuery.all(), token_with_escapes)
    connection.post.assert_called_once_with(
        FILE_EVENT_URI,
        json={
            "groupClause": "AND",
            "groups": [],
            "srtDir": "asc",
            "srtKey": "eventId",
            "pgToken": token_with_escapes,
            "pgSize": 10000,
        },
    )
def test_search_all_file_events_calls_search_with_expected_params_when_pg_token_is_passed(
    self,
    connection,
    preservation_data_service,
    saved_search_service,
    storage_service_factory,
):
    """Passing a page token POSTs a pgToken query and returns the raw response."""
    mocked_response = {
        "totalCount": None,
        "fileEvents": None,
        "nextPgToken": "pqr",
        "problems": None,
    }
    connection.post.return_value = mocked_response
    client = SecurityDataClient(
        FileEventService(connection),
        preservation_data_service,
        saved_search_service,
        storage_service_factory,
    )
    actual_response = client.search_all_file_events(FileEventQuery.all(), "abc")
    connection.post.assert_called_once_with(
        FILE_EVENT_URI,
        json={
            "groupClause": "AND",
            "groups": [],
            "srtDir": "asc",
            "srtKey": "eventId",
            "pgToken": "abc",
            "pgSize": 10000,
        },
    )
    assert actual_response is mocked_response
def test_file_event_query_dict_gives_expected_dict_representation(event_filter_group):
    """``dict(query)`` exposes the default clause, paging, and sorting fields."""
    file_event_query = FileEventQuery(event_filter_group)
    file_event_query_dict = dict(file_event_query)
    assert file_event_query_dict["groupClause"] == "AND"
    assert file_event_query_dict["pgNum"] == 1
    assert file_event_query_dict["pgSize"] == 10000
    assert file_event_query_dict["srtDir"] == "asc"
    assert file_event_query_dict["srtKey"] == "eventId"
    # isinstance is the idiomatic type check; `type(x) == list` needlessly
    # rejects subclasses and is flagged by linters (E721).
    assert isinstance(file_event_query_dict["groups"], list)
def _construct_query(state, begin, end, saved_search, advanced_query, or_query):
    """Build a FileEventQuery from CLI state and the user's search options.

    An advanced query wins over a saved search, which wins over ad-hoc
    time-range / OR-combined filters accumulated on ``state``.
    """
    if advanced_query:
        state.search_filters = advanced_query
    elif saved_search:
        state.search_filters = saved_search._filter_group_list
    else:
        if begin or end:
            time_filter = create_time_range_filter(f.EventTimestamp, begin, end)
            state.search_filters.append(time_filter)
        if or_query:
            state.search_filters = convert_to_or_query(state.search_filters)
    file_event_query = FileEventQuery(*state.search_filters)
    file_event_query.page_size = MAX_EVENT_PAGE_SIZE
    file_event_query.sort_direction = "asc"
    file_event_query.sort_key = "insertionTimestamp"
    return file_event_query
def test_search_file_events(self, connection):
    """Searching events from the last 24 hours returns a successful response."""
    start_date = datetime.utcnow() - timedelta(1)
    end_date = datetime.utcnow()
    timestamp_filter = EventTimestamp.in_range(
        convert_datetime_to_epoch(start_date),
        convert_datetime_to_epoch(end_date),
    )
    search_response = connection.securitydata.search_file_events(
        FileEventQuery.all(timestamp_filter)
    )
    assert_successful_response(search_response)
def get_query(self, search_id):
    """Get the saved search in form of a query(`py42.sdk.queries.fileevents.file_event_query`).

    Args:
        search_id (str): Unique search Id of the saved search.

    Returns:
        :class:`py42.sdk.queries.fileevents.file_event_query.FileEventQuery`
    """
    # The API wraps results in a "searches" list; only the first entry applies.
    first_search = self.get_by_id(search_id)[u"searches"][0]
    return FileEventQuery.from_dict(first_search)
def test_file_event_query_from_dict_gives_correct_json_representation():
    """A query rebuilt from a dict serializes its groups back to JSON."""
    filter_entry = {"operator": "IS", "term": "testterm", "value": "testval"}
    group = {"filterClause": "AND", "filters": [filter_entry]}
    query = FileEventQuery.from_dict({"groupClause": "AND", "groups": [group]})
    group_str = (
        '{"filterClause":"AND", "filters":'
        '[{"operator":"IS", "term":"testterm", "value":"testval"}]}'
    )
    expected_json = JSON_QUERY_BASE.format(
        "AND", group_str, 1, 10000, "asc", "eventId"
    )
    assert str(query) == expected_json
def get_query(self, search_id, page_number=None, page_size=None):
    """Get the saved search in form of a query(`py42.sdk.queries.fileevents.file_event_query`).

    Args:
        search_id (str): Unique search Id of the saved search.
        page_number (int, optional): The consecutive group of results of size page_size in the result set to return. Defaults to None.
        page_size (int, optional): The maximum number of results to be returned. Defaults to None.

    Returns:
        :class:`py42.sdk.queries.fileevents.file_event_query.FileEventQuery`
    """
    # The API wraps results in a "searches" list; only the first entry applies.
    first_search = self.get_by_id(search_id)[u"searches"][0]
    return FileEventQuery.from_dict(
        first_search, page_number=page_number, page_size=page_size
    )
def test_search_all_file_events_when_token_is_none_succeeds(
    self,
    connection,
    preservation_data_service,
    saved_search_service,
    storage_service_factory,
):
    """A ``None`` page token is accepted without raising."""
    client = SecurityDataClient(
        FileEventService(connection),
        preservation_data_service,
        saved_search_service,
        storage_service_factory,
    )
    client.search_all_file_events(FileEventQuery.all(), page_token=None)
def my_command(state, username):
    """Page through a user's devices and their file events from the last three days."""
    # get user devices
    user_response = state.sdk.users.get_by_username(username)
    user_uid = user_response["users"][0]["userUid"]
    device_pages = state.sdk.devices.get_all(user_uid=user_uid)
    devices_df = pd.json_normalize(next(device_pages)["computers"])[
        ["name", "active", "guid", "alertStates"]
    ]
    # get recent file events
    query = FileEventQuery.all(
        DeviceUsername.eq(username),
        EventTimestamp.within_the_last(EventTimestamp.THREE_DAYS),
    )
    search_results = state.sdk.securitydata.search_file_events(query)
    events_df = pd.json_normalize(search_results["fileEvents"])[
        ["eventType", "eventTimestamp", "fileName", "fileSize", "fileCategory"]
    ]
    # print results
    click.echo_via_pager(
        "Devices:\n{}\n\nEvents:\n{}".format(
            devices_df.to_string(index=False), events_df.to_string(index=False)
        )
    )
def test_saved_search_calls_extractor_extract_and_saved_search_execute(
    runner, cli_state, file_event_extractor
):
    """The saved-search query fetched from the SDK is handed to the extractor."""
    # Build the repeated eventType IS-filters with a comprehension.
    event_type_filters = [
        {"operator": "IS", "term": "eventType", "value": value}
        for value in ("DELETED", "EMAILED", "MODIFIED", "READ_BY_AP", "CREATED")
    ]
    search_query = {
        "groupClause": "AND",
        "groups": [
            {
                "filterClause": "AND",
                "filters": [
                    {
                        "operator": "ON_OR_AFTER",
                        "term": "eventTimestamp",
                        "value": "2020-05-01T00:00:00.000Z",
                    }
                ],
            },
            {"filterClause": "OR", "filters": event_type_filters},
        ],
        "pgNum": 1,
        "pgSize": 10000,
        "srtDir": "asc",
        "srtKey": "eventId",
    }
    query = FileEventQuery.from_dict(search_query)
    cli_state.sdk.securitydata.savedsearches.get_query.return_value = query
    runner.invoke(
        cli,
        ["security-data", "search", "--saved-search", "test_id"],
        obj=cli_state,
    )
    assert file_event_extractor.extract.call_count == 1
    # Both filter groups passed to extract must appear in the query's JSON.
    assert str(file_event_extractor.extract.call_args[0][0]) in str(query)
    assert str(file_event_extractor.extract.call_args[0][1]) in str(query)
def test_file_event_query_str_with_many_filters_or_specified_gives_correct_json_representation(
    event_filter_group_list,
):
    """An explicit OR group clause is honored when serializing many filters."""
    query = FileEventQuery(event_filter_group_list, group_clause="OR")
    assert str(query) == build_query_json("OR", event_filter_group_list)
def test_file_event_query_str_with_many_filters_gives_correct_json_representation(
    event_filter_group_list,
):
    """Many filters serialize with the default AND group clause."""
    query = FileEventQuery(event_filter_group_list)
    assert str(query) == build_query_json("AND", event_filter_group_list)
def test_file_event_query_str_with_single_filter_and_specified_gives_correct_json_representation(
    event_filter_group,
):
    """An explicit AND group clause serializes the same as the default."""
    query = FileEventQuery(event_filter_group, group_clause="AND")
    assert str(query) == build_query_json("AND", event_filter_group)
def test_file_event_query_unicode_with_single_filter_gives_correct_json_representation(
    unicode_event_filter_group,
):
    """Filters containing unicode values serialize correctly."""
    query = FileEventQuery(unicode_event_filter_group)
    assert str(query) == build_query_json("AND", unicode_event_filter_group)
def test_file_event_query_constructs_successfully(event_filter_group):
    """A query built from a filter group is truthy."""
    query = FileEventQuery(event_filter_group)
    assert query
def _create_test_query(test_filename="*"):
    """Build a query matching files named *test_filename* (default: any file)."""
    filename_filter = FileName.eq(test_filename)
    return FileEventQuery(filename_filter)
def to_all_query(self):
    """Convert list of search criteria to *args"""
    all_query = FileEventQuery.all(*self._filters)
    # Only override the default page size when one was explicitly configured.
    if self._pg_size:
        all_query.page_size = self._pg_size
    return all_query
def _search_by_hash(self, checksum, checksum_type):
    """Search file events matching *checksum*, most recent events first."""
    hash_query = FileEventQuery.all(checksum_type.eq(checksum))
    # Newest events first so callers see the latest occurrence of the file.
    hash_query.sort_key = u"eventTimestamp"
    hash_query.sort_direction = u"desc"
    return self.search_file_events(hash_query)
def _search_by_hash(self, checksum, checksum_type):
    """Search file events whose hash filter matches *checksum*.

    Args:
        checksum (str): The file hash value to match.
        checksum_type: A filter class exposing ``eq`` (e.g. an MD5/SHA256
            filter) used to build the equality filter for the hash.

    Returns:
        The response from :meth:`search_file_events`.
    """
    # Parameters renamed from ``hash``/``type`` — those shadowed the builtins
    # and diverged from the sibling implementation's ``checksum``/
    # ``checksum_type`` naming. Private method; call sites are positional.
    query = FileEventQuery.all(checksum_type.eq(checksum))
    return self.search_file_events(query)