def test_lookup_range_str_to_timestamp_passed():
    """Valid inputs parse to the expected datetime pair.

    Covers both endpoints given explicitly, and the start endpoint omitted
    (in which case it is derived by looking back from the end time —
    presumably a default lookup window of one hour; the assertion pins it).
    """
    # Case 1: both start and end supplied.
    start_dt, end_dt = lookup_range_str_to_timestamp(
        "2021-02-12 08:00:00", "2021-02-13 08:00:00"
    )
    assert start_dt == datetime(2021, 2, 12, 8, 0)
    assert end_dt == datetime(2021, 2, 13, 8, 0)

    # Case 2: start omitted (None) — derived relative to the end time.
    start_dt, end_dt = lookup_range_str_to_timestamp(None, "2021-02-13 08:00:00")
    assert start_dt == datetime(2021, 2, 13, 7, 0)
    assert end_dt == datetime(2021, 2, 13, 8, 0)
def main(loggroupname, starttime, endtime, limit, profile, region):
    """Run the default CloudWatch Logs Insights query and print each message.

    Prints one line per result (`@timestamp` then `@message`) followed by a
    count of the items returned.

    :param loggroupname: CloudWatch log group name to query.
    :param starttime: start of the range as "YYYY-MM-DD HH:MM:SS", or None.
    :param endtime: end of the range as "YYYY-MM-DD HH:MM:SS", or None.
    :param limit: maximum number of log events to return.
    :param profile: AWS profile name used to build the boto3 Session.
    :param region: AWS region for the logs client.
    """
    session = Session(profile_name=profile)
    client = session.client("logs", region_name=region)
    start_dt, end_dt = lookup_range_str_to_timestamp(
        starttime, endtime, lookup_hours=LOOKUP_HOURS, local_to_utc=False
    )
    query_id = client.start_query(
        logGroupName=loggroupname,
        startTime=int(
            start_dt.timestamp()
        ),  # Although doc says this is the number of seconds UTC, it takes local time and convert to UTC
        endTime=int(end_dt.timestamp()),
        queryString=DEFAULT_QUERY_STRING,
        limit=limit,
    )["queryId"]
    # TODO What if more than 10000 messages?

    response = None
    # Valid status: "Scheduled"|"Running"|"Complete"|"Failed"|"Cancelled"
    # Fix: also keep polling while the query is still "Scheduled"; previously
    # a scheduled-but-not-yet-running query ended the loop early with no
    # results. This matches the sibling `query` function's wait condition.
    while response is None or response.get("status") in ("Running", "Scheduled"):
        response = client.get_query_results(queryId=query_id)
        sleep(1)

    cnt = 0
    for item in response.get("results", []):
        # Each item is a list of {"field": ..., "value": ...} dicts.
        v = [k["value"] for k in item if k["field"] == "@message"][0]
        t = [k["value"] for k in item if k["field"] == "@timestamp"][0]
        print(t, v)
        cnt += 1
    print(f"Number of items returned: {cnt}")
def query(ls, loggroupname, starttime, endtime, querystring, limit, profile, region):
    """Run a CloudWatch Logs Insights query and dump each result item as YAML.

    Values that look like JSON objects (leading '{') are parsed before
    dumping; the synthetic '@ptr' field is dropped from the output.
    """
    client = Session(profile_name=profile).client('logs', region_name=region)
    start_dt, end_dt = lookup_range_str_to_timestamp(
        starttime, endtime, lookup_hours=LOOKUP_HOURS, local_to_utc=False)

    # Fall back to the stock query when the caller supplies none.
    if querystring:
        query_string = querystring
    else:
        query_string = PREDEFINED_QUERIES['Default']

    # Although doc says startTime is the number of seconds UTC, it takes
    # local time and converts to UTC.
    started = client.start_query(
        logGroupName=loggroupname,
        startTime=int(start_dt.timestamp()),
        endTime=int(end_dt.timestamp()),
        queryString=query_string,
        limit=limit,  # This value overrides the limit specified in the queryString
    )
    query_id = started['queryId']

    # Valid status: 'Scheduled'|'Running'|'Complete'|'Failed'|'Cancelled'
    response = None
    while response is None or response.get('status') in ('Scheduled', 'Running'):
        response = client.get_query_results(queryId=query_id)
        sleep(1)

    cnt = 0
    for cnt, item in enumerate(response.get('results', []), start=1):
        print(f'# Item {cnt} --------------------')
        record = {}
        for field in item:
            if field['field'] == '@ptr':
                continue
            value = field['value']
            record[field['field']] = json.loads(value) if value[0] == '{' else value
        print(yaml.dump(record))
    print(f'Number of items returned: {cnt}')
def new_operation_params(start_time, end_time, event_name, user_name):
    """Build the parameter dict for a CloudTrail event lookup.

    Returns {"StartTime": ..., "EndTime": ...}, plus "LookupAttributes"
    when an event name and/or user name filter is given.
    """
    start_dt, end_dt = lookup_range_str_to_timestamp(
        start_time, end_time, lookup_hours=LOOKUP_HOURS, local_to_utc=True)

    params = {"StartTime": start_dt, "EndTime": end_dt}

    # If more than one attribute, they are evaluated as "OR".
    attributes = []
    for key, value in (("EventName", event_name), ("Username", user_name)):
        if value is not None:
            attributes.append({"AttributeKey": key, "AttributeValue": value})
    if attributes:
        params["LookupAttributes"] = attributes
    return params
def test_lookup_range_str_to_timestamp_failed_invalid_format():
    """A start time missing the seconds component must be rejected."""
    with pytest.raises(Exception):
        lookup_range_str_to_timestamp("2021-02-13 08:00", None)
def test_lookup_range_str_to_timestamp_failed_greater_start_time():
    """A start time later than the end time must be rejected."""
    with pytest.raises(Exception):
        lookup_range_str_to_timestamp("2021-02-13 08:00:00", "2021-02-12 08:00:00")