def duplicate_request(msg):
    """Detect and report requests repeated within THRESHOLD_TIME seconds.

    Keys each request by the MD5 of its (sorted-params URL without scheme,
    HTTP method). If the same key was seen less than THRESHOLD_TIME ago,
    raises an issue via ``event.issue``. Afterwards records/refreshes the
    request in the module-level ``HISTORY_URL`` cache and evicts entries
    older than THRESHOLD_TIME.

    :param msg: mitmproxy-style message dict; reads ``msg['flow']['request']``
                (``url``, ``method``) and ``msg['time']``.
                (schema assumed from usage — confirm against caller)
    """
    # 1. ignore unexpected object
    if ignore_check(msg):
        return
    # 2. prepare useful info
    origin_url = msg['flow']['request']['url']
    # strip the scheme ("http://", "https://") so equal URLs compare equal
    sorted_url = sort_params(origin_url[origin_url.rfind('//') + 2:])
    request_key_list = [sorted_url, msg['flow']['request']['method']]
    request_key = get_md5_code(request_key_list)
    request_time = msg['time']
    # 3. check data
    if request_key in HISTORY_URL:
        history_request_time = HISTORY_URL[request_key]['time']
        if request_time - history_request_time < THRESHOLD_TIME:
            url = HISTORY_URL[request_key]['url']
            title = f'Duplicated requests: {url}\n'
            description = f'Duplicated requests: {url}\n'
            description += f'First at {history_request_time}\n'
            description += f'Second at {request_time}\n'
            event.issue(title, description)
    # 4. update storage data
    # BUGFIX: dict.update on an existing key keeps its original insertion
    # position, which broke the "oldest entry first" assumption the eviction
    # loop's early break relies on. Pop first so a refreshed key moves to the
    # end and the dict stays ordered oldest-first by 'time'.
    HISTORY_URL.pop(request_key, None)
    HISTORY_URL[request_key] = {
        'time': request_time,
        'url': sorted_url
    }
    # Evict overdue entries; entries are oldest-first, so stop at the first
    # entry that is still fresh.
    overdue_urls = []
    for key, value in HISTORY_URL.items():
        if time.time() - value['time'] >= THRESHOLD_TIME:
            overdue_urls.append(key)
        else:
            break
    # delete after iterating — mutating a dict while iterating it raises
    for overdue_url in overdue_urls:
        del HISTORY_URL[overdue_url]
def img_size(msg):
    """Report images whose size exceeds THRESHOLD_IMG_SIZE (in KB).

    Converts ``msg['flow']['size']`` from bytes to KB (one decimal place)
    and, if above the threshold, raises an issue via ``event.issue`` with
    the scheme-stripped image URL.

    :param msg: mitmproxy-style message dict; reads ``msg['flow']['size']``
                and ``msg['flow']['request']['url']``.
                (schema assumed from usage — confirm against caller)
    """
    # 1. ignore unexpected object
    if ignore_check(msg):
        return
    # 2. prepare useful info
    # BUGFIX: renamed local from 'img_size', which shadowed this function.
    # bytes -> KB, rounded to one decimal place
    size_kb = Decimal(int(msg['flow']['size']) / 1024).quantize(Decimal('0.0'))
    # 3. check data
    if size_kb > THRESHOLD_IMG_SIZE:
        img_url = msg['flow']['request']['url']
        # strip the scheme ("http://", "https://") for display
        img_url = img_url[img_url.rfind('//') + 2:]
        title = f'Image size {size_kb}KB is beyond expectations: {img_url}\n'
        description = f'Image size {size_kb}KB is beyond expectations: {img_url}\n'
        # BUGFIX: user-facing typo 'Expecte:' -> 'Expected:'
        description += f'Expected: {THRESHOLD_IMG_SIZE}KB\n'
        description += f'Actual: {size_kb}KB\n'
        event.issue(title, description)