def save_request(self, **args):
    """Extract HTTP request headers from the captured *_orig.dat stream
    files of one Flow and persist an HTTPDetails row per request.

    Expected keyword args:
        hash_value -- hash identifying the Flow record to process
        path       -- directory holding the contents_*_orig.dat files

    Returns:
        True on success, False if anything goes wrong (best-effort,
        errors are deliberately swallowed as in the original design).
    """
    try:
        flow = Flow.objects.get(hash_value=args['hash_value'])
        for detail in flow.details:
            # Build the per-connection capture file name, e.g.
            # contents_192.168.1.5:42825-62.212.84.227:80_orig.dat
            source_str = ":".join([detail.src_ip, str(detail.sport)])
            destination_str = ":".join([detail.dst_ip, str(detail.dport)])
            flow_str = "-".join([source_str, destination_str])
            orig_file = "_".join(["contents", flow_str, "orig.dat"])
            # path arrives as unicode; hachoir needs a plain str
            file_path = str("/".join([args['path'], orig_file]))

            file_handler = FileHandler()

            # Offsets where each request line starts.
            search_li = file_handler.search(file_path, ["GET", "PUT", "POST"])
            if not search_li:
                continue
            requests = [item[0] for item in search_li]

            # Each request header block is terminated by an empty line
            # (CRLFCRLF) -- offsets of those terminators.
            search_li = file_handler.search(file_path, ["\r\n\r\n"])
            if not search_li:
                continue
            empty_lines = [item[0] for item in search_li]

            data = file_handler.data
            # zip() pairs each request start with its terminator and
            # stops at the shorter list, instead of the IndexError the
            # old range(len(requests)) loop hit on mismatched lengths.
            for start, end in zip(requests, empty_lines):
                request_li = data[start:end].split("\n")
                for entry in request_li:
                    # The request line ("GET /x HTTP/1.1") is the only
                    # line without a ':' header separator.
                    info = entry.split(":")
                    if len(info) == 1:
                        info = info[0].split()
                        method = info[0]
                        uri = info[1]
                        version = info[2].split("/")[1]
                        try:
                            # BUG FIX: the original lookup used the
                            # misspelled keyword 'flow_deatils', which
                            # always raised and therefore always created
                            # a duplicate record.
                            http_details = HTTPDetails.objects.get(
                                http_type="request", method=method,
                                uri=uri, headers=request_li,
                                version=version, flow_details=detail)
                        except Exception:
                            http_details = HTTPDetails(
                                http_type="request", method=method,
                                uri=uri, headers=request_li,
                                version=version, flow_details=detail)
                            http_details.save()
        return True
    except Exception:
        # Best-effort API: callers only see a boolean outcome.
        return False
def save_response_headers(self, path, hash_value):
    """Extract HTTP response headers from the captured *_resp.dat stream
    files of one Flow and persist/update an HTTPDetails row per response.

    Args:
        path       -- directory holding the contents_*_resp.dat files;
                      extracted bodies go under <path>/html-files
        hash_value -- hash identifying the Flow record to process

    Returns:
        True on success, False if anything goes wrong (best-effort,
        errors are deliberately swallowed as in the original design).
    """
    try:
        flow = Flow.objects.get(hash_value=hash_value)
        for detail in flow.details:
            # Build the per-connection capture file name, e.g.
            # contents_192.168.1.5:42825-62.212.84.227:80_resp.dat
            source_str = ":".join([detail.src_ip, str(detail.sport)])
            destination_str = ":".join([detail.dst_ip, str(detail.dport)])
            flow_str = "-".join([source_str, destination_str])
            resp_file = "_".join(["contents", flow_str, "resp.dat"])
            # path is created as unicode; hachoir needs a plain str
            file_path = str("/".join([path, resp_file]))

            file_handler = FileHandler()

            # Offsets where each response status line starts.
            search_li = file_handler.search(file_path, ["HTTP/1.1"])
            if not search_li:
                continue
            responses = [item[0] for item in search_li]

            # Each response header block is terminated by an empty line
            # (CRLFCRLF) -- offsets of those terminators.
            search_li = file_handler.search(file_path, ["\r\n\r\n"])
            if not search_li:
                continue
            empty_lines = [item[0] for item in search_li]

            data = file_handler.data
            # zip() pairs each response start with its terminator and
            # stops at the shorter list, instead of the IndexError the
            # old range(len(responses)) loop hit on mismatched lengths.
            for start, end in zip(responses, empty_lines):
                response_li = data[start:end].split("\n")
                header = version = status = None
                content_type = content_encoding = None
                for entry in response_li:
                    # The status line ("HTTP/1.1 200 OK") is the only
                    # line without a ':' header separator.
                    info = entry.split(":")
                    if len(info) == 1:
                        info = info[0].split()
                        version = info[0].split("/")[1]
                        status = info[1]
                        header = response_li
                    else:
                        if "Content-Type" in info:
                            content_type = info[1]
                        if any("gzip" in part for part in info):
                            content_encoding = "gzip"

                # BUG FIX: the original called len() on the result of
                # filter(), which is a lazy iterator on Python 3 and
                # raised TypeError -- the bare except then masked it.
                # Materialize the matches with a list comprehension.
                matches = [h for h in HTTPDetails.objects.filter(
                               http_type="response", version=version,
                               headers=header, status=status,
                               content_type=content_type,
                               content_encoding=content_encoding)
                           if h.flow_details.id == detail.id]

                file_path = os.path.join(path, "html-files")
                if len(matches) == 1:
                    # Exactly one existing record: just point it at the
                    # extracted-files directory.
                    http_details = matches[0]
                    http_details.file_path = file_path
                else:
                    # None (or ambiguous): create a fresh record.
                    http_details = HTTPDetails(
                        http_type="response", version=version,
                        headers=header, status=status,
                        content_type=content_type,
                        content_encoding=content_encoding,
                        file_path=file_path, flow_details=detail)
                http_details.save()
        return True
    except Exception:
        # Best-effort API: callers only see a boolean outcome.
        return False