Example 1
    def save_request(self, **args):
        """Extract HTTP request headers from a flow's *_orig.dat capture
        and persist them as HTTPDetails rows.

        Expects ``args['hash_value']`` (Flow lookup key) and ``args['path']``
        (directory holding the contents_*.dat files).

        Returns True on success, False on any failure.
        """
        try:
            flow = Flow.objects.get(hash_value=args['hash_value'])
            for detail in flow.details:
                # e.g. contents_192.168.1.5:42825-62.212.84.227:80_orig.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                orig_file = "_".join(["contents", flow_str, "orig.dat"])
                # path may arrive as unicode; downstream file ops want a str
                file_path = str("/".join([args['path'], orig_file]))

                file_handler = FileHandler()

                # offsets where a request line starts (method keyword match)
                search_li = file_handler.search(file_path, ["GET", "PUT", "POST"])
                if not search_li:
                    continue
                requests = [item[0] for item in search_li]

                # each request header block ends with an empty line (CRLFCRLF)
                search_li = file_handler.search(file_path, ["\r\n\r\n"])
                if not search_li:
                    continue
                empty_lines = [item[0] for item in search_li]

                data = file_handler.data
                # zip() pairs each request with its terminator and guards
                # against a trailing request that has no CRLFCRLF match
                for start, end in zip(requests, empty_lines):
                    request_li = data[start:end].split("\n")

                    for entry in request_li:
                        # header lines contain ':'; the request line does not
                        info = entry.split(":")
                        if len(info) != 1:
                            continue
                        # request line: METHOD URI HTTP/VERSION
                        info = info[0].split()
                        method = info[0]
                        uri = info[1]
                        version = info[2].split("/")[1]

                        try:
                            # BUG FIX: the lookup used the misspelled kwarg
                            # 'flow_deatils', so get() always failed and a
                            # duplicate row was created on every run.
                            HTTPDetails.objects.get(
                                http_type="request", method=method, uri=uri,
                                headers=request_li, version=version,
                                flow_details=detail)
                        except HTTPDetails.DoesNotExist:
                            http_details = HTTPDetails(
                                http_type="request", method=method, uri=uri,
                                headers=request_li, version=version,
                                flow_details=detail)
                            http_details.save()
            return True

        except Exception:
            # best-effort: any parse/DB failure reports False to the caller
            return False
Example 2
    def save_response_files(self, path, hash_value):
        """Carve text response bodies (html/js/css) out of each flow's
        *_resp.dat capture and write them under <path>/html-files/<flow>,
        then attach the produced file names to the flow's HTTPDetails rows.

        Returns True on success, False on any failure.
        """
        try:
            flow = Flow.objects.get(hash_value=hash_value)
            flow_details = flow.details
            for detail in flow_details:
                # e.g. contents_192.168.1.5:42825-62.212.84.227:80_resp.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                resp_file = "_".join(["contents", flow_str, "resp.dat"])
                # path is created as unicode; hachoir needs a plain str
                file_path = str("/".join([path, resp_file]))

                file_handler = FileHandler()

                # offsets of responses carrying text payloads we can carve
                strings = ["Content-Type: text/html",
                           "Content-Type: application/x-javascript",
                           "Content-Type: text/css"]
                search_li = file_handler.search(file_path, strings)
                if not search_li:
                    continue
                responses = [item[0] for item in search_li]

                # header/body separators (CRLFCRLF)
                search_li = file_handler.search(file_path, ["\r\n\r\n"])
                if not search_li:
                    continue
                empty_lines = [item[0] for item in search_li]

                # start offset of every response status line
                search_li = file_handler.search(file_path, ["HTTP/1.1"])
                if not search_li:
                    continue
                http_lines = [item[0] for item in search_li]

                try:
                    stream = FileInputStream(unicodeFilename(file_path),
                                             real_filename=file_path)
                except NullStreamError:
                    continue
                subfile = SearchSubfile(stream, 0, None)
                subfile.loadParsers()
                root = "/".join([path, "html-files"])
                if not os.path.exists(root):
                    os.makedirs(root)
                # hachoir wants a plain str output directory as well
                output = str("/".join([root, flow_str]))
                subfile.setOutput(output)

                data = file_handler.data
                for x in range(len(responses)):
                    file_ext = ".txt"

                    # find the closest header/body separator after this
                    # response: insert the response offset, re-sort, and take
                    # its successor in the sorted list
                    empty_lines.append(responses[x])
                    empty_lines.sort()
                    index = empty_lines.index(responses[x])
                    offset = empty_lines[index + 1]

                    # body runs up to the next response, or to end of stream
                    try:
                        size = http_lines[x + 1] - 2
                    except IndexError:
                        size = stream.size

                    body = data[offset + 4:size]

                    filename = subfile.output.createFilename(file_ext)
                    # BUG FIX: context manager closes the handle even when
                    # write() raises (the old code leaked it on error)
                    with open("/".join([output, filename]), "w") as out_fh:
                        out_fh.write(body)

                # attach the carved file names to the flow's HTTP records
                if detail.protocol == "http":
                    http_files = os.listdir(output)
                    if len(http_files) > 0:
                        http_li = [h for h in HTTPDetails.objects.all()
                                   if h.flow_details.id == detail.id]
                        for http in http_li:
                            http.files = http_files
                            http.save()

            return True

        except Exception as ex:
            print(ex)
            return False
Example 3
    def save_response_headers(self, path, hash_value):
        """Parse the status line and headers of every HTTP response in each
        flow's *_resp.dat capture and persist them as HTTPDetails rows.

        Returns True on success, False on any failure.
        """
        try:
            flow = Flow.objects.get(hash_value=hash_value)
            flow_details = flow.details
            for detail in flow_details:
                # e.g. contents_192.168.1.5:42825-62.212.84.227:80_resp.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                resp_file = "_".join(["contents", flow_str, "resp.dat"])
                # path is created as unicode; convert for file operations
                file_path = str("/".join([path, resp_file]))

                file_handler = FileHandler()

                # start offset of every response status line
                search_li = file_handler.search(file_path, ["HTTP/1.1"])
                if not search_li:
                    continue
                responses = [item[0] for item in search_li]

                # header/body separators (CRLFCRLF) terminate each header block
                search_li = file_handler.search(file_path, ["\r\n\r\n"])
                if not search_li:
                    continue
                empty_lines = [item[0] for item in search_li]

                data = file_handler.data
                # separate name so the .dat path above is not clobbered
                html_dir = os.path.join(path, "html-files")

                for x in range(len(responses)):
                    response_li = data[responses[x]:empty_lines[x]].split("\n")

                    header = version = status = None
                    content_type = content_encoding = None

                    for entry in response_li:
                        info = entry.split(":")
                        if len(info) == 1:
                            # status line: HTTP/VERSION STATUS ...
                            info = info[0].split()
                            version = info[0].split("/")[1]
                            status = info[1]
                            header = response_li
                        else:
                            if "Content-Type" in info:
                                content_type = info[1]
                            if [field for field in info if "gzip" in field]:
                                content_encoding = "gzip"

                    # BUG FIX: this lookup/save used to run once per header
                    # LINE, creating a partially-filled duplicate row before
                    # Content-Type/encoding were parsed; run it once per
                    # response, after the whole header block is parsed.
                    http_li = [h for h in HTTPDetails.objects.filter(
                                   http_type="response", version=version,
                                   headers=header, status=status,
                                   content_type=content_type,
                                   content_encoding=content_encoding)
                               if h.flow_details.id == detail.id]
                    if len(http_li) == 1:
                        http_details = http_li[0]
                        http_details.file_path = html_dir
                        http_details.save()
                    else:
                        http_details = HTTPDetails(
                            http_type="response", version=version,
                            headers=header, status=status,
                            content_type=content_type,
                            content_encoding=content_encoding,
                            file_path=html_dir, flow_details=detail)
                        http_details.save()

            return True

        except Exception:
            # best-effort: any parse/DB failure reports False to the caller
            return False