Example #1
    def save_request(self, **args):
        # get the flow matching the given hash value from the database

        try:
            flow = Flow.objects.get(hash_value=args['hash_value'])
            flow_details = flow.details
            for detail in flow_details:
                # create the orig file ex: contents_192.168.1.5:42825-62.212.84.227:80_orig.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                orig_file = "_".join(["contents", flow_str,"orig.dat"])
                file_path = "/".join([args['path'], orig_file])
                file_path = str(file_path)

                strings = ["GET", "PUT", "POST"]
                file_handler = FileHandler()
                requests = []
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    requests.append(item[0])

                # hacky: find the empty lines, since each request is separated by an empty line
                empty_lines = []
                strings = ["\r\n\r\n"]
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    empty_lines.append(item[0])

                for x in range(len(requests)):
                    # here i have the request header
                    data = file_handler.data
                    request = data[requests[x]:empty_lines[x]]
                    request_li = request.split("\n")

                    for entry in request_li:
                        # the first line is method and uri with version information
                        info = entry.split(":")
                        if len(info) == 1:
                            info = info[0].split()
                            method = info[0]
                            uri = info[1]
                            version = info[2].split("/")[1]

                            try:
                                http_details = HTTPDetails.objects.get(http_type="request", method=method, uri=uri, headers=request_li, version=version, flow_details=detail)
                            except:
                                http_details = HTTPDetails(http_type="request", method=method, uri=uri, headers=request_li, version=version, flow_details=detail)
                                http_details.save()
            return True

        except Exception, ex:
            return False
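
save_request distinguishes the request line from the header lines by splitting on ":": only the request line has no colon, and splitting it on whitespace yields the method, URI and HTTP version. A standalone sketch of that step, using a hypothetical request line (FileHandler and HTTPDetails are not involved):

# standalone sketch of the request-line parsing used in save_request above
entry = "GET /index.html HTTP/1.1"       # hypothetical request line
info = entry.split(":")
if len(info) == 1:                       # a header line such as "Host: example.com" splits into two parts
    parts = info[0].split()
    method = parts[0]                    # "GET"
    uri = parts[1]                       # "/index.html"
    version = parts[2].split("/")[1]     # "HTTP/1.1" -> "1.1"

Note that an absolute-form request line (e.g. "GET http://example.com/ HTTP/1.1") does contain ":", so the len(info) == 1 test would skip it.
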
Example #2
def upload(request):
    log = Logger("Upload form", "DEBUG")
    context = {
        'page_title': 'Upload your pcap file here',
        'upload_status': False,
        'message': request.session.get('message', False)
    }
    if request.method == "POST":
        form = UploadPcapForm(request.POST, request.FILES)
        if form.is_valid():
            user_id = request.user.id
            context['form'] = form
            file_handler = FileHandler()
            file_handler.create_dir()
            mem_file = request.FILES['pcap_file']
            log.message("file: %s" % mem_file.name)
            file_handler.save_file(mem_file)
            context['upload_status'] = True

            #save the file name to the db
            pcap_name = mem_file.name
            upload_path = file_handler.upload_dir
            # every pcap file is saved as a flow container; there may or may not be flows, and the pcaps field will hold the flow pcaps
            hash_handler = HashHandler()
            hash_value = hash_handler.get_hash(os.path.join(upload_path, pcap_name))
            request.session['uploaded_hash'] = hash_value
            request.session['uploaded_file_name'] = pcap_name
            # send the file to the defined protocol handler so that it can detect
            protocol_handler = settings.PROTOCOL_HANDLER
            package = "ovizart.modules.traffic.detector"
            module_name = ".".join([package, protocol_handler])
            # from ovizart.modules.traffic.detector.x import handler as imported_module
            traffic_detector_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
            traffic_detector_handler = traffic_detector_module.Handler()
            traffic_detector_handler.create_reassemble_information(file_handler.file_path, upload_path)
            output = traffic_detector_handler.detect_proto(file_handler.file_path, upload_path)

            if output == False:
                request.session['message'] = "Error occured. Please try again."
                return redirect('/pcap/upload')


            file_type = get_file_type(file_handler.file_path)
            file_size = get_file_size(file_handler.file_path)
            flow_file, created = Flow.objects.get_or_create(user_id=user_id, hash_value=hash_value,file_name=pcap_name,
                                                            path=upload_path, upload_time=datetime.datetime.now(),
                                                            file_type=file_type, file_size=file_size)

            if "tcp" in output:
                log.message("protocol detected: %s" % "TCP")
                # run tcp flow extractor
                p_read_handler = PcapHandler()
                p_read_handler.open_file(file_handler.file_path)
                p_read_handler.open_pcap()

                f_handler = FlowHandler(p_read_handler)
                flow, direction = f_handler.get_tcp_flows()

                p_write_handler = PcapHandler()
                files = f_handler.save_flow(flow, p_write_handler, save_path=upload_path)

                # save the flow pcap names to the mongo db
                pcap_list = map(lambda x: Pcap.objects.create(hash_value=hash_handler.get_hash(os.path.join(upload_path, x)), file_name=x, path=upload_path), files.values()[0])
                if flow_file.pcaps:
                    pre_li = flow_file.pcaps
                    pre_li.extend(pcap_list)
                    flow_file.pcaps = pre_li
                else:
                    flow_file.pcaps = pcap_list
                flow_file.save()

                p_read_handler.close_file()
                p_write_handler.close_file()
                # now hook in a protocol detector
                # before that, detect the application-level protocol
                for f in files.values()[0]:
                    packets  = []
                    # better to save tcp level information to db here
                    full_path = os.path.join(upload_path, f)
                    p_read_handler.open_file(full_path)
                    p_read_handler.open_pcap()
                    pcap = p_read_handler.get_pcap()
                    # the list that will keep the tcp part of the packet
                    tcp_list = []
                    for ts, buf in pcap:
                        tcp_handler = TcpHandler()
                        tcp = tcp_handler.read_tcp(ts, buf)
                        # this list will be used at the layers above tcp
                        if tcp:
                            tcp_list.append((tcp, tcp_handler.ident))
                        else: continue
                        tcp_data = u"."
                        if tcp_handler.data:
                            tcp_data = tcp_handler.data
                            # some requests include hexadecimal info, most probably binary data that cannot be
                            # converted to utf-8; for now it is safer to replace it (TODO: handle it properly)
                            # to reproduce, try get_tcp(4) below: that packet's tcp.data holds a binary request
                            # def get_tcp(n):
                            #    count = 1
                            #    f = file("milliyet.pcap", "rb")
                            #    reader = dpkt.pcap.Reader(f)
                            #    for ts, buf in reader:
                            #        if count == n:
                            #            f.close()
                            #            return buf
                            #        count += 1

                            data_li = tcp_data.split("\r\n")
                            tmp = []
                            for data in data_li:
                                try:
                                    data.encode("utf-8")
                                    tmp.append(data)
                                except:
                                    tmp.append("data that can not be encoded to utf-8")

                            tcp_data = " \n".join(tmp)

                        packet = PacketDetails.objects.create(ident=tcp_handler.ident, flow_hash=hash_value, timestamp=tcp_handler.timestamp,
                                                                length=tcp_handler.length, protocol=tcp_handler.proto,
                                                                src_ip=tcp_handler.src_ip,
                                                                dst_ip=tcp_handler.dst_ip, sport=tcp_handler.sport,
                                                                dport=tcp_handler.dport, data=str(tcp_data))
                        packets.append(packet)
                    # get the pcap object
                    p = Pcap.objects.get(hash_value=hash_handler.get_hash(os.path.join(upload_path, f)))
                    log.message("pcap for packet update detected: %s" % p)
                    # update its packets
                    p.packets = list(packets) # converting a queryset to list
                    p.save()
                    p_read_handler.close_file()

            if "udp" in output:
                log.message("protocol detected: %s" % "UDP")
                p_read_handler = PcapHandler()
                file_path = os.path.join(upload_path, pcap_name)
                p_read_handler.open_file(file_path)
                p_read_handler.open_pcap()
                udp_handler = UDPHandler()
                pcap = Pcap.objects.create(hash_value=hash_handler.get_hash(os.path.join(upload_path, pcap_name)), file_name=pcap_name, path=upload_path)
                pcap_list = list([pcap])
                if flow_file.pcaps:
                    pre_li = flow_file.pcaps
                    pre_li.extend(pcap_list)
                    flow_file.pcaps = pre_li
                else:
                    flow_file.pcaps = pcap_list
                flow_file.save()

                packets  = []
                for ts, buf in p_read_handler.get_reader():
                    udp = udp_handler.read_udp(ts, buf)
                    if udp:
                        udp_data = u"."
                        if udp_handler.data:
                            udp_data = udp_handler.data
                            try:
                                udp_data = udp_data.encode("utf-8")
                            except:
                                udp_data = "data that can not be encoded to utf-8"
                        packet = PacketDetails.objects.create(ident=udp_handler.ident, flow_hash=hash_value, timestamp=udp_handler.timestamp,
                                                            length = udp_handler.length,
                                                            protocol=udp_handler.proto, src_ip=udp_handler.src_ip,
                                                            dst_ip=udp_handler.dst_ip, sport=udp_handler.sport,
                                                            dport=udp_handler.dport, data=str(udp_data))
                        packets.append(packet)
                # get the pcap object
                p = Pcap.objects.get(hash_value=hash_handler.get_hash(os.path.join(upload_path, pcap_name)))
                # update its packets
                p.packets = list(packets) # converting a queryset to list
                p.save()
                p_read_handler.close_file()


            # start the bro-related processing for the reassembled data
            output = traffic_detector_handler.detect_appproto(file_handler.file_path, upload_path)
            log.message("protocol detected: %s" % output)
            if output and "http" in output:
                log.message("protocol detected: %s" % "HTTP")
                # save the reassembled http session IPs to FlowDetails

                # this part is checking the http handler module name and importing the handler
                http_protocol_handler = settings.HTTP_HANDLER
                package = "ovizart.modules.traffic.parser.tcp"
                module_name = ".".join([package, http_protocol_handler])
                # from ovizart.modules.traffic.parser.tcp.x import handler as imported_module
                http_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                http_handler = http_handler_module.Handler()
                # define a get_flow_ips function for the custom handler if required
                # TODO: save the timestamps of the flows
                flow_ips = http_handler.get_flow_ips(path=upload_path)
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="http", timestamp = detail[4])
                    flow_detail_li.append(flow_detail)
                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()
                # then call functions that will save request and responses that will parse dat files, save the headers and files
                #http_handler.save_request(path=upload_path, hash_value=request.session['uploaded_hash'])
                #http_handler.save_response(path=upload_path, hash_value=request.session['uploaded_hash'])
                http_handler.save_request_response(path=upload_path, hash_value=request.session['uploaded_hash'])

            # DNS-related handling starts here
            if output and "dns" in output:
                log.message("protocol detected: %s" % "DNS")
                dns_protocol_handler = settings.DNS_HANDLER
                package = "ovizart.modules.traffic.parser.udp"
                module_name = ".".join([package, dns_protocol_handler])
                # from ovizart.modules.traffic.parser.udp.x import handler as imported_module
                dns_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                dns_handler = dns_handler_module.Handler()
                # define a get_flow_ips function for the custom handler if required
                flow_ips = dns_handler.get_flow_ips(path=upload_path, file_name=request.session['uploaded_file_name'])
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="dns", timestamp = detail[4])
                    flow_detail_li.append(flow_detail)
                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()

                dns_handler.save_request_response()

            if output and "smtp" in output:
                log.message("protocol detected: %s" % "SMTP")
                smtp_protocol_handler = settings.SMTP_HANDLER
                package = "ovizart.modules.traffic.parser.tcp"
                module_name = ".".join([package, smtp_protocol_handler])
                # from ovizart.modules.traffic.parser.tcp.x import handler as imported_module
                smtp_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                smtp_handler = smtp_handler_module.Handler()
                # define a get_flow_ips function for the custom handler if required
                smtp_handler.set_flow(flow_file)  # needed to get the timestamp from a packet that belongs to the flow
                flow_ips = smtp_handler.get_flow_ips(path=upload_path, file_name=request.session['uploaded_file_name'])
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="smtp", timestamp = detail[4])
                    flow_detail_li.append(flow_detail)
                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()

                smtp_handler.save_request_response(upload_path=upload_path)

            else:
                log.message("protocol detected: %s" % "Unknown")
                unknown_protocol_handler = settings.UNKNOWN_HANDLER
                package = "ovizart.modules.traffic.parser"
                module_name = ".".join([package, unknown_protocol_handler])
                unknown_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                unknown_handler = unknown_handler_module.Handler()
                flow_ips = unknown_handler.get_flow_ips(path=upload_path, file_name=request.session['uploaded_file_name'], parent_hash_value=request.session['uploaded_hash'], user_id=user_id)
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="unknown", timestamp = detail[4])
                    if create:
                        flow_detail_li.append(flow_detail)

                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()



    else:
        form = UploadPcapForm()
        context['form'] = form

    request.session['message'] = False
    return render_to_response("pcap/upload.html",
            context_instance=RequestContext(request, context))
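
Every protocol handler in the view above is loaded at runtime from a dotted module path assembled out of the settings (PROTOCOL_HANDLER, HTTP_HANDLER, DNS_HANDLER, SMTP_HANDLER, UNKNOWN_HANDLER). A minimal sketch of that pattern, written as a helper for illustration only (load_handler is not part of ovizart); importlib.import_module is a more readable equivalent of the getattr(__import__(...)) idiom used in the view:

# sketch of the dynamic handler loading used throughout the upload view above;
# load_handler("ovizart.modules.traffic.detector", settings.PROTOCOL_HANDLER) has the same
# effect as getattr(__import__(module_name, fromlist=["handler"]), "handler").Handler()
import importlib

def load_handler(package, handler_name):
    # build the dotted path, e.g. "ovizart.modules.traffic.detector.<handler_name>.handler"
    module_name = ".".join([package, handler_name, "handler"])
    handler_module = importlib.import_module(module_name)
    return handler_module.Handler()
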
Example #3
    def save_response_files(self, path, hash_value):
        try:
            flow = Flow.objects.get(hash_value=hash_value)
            flow_details = flow.details
            for detail in flow_details:
                # create the orig file ex: contents_192.168.1.5:42825-62.212.84.227:80_resp.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                resp_file = "_".join(["contents", flow_str,"resp.dat"])
                file_path = "/".join([path, resp_file])
                # path is created as unicode; convert it to a regular string for the hachoir operation
                file_path = str(file_path)

                strings = ["Content-Type: text/html", "Content-Type: application/x-javascript", "Content-Type: text/css"]
                file_handler = FileHandler()
                responses = []
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    responses.append(item[0])

                empty_lines = []
                strings = ["\r\n\r\n"]
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    empty_lines.append(item[0])

                http_lines = []
                strings = ["HTTP/1.1"]
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    http_lines.append(item[0])

                try:
                    stream = FileInputStream(unicodeFilename(file_path), real_filename=file_path)
                except NullStreamError:
                    continue
                subfile = SearchSubfile(stream, 0, None)
                subfile.loadParsers()
                root = "/".join([path, "html-files"])
                if not os.path.exists(root):
                    os.makedirs(root)
                output = "/".join([root, flow_str])
                output = str(output)
                subfile.setOutput(output)

                for x in range(len(responses)):
                    # here we have the response header
                    data = file_handler.data
                    #f = data[empty_lines[x]:http_lines[x+1]]
                    file_ext = ".txt"
                    #if ("html" in f or "body" in f):
                    #    file_ext = ".html"
                    #elif ("script" in f):
                     #   file_ext = ".js"
                    #else:

                    # select the closest empty line
                    empty_lines.append(responses[x])
                    empty_lines.sort()
                    index = empty_lines.index(responses[x])
                    offset = empty_lines[index+1]

                    size = None
                    try:
                        size = http_lines[x+1]-2
                    except IndexError:
                        size = stream.size

                    f = data[offset+4:size]

                    filename = subfile.output.createFilename(file_ext)
                    w = open("/".join([output, filename]), "w")
                    w.write(f)
                    w.close()

                # save the binaries carved by hachoir, along with the created txt files, to the db
                if detail.protocol == "http":
                    http_files = os.listdir(output)
                    #http_files = filter(lambda x: x.split(".")[-1] != 'txt', http_files) # no need to take the txt files
                    if len(http_files) > 0:
                        http_li = filter(lambda x: x.flow_details.id == detail.id, HTTPDetails.objects.all())
                        for http in http_li:
                            http.files = http_files
                            http.save()

            return True

        except Exception, ex:
            print ex
            return False
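
The loop in the middle of save_response_files locates each response body by byte offsets: the body starts right after the closest "\r\n\r\n" that follows the matched Content-Type line, and ends just before the next "HTTP/1.1" line (or at the end of the stream for the last response). A standalone sketch with made-up offsets (in the real code they come from FileHandler.search() and stream.size):

# standalone sketch of the "closest empty line" offset selection in save_response_files above
data = "x" * 1000               # stands in for file_handler.data
responses = [40, 520]           # offsets of the "Content-Type: ..." matches (hypothetical)
http_lines = [0, 480, 900]      # offsets of the "HTTP/1.1" matches (hypothetical)
empty_lines = [120, 600, 950]   # offsets of the "\r\n\r\n" matches (hypothetical)
stream_size = 1000              # stands in for stream.size

for x in range(len(responses)):
    # insert the response offset and pick the next empty line after it: that is where the body starts
    empty_lines.append(responses[x])
    empty_lines.sort()
    offset = empty_lines[empty_lines.index(responses[x]) + 1]
    try:
        size = http_lines[x + 1] - 2   # stop just before the next "HTTP/1.1" response line
    except IndexError:
        size = stream_size             # the last response runs to the end of the stream
    body = data[offset + 4:size]       # skip the "\r\n\r\n" itself
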
Example #4
    def save_response_headers(self, path, hash_value):
        try:
            flow = Flow.objects.get(hash_value=hash_value)
            flow_details = flow.details
            for detail in flow_details:
                # create the orig file ex: contents_192.168.1.5:42825-62.212.84.227:80_resp.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                resp_file = "_".join(["contents", flow_str,"resp.dat"])
                file_path = "/".join([path, resp_file])
                # path is created as unicode; convert it to a regular string for the hachoir operation
                file_path = str(file_path)

                strings = ["HTTP/1.1"]
                file_handler = FileHandler()
                responses = []
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    responses.append(item[0])

                empty_lines = []
                strings = ["\r\n\r\n"]
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    empty_lines.append(item[0])

                for x in range(len(responses)):
                    # here we have the response header
                    data = file_handler.data
                    response = data[responses[x]:empty_lines[x]]
                    response_li = response.split("\n")

                    header = info = version = status = content_type = content_encoding = None

                    for entry in response_li:
                        # the first line is the status line with the version and status code
                        info = entry.split(":")
                        if len(info) == 1:
                            info = info[0].split()
                            version = info[0].split("/")[1]
                            status = info[1]
                            header = response_li

                        else:
                            if "Content-Type" in info:
                                content_type = info[1]

                            if filter(lambda x: "gzip" in x, info):
                                content_encoding = "gzip"


                        http_li = filter(lambda x: x.flow_details.id == detail.id, HTTPDetails.objects.filter(http_type="response",
                            version=version, headers=header, status=status,
                            content_type=content_type, content_encoding=content_encoding))
                        #http_details = HTTPDetails.objects.get(http_type="response", version=version, headers=header, status=status, content_type=content_type, content_encoding=content_encoding, flow_details=detail)
                        if len(http_li) == 1:
                            http_details = http_li[0]
                            file_path = os.path.join(path, "html-files")
                            http_details.file_path = file_path
                            http_details.save()
                        else: # encoding error is fixed
                            file_path = os.path.join(path, "html-files")
                            http_details = HTTPDetails(http_type="response", version=version, headers=header,
                                            status=status, content_type=content_type, content_encoding=content_encoding,
                                            file_path=file_path, flow_details=detail)
                            http_details.save()

            return True

        except:
            return False
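
save_response_headers tells the status line apart from ordinary header lines by splitting on ":": the status line is the only line without a colon, and splitting it on whitespace yields the HTTP version and the status code. A standalone sketch with hypothetical header lines (no HTTPDetails objects are touched here):

# standalone sketch of the status-line / header-line split in save_response_headers above
response_li = [
    "HTTP/1.1 200 OK",
    "Content-Type: text/html",
    "Content-Encoding: gzip",
]

version = status = content_type = content_encoding = None
for entry in response_li:
    info = entry.split(":")
    if len(info) == 1:                    # only the status line has no ':'
        parts = info[0].split()
        version = parts[0].split("/")[1]  # "HTTP/1.1" -> "1.1"
        status = parts[1]                 # "200"
    else:
        if "Content-Type" in info:
            content_type = info[1]        # " text/html" (note the leading space)
        if any("gzip" in part for part in info):   # portable form of the filter(...) test used above
            content_encoding = "gzip"
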