Example #1
def flow_pcap_details(request, flow_pcap_md5):
    log = Logger("Pcap file details", "DEBUG")
    flow = Flow.objects.get(hash_value=flow_pcap_md5)

    url = "".join([settings.BASE_URL, "/api/rest/all_protocols_by_hash/?format=json", "&parent_hash_value=", flow_pcap_md5])
    log.message("URL: %s" % (url))
    req = urllib2.Request(url, None)
    opener = urllib2.build_opener()
    f = opener.open(req)
    json_response = json.load(f)
    json_data = json.dumps(json_response)
    json_dir = os.path.join(settings.PROJECT_ROOT, "json_files")
    json_file = tempfile.NamedTemporaryFile(mode="w", dir=json_dir, delete=False)

    file_name = os.path.basename(json_file.name)
    # save the json data to the temporary file
    json_file.write(json_data)
    json_file.close()

    context = {
        'page_title': " ".join([flow.file_name, "Details"]),
        'flow': flow,
        'pcap_operation': "file_details",
        'json_file_url': os.path.join(settings.ALTERNATE_BASE_URL, "json_media", file_name),
        'json_response': json_response,
        'hash_value': flow_pcap_md5,
        'select_update_li': ["file_details", "file_summary"]
    }
    return render_to_response("pcap/file_details.html",
        context_instance=RequestContext(request, context))
Example #2
 def __init__(self):
     self.log = Logger("SMTP Protocol Handler", "DEBUG")
     self.log.message("SMTP protocol handler called")
     self.file_name_li = []
     self.flow = None
     self.toProcess = dict()
     self.reportRoot = None
     self.streamcounter = 0
Example #3
 def __init__(self):
     super(Handler, self).__init__()
     self.timestamp = None
     self.proto = None
     self.src_ip = None
     self.dst_ip = None
     self.sport = None
     self.dport = None
     self.ident = None
     self.length = None
     self.data = None
     self.log = Logger("TCP Protocol Handler", "DEBUG")
     self.log.message("TCP protocol handler called")
Example #4
class Handler(object):
    def __init__(self):
        super(Handler, self).__init__()
        self.timestamp = None
        self.proto = None
        self.src_ip = None
        self.dst_ip = None
        self.sport = None
        self.dport = None
        self.ident = None
        self.length = None
        self.data = None
        self.log = Logger("TCP Protocol Handler", "DEBUG")
        self.log.message("TCP protocol handler called")

    def read_tcp(self, ts, buf):
        eth = self.get_eth(buf)
        if not eth:
            return False
        ip = self.get_ip(eth)
        if not ip:
            return False
        self.timestamp = datetime.datetime.fromtimestamp(float(ts))
        tcp = self.get_tcp(ip)
        return tcp

    def get_eth(self, buf):
        eth = dpkt.ethernet.Ethernet(buf)
        if eth.type != dpkt.ethernet.ETH_TYPE_IP:
            return False
        else:
            return eth

    def get_ip(self, eth):
        ip = eth.data
        self.length = ip.len
        if ip.p != dpkt.ip.IP_PROTO_TCP:
            return False
        else:
            self.proto = ip.p
            self.src_ip = '.'.join(str(ord(c)) for c in ip.src)
            self.dst_ip = '.'.join(str(ord(c)) for c in ip.dst)
            return ip

    def get_tcp(self, ip):
        tcp = ip.data
        self.ident = ip.id
        self.sport = tcp.sport
        self.dport = tcp.dport
        self.data = tcp.data
        return tcp
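A minimal usage sketch for this handler, mirroring the per-packet loop in the upload view further below. It assumes dpkt is installed and that the class above is importable; the import path used here is hypothetical.

import dpkt

# hypothetical import path; adjust to wherever the Handler above lives
from ovizart.modules.traffic.parser.tcp.tcp import Handler as TcpHandler

def dump_tcp_packets(pcap_path):
    # print one line per TCP packet found in the capture
    with open(pcap_path, "rb") as fp:
        for ts, buf in dpkt.pcap.Reader(fp):
            handler = TcpHandler()
            tcp = handler.read_tcp(ts, buf)
            if not tcp:
                continue  # non-IP or non-TCP frame
            print("%s %s:%s -> %s:%s length=%s" % (
                handler.timestamp, handler.src_ip, handler.sport,
                handler.dst_ip, handler.dport, handler.length))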
Example #5
class Handler(object):
    def __init__(self):
        super(Handler, self).__init__()
        self.timestamp = None
        self.proto = None
        self.src_ip = None
        self.dst_ip = None
        self.sport = None
        self.dport = None
        self.ident = None
        self.length = None
        self.data = None
        self.log = Logger("TCP Protocol Handler", "DEBUG")
        self.log.message("TCP protocol handler called")

    def read_tcp(self, ts, buf):
        eth = self.get_eth(buf)
        if not eth:
            return False
        ip = self.get_ip(eth)
        if not ip:
            return False
        self.timestamp = datetime.datetime.fromtimestamp(float(ts))
        tcp = self.get_tcp(ip)
        return tcp

    def get_eth(self, buf):
        eth = dpkt.ethernet.Ethernet(buf)
        if eth.type != dpkt.ethernet.ETH_TYPE_IP:
            return False
        else:
            return eth

    def get_ip(self, eth):
        ip = eth.data
        self.length = ip.len
        if ip.p != dpkt.ip.IP_PROTO_TCP:
            return False
        else:
            self.proto = ip.p
            self.src_ip = '.'.join(str(ord(c)) for c in ip.src)
            self.dst_ip = '.'.join(str(ord(c)) for c in ip.dst)
            return ip

    def get_tcp(self, ip):
        tcp = ip.data
        self.ident = ip.id
        self.sport = tcp.sport
        self.dport = tcp.dport
        self.data = tcp.data
        return tcp
Example #6
class Handler:
    def __init__(self):
        self.file_path = None
        self.file_name = None
        self.stream = None
        self.data = None
        self.log = Logger("File Handler", "DEBUG")

    def create_dir(self):
        now = datetime.datetime.now()
        self.log.message("Now is: %s:" % now)
        directory_name = now.strftime("%d-%m-%y")
        self.log.message("Directory name: %s:" % directory_name)
        directory_path = "/".join([settings.PROJECT_ROOT, "uploads", directory_name])
        self.log.message("Directory path: %s" % directory_path)
        if not os.path.exists(directory_path):
            os.mkdir(directory_path)
            self.log.message("Directory created")
        # we need to create another directory also for each upload
        new_dir = generate_name_from_timestame()
        new_dir_path = "/".join([directory_path, new_dir])
        if not os.path.exists(new_dir_path):
            os.mkdir(new_dir_path)
            self.log.message("Directory created")
        self.upload_dir = new_dir_path

    def save_file(self, f):
        self.file_name = f.name
        self.file_path = "/".join([self.upload_dir, self.file_name])
        destination = open(self.file_path, 'wb+')
        for chunk in f.chunks():
            destination.write(chunk)
        destination.close()

    def search(self, file_path, strings=None):
        try:
            self.stream = FileInputStream(unicodeFilename(file_path), real_filename=file_path)
        except NullStreamError:
            return False
        patterns = PatternMatching()
        for s in strings:
            patterns.addString(s)

        start = 0
        end = self.stream.size
        self.data = self.stream.readBytes(start, end//8)
        return patterns.search(self.data)

    def reset_data(self):
        self.data = None
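A sketch of how the upload view further below drives this handler for a Django file upload; the import path is hypothetical, and the 'pcap_file' field name is taken from that view.

# hypothetical import path for the file Handler above
from ovizart.modules.file.handler import Handler as FileHandler

def store_uploaded_pcap(request):
    # creates uploads/<dd-mm-yy>/<timestamp>/ and writes the uploaded file there
    file_handler = FileHandler()
    file_handler.create_dir()
    file_handler.save_file(request.FILES['pcap_file'])
    # full path of the stored pcap, e.g. for hashing or protocol detection
    return file_handler.file_path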
Example #7
class Handler(object):
    def __init__(self):
        super(Handler, self).__init__()
        self.log = Logger("Base Protocol Handler", "DEBUG")
        self.log.message("base protocol handler called")

    def create_reassemble_information(self, **params):
        pass

    def detect_proto(self, **params):
        pass

    def detect_appproto(self, **params):
        pass
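Handlers like this one are loaded dynamically by module name (see the upload view further below). A sketch of that import pattern; the short module name normally comes from settings, and the example value passed at the end is hypothetical.

def load_detector(module_short_name):
    # module_short_name comes from settings.PROTOCOL_HANDLER in the upload view
    package = "ovizart.modules.traffic.detector"
    module_name = ".".join([package, module_short_name])
    # equivalent to: from ovizart.modules.traffic.detector.<name> import handler
    detector_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
    return detector_module.Handler()

# hypothetical module name; the real value is whatever settings.PROTOCOL_HANDLER holds
detector = load_detector("bro")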
Example #8
class Handler(object):
    def __init__(self):
        super(Handler, self).__init__()
        self.log = Logger("Base Protocol Handler", "DEBUG")
        self.log.message("base protocol handler called")

    def create_reassemble_information(self, **params):
        pass

    def detect_proto(self, **params):
        pass

    def detect_appproto(self, **params):
        pass
Example #9
 def __init__(self):
     super(Handler, self).__init__()
     self.timestamp = None
     self.proto = None
     self.src_ip = None
     self.dst_ip = None
     self.sport = None
     self.dport = None
     self.ident = None
     self.length = None
     self.data = None
     self.log = Logger("TCP Protocol Handler", "DEBUG")
     self.log.message("TCP protocol handler called")
Example #10
def login_user(request):
    log = Logger("Login form", "DEBUG")
    form = None
    logged = False
    if request.session.has_key('logged_in'):
        logged = True
    if logged or request.method == "POST":
        form = LoginForm(request.POST)
        if logged or form.is_valid():
            user = username = email = password = None
            if logged:
                username = request.session['username']
                email = request.session['user_email']
                password = request.session['password']
            else:
                username = request.POST['username']
                request.session['username'] = username
                email = request.POST['user_email']
                request.session['user_email'] = email
                password = request.POST['password']
                request.session['password'] = password
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    login(request, user)
                    request.session['logged_in'] = True
                    user_id = request.user.id
                    url = "".join([settings.BASE_URL, "/api/rest/all_protocols/?format=json"])
                    log.message("URL: %s" % (url))
                    req = urllib2.Request(url, None)
                    opener = urllib2.build_opener()
                    f = opener.open(req)
                    json_response = json.load(f)
                    json_data = json.dumps(json_response)
                    json_dir = os.path.join(settings.PROJECT_ROOT, "json_files")
                    json_file = tempfile.NamedTemporaryFile(mode="w", dir=json_dir, delete=False)

                    user_json_file = UserJSonFile.objects.filter(user_id=user_id, json_type="summary-size")
                    if len(user_json_file) > 0:
                        # resolve the file path before deleting the record; indexing the
                        # queryset again after delete() would fail
                        file_path = os.path.join(settings.PROJECT_ROOT, "json_files", user_json_file[0].json_file_name)
                        user_json_file[0].delete()
                        try:
                            os.unlink(file_path)
                        except OSError:
                            pass

                    file_name = os.path.basename(json_file.name)
                    # save the json data to the temporary file
                    json_file.write(json_data)
                    json_file.close()
                    user_json_file = UserJSonFile(user_id=user_id, json_type="summary-size", json_file_name=file_name)
                    user_json_file.save()
                    context = {
                        'page_title': 'Welcome to %s' % settings.PROJECT_NAME,
                        'pcap_operation': "welcome",
                        'json_file_url': os.path.join(settings.ALTERNATE_BASE_URL, "json_media", file_name),
                        'json_response': json_response
                    }

                    return render_to_response("main/welcome.html", context,
                            context_instance=RequestContext(request))
                else:
                    context = {
                            'error_message': 'User is not activated!',
                            'page_title': 'Login Page'
                        }
                    return render_to_response("main/login.html", context,
                        context_instance=RequestContext(request))
            else:
                context = {
                    'error_message': 'Error occurred at the user authentication',
                    'page_title': 'Login Page'
                }
                return render_to_response("main/login.html", context,
                    context_instance=RequestContext(request))
        else:
            context = {
            'form': form,
            'page_title': 'Login Page'
            }
            return render_to_response("main/login.html", context,
                context_instance=RequestContext(request))
    else:
        form = LoginForm()

        context = {
            'form': form,
            'page_title': 'Login Page'
        }
        return render_to_response("main/login.html", context,
            context_instance=RequestContext(request))
Example #11
def file_pcap_summary(request, hash_value):
    # to get this to work, one runserver instance should be started as "bin/django runserver 127.0.0.0:8001"
    # and another as "bin/django runserver"
    log = Logger("Summary:", "DEBUG")
    context = {
        'page_title': 'Timeline view for the pcap',
        'hash_value': hash_value
        }

    url = "".join([settings.BASE_URL, "/api/rest/protocols_by_hash/?format=json", "&parent_hash_value=", hash_value])
    log.message("URL: %s" % (url))
    req = urllib2.Request(url, None)
    opener = urllib2.build_opener()
    f = None
    try:
        f = opener.open(req)
        json_response = json.load(f)

        result = []
        response_dict = dict()
        legend = []
        protocols_found = []

        for response in json_response:
            # there is only one response for now; all responses are put into a single timeline instead of multiple timelines
            id = os.urandom(4)
            response_dict["id"] = id.encode('hex')
            response_dict['title'] = "Summary For the Uploaded PCAP"
            response_dict['focus_date'] = None # will be fixed
            response_dict['initial_zoom'] = "38"

            time_keeper = {'start': None, 'end': None}
            importance_keeper = []

            # events creation starts here
            events = []
            for protocol, values in response.iteritems():
                count = 0
                for value in values:
                    event_dict = dict()
                    event_dict['id'] = "-".join([response_dict["id"], protocol, str(count)])
                    event_dict['link'] = reverse('flow_details', args=(value['flow_id'],))
                    if value.has_key("type") and value['type']:
                        event_dict['title'] = value['type']
                    else:
                        event_dict['title'] = protocol
                    if value.has_key('description') and value['description']:
                        event_dict['description'] = cgi.escape(value['description'])
                    else:
                        event_dict['description'] = "No description is set"
                    event_dict['startdate'] = value['start']
                    event_dict['enddate'] = value['end']

                    dt_start = datetime.datetime.strptime(value['start'], "%Y-%m-%d %H:%M:%S")
                    dt_end = datetime.datetime.strptime(value['end'], "%Y-%m-%d %H:%M:%S")
                    if not time_keeper['start']:
                        time_keeper['start'] = dt_start
                    if dt_start <= time_keeper['start']:
                        time_keeper['start'] = dt_start
                    if not time_keeper['end']:
                        time_keeper['end'] = dt_end
                    if dt_end >= time_keeper['end']:
                        time_keeper['end'] = dt_end

                    event_dict['date_display'] = 'day'
                    ts = int(datetime.datetime.strptime(value['start'], "%Y-%m-%d %H:%M:%S").strftime("%s"))
                    importance = translate_time(ts)
                    #importance = random.randrange(1, 100)
                    event_dict['importance'] = importance
                    event_dict['high_threshold'] = int(importance) + 5
                    importance_keeper.append(int(importance))
                    if protocol not in protocols_found:
                        protocols_found.append(protocol)
                    event_dict['icon'] = ICONS[protocol]
                    events.append(event_dict)
                    count += 1
            response_dict['events'] = events
            # calculate the middle of the time
            mid_point = time_keeper['start'] + ((time_keeper['end'] - time_keeper['start']) / 2)
            response_dict['focus_date'] = mid_point.isoformat(sep=" ")

            # calculate initial zoom
            response_dict['initial_zoom'] = repr(int((importance_keeper[0]+importance_keeper[-1])/2))

            for proto in protocols_found:
                tmp = dict()
                tmp['title'] = repr(proto)
                tmp['icon'] = ICONS[proto]
                legend.append(tmp)

            response_dict['legend'] = legend
            result.append(response_dict)

        json_data = json.dumps(result)
        json_dir = os.path.join(settings.PROJECT_ROOT, "json_files")
        json_file = tempfile.NamedTemporaryFile(mode="w", dir=json_dir, delete=False)

        file_name = os.path.basename(json_file.name)
        # save the json data to the temporary file
        json_file.write(json_data)
        json_file.close()
        context['json_file_url'] = os.path.join(settings.ALTERNATE_BASE_URL, "json_media", file_name)
        context['icon_folder']  = os.path.join(settings.ALTERNATE_BASE_URL, "/site_media/jquery_widget/js/timeglider/icons/")
        context['pcap_operation'] = "file_summary"
        context['summary_li'] = ["summary", "file_summary"]

        # get the summary query infos
        flow = Flow.objects.get(hash_value=hash_value)
        context['flow'] = flow

        flow_details = FlowDetails.objects.filter(parent_hash_value=hash_value)
        flow_details_dict = dict()

        for flow_detail in flow_details:
            if not flow_details_dict.has_key(flow_detail.protocol):
                flow_details_dict[flow_detail.protocol] = dict()
                f_d = flow_details_dict[flow_detail.protocol]
                f_d['count'] = 1
                f_d['timestamps'] = [flow_detail.timestamp]
            else:
                # look up the entry for this protocol; reusing the previously bound
                # f_d would miscount when the details are not grouped by protocol
                f_d = flow_details_dict[flow_detail.protocol]
                f_d['count'] += 1
                f_d['timestamps'].append(flow_detail.timestamp)

        for key, value in flow_details_dict.items():
            ts = flow_details_dict[key]['timestamps']
            ts.sort()
            flow_details_dict[key]['start'] = ts[0]
            flow_details_dict[key]['end'] = ts[-1]

        context['flow_details'] = flow_details_dict
        context['ALTERNATE_BASE_URL'] = settings.ALTERNATE_BASE_URL
        context['select_update_li'] =  ["file_details", "file_summary"]


        return render_to_response("pcap/file_summary.html",
            context_instance=RequestContext(request, context))

    except Exception, ex:
        log.message(ex)
        raise Http404
Example #12
 def __init__(self, debug_mode="DEBUG"):
     self._logger = Logger(log_name="Pcap Handler", log_mode=debug_mode)
     self._logger.message("Pcap Handler initialized")
     self._pcap = None
     self._filter_type = None
     self._file_pointer = None
Example #13
def visualize(request, flow_pcap_md5, protocol, type="size"):
    if type == "size":
        # to get this to work, one runserver instance should be started as "bin/django runserver 127.0.0.0:8001"
        # and another as "bin/django runserver"
        log = Logger("Visualize:", "DEBUG")
        context = {
            'page_title': 'Packet Sizes',
            }
        #user_id = request.user.id  # will be used if the user is logged in
        url = "".join([settings.BASE_URL, "/api/rest/protocol_size_by_hash/?format=json&parent_hash_value=", flow_pcap_md5, "&protocol=", protocol])
        log.message("URL: %s" % (url))
        req = urllib2.Request(url, None)
        opener = urllib2.build_opener()
        f = None
        try:
            f = opener.open(req)
            json_response = json.load(f)
            json_data = json.dumps(json_response)

            context['children'] = json_response['children']
            context['flow_details'] = json_response
            context['pcap_operation'] = "summary-size"

            json_dir = os.path.join(settings.PROJECT_ROOT, "json_files")
            json_file = tempfile.NamedTemporaryFile(mode="w", dir=json_dir, delete=False)

            file_name = os.path.basename(json_file.name)
            # save the json data to the temporary file
            json_file.write(json_data)
            json_file.close()
            context['json_file_url'] = os.path.join(settings.ALTERNATE_BASE_URL, "json_media", file_name)

            context['measure'] = 'size'

            return render_to_response("pcap/summary-size.html",
        context_instance=RequestContext(request, context))

        except:
            # return html template
            pass
    else:
        # to get this to work, one runserver instance should be started as "bin/django runserver 127.0.0.0:8001"
        # and another as "bin/django runserver"
        log = Logger("Visualize:", "DEBUG")
        context = {
            'page_title': 'Packet counts of the uploaded pcaps',
            }
        user_id = request.user.id
        url = "".join([settings.BASE_URL, "/api/rest/protocol_count_by_hash/?format=json&parent_hash_value=", flow_pcap_md5, "&protocol=", protocol], )
        log.message("URL: %s" % (url))
        req = urllib2.Request(url, None)
        opener = urllib2.build_opener()
        f = None
        try:
            f = opener.open(req)
            json_response = json.load(f)

            json_data = json.dumps(json_response)
            context['children'] = json_response['children']
            context['flow_details'] = json_response
            context['pcap_operation'] = "summary-size"

            json_dir = os.path.join(settings.PROJECT_ROOT, "json_files")
            json_file = tempfile.NamedTemporaryFile(mode="w", dir=json_dir, delete=False)

            file_name = os.path.basename(json_file.name)
            # save the json data to the temporary file
            json_file.write(json_data)
            json_file.close()
            context['json_file_url'] = os.path.join(settings.ALTERNATE_BASE_URL, "json_media", file_name)

            return render_to_response("pcap/summary-size.html",
                context_instance=RequestContext(request, context))

        except Exception, ex:
            raise Http404
Example #14
def upload(request):
    log = Logger("Upload form", "DEBUG")
    context = {
        'page_title': 'Upload your pcap file here',
        'upload_status': False,
        'message': request.session.get('message', False)
    }
    if request.method == "POST":
        form = UploadPcapForm(request.POST, request.FILES)
        if form.is_valid():
            user_id = request.user.id
            context['form'] = form
            file_handler = FileHandler()
            file_handler.create_dir()
            mem_file = request.FILES['pcap_file']
            log.message("file: %s" % mem_file.name)
            file_handler.save_file(mem_file)
            context['upload_status'] = True

            #save the file name to the db
            pcap_name = mem_file.name
            upload_path = file_handler.upload_dir
            # every pcap file is saved as a flow container; there may or may not be flows, and the pcaps column will list the flow pcaps
            hash_handler = HashHandler()
            hash_value = hash_handler.get_hash(os.path.join(upload_path, pcap_name))
            request.session['uploaded_hash'] = hash_value
            request.session['uploaded_file_name'] = pcap_name
            # send the file to the defined protocol handler so that it can detect
            protocol_handler = settings.PROTOCOL_HANDLER
            package = "ovizart.modules.traffic.detector"
            module_name = ".".join([package, protocol_handler])
            # from ovizart.modules.traffic.detector.x import handler as imported_module
            traffic_detector_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
            traffic_detector_handler = traffic_detector_module.Handler()
            traffic_detector_handler.create_reassemble_information(file_handler.file_path, upload_path)
            output = traffic_detector_handler.detect_proto(file_handler.file_path, upload_path)

            if output == False:
                request.session['message'] = "Error occured. Please try again."
                return redirect('/pcap/upload')


            file_type = get_file_type(file_handler.file_path)
            file_size = get_file_size(file_handler.file_path)
            flow_file, created = Flow.objects.get_or_create(user_id=user_id, hash_value=hash_value,file_name=pcap_name,
                                                            path=upload_path, upload_time=datetime.datetime.now(),
                                                            file_type=file_type, file_size=file_size)

            if "tcp" in output:
                log.message("protocol detected: %s" % "TCP")
                # run tcp flow extractor
                p_read_handler = PcapHandler()
                p_read_handler.open_file(file_handler.file_path)
                p_read_handler.open_pcap()

                f_handler = FlowHandler(p_read_handler)
                flow, direction = f_handler.get_tcp_flows()

                p_write_handler = PcapHandler()
                files = f_handler.save_flow(flow, p_write_handler, save_path=upload_path)

                # save the flow pcap names to the mongo db
                pcap_list = map(lambda x: Pcap.objects.create(hash_value=hash_handler.get_hash(os.path.join(upload_path, x)), file_name=x, path=upload_path), files.values()[0])
                if flow_file.pcaps:
                    pre_li = flow_file.pcaps
                    pre_li.extend(pcap_list)
                    flow_file.pcaps = pre_li
                else:
                    flow_file.pcaps = pcap_list
                flow_file.save()

                p_read_handler.close_file()
                p_write_handler.close_file()
                # now i should hook a protocol detector
                # before that i should detect the application level protocol
                for f in files.values()[0]:
                    packets  = []
                    # better to save tcp level information to db here
                    full_path = os.path.join(upload_path, f)
                    p_read_handler.open_file(full_path)
                    p_read_handler.open_pcap()
                    pcap = p_read_handler.get_pcap()
                    # the list that will keep the tcp part of the packet
                    tcp_list = []
                    for ts, buf in pcap:
                        tcp_handler = TcpHandler()
                        tcp = tcp_handler.read_tcp(ts, buf)
                        # this list will be used at the layers above tcp
                        if tcp:
                            tcp_list.append((tcp, tcp_handler.ident))
                        else: continue
                        tcp_data = u"."
                        if tcp_handler.data:
                            tcp_data = tcp_handler.data
                            # some requests include hexadecimal info, most probably some binary info that can not be
                            # converted to the utf8, for now i better remove them, #TODO should handle them, though
                            # try with 4, tcp.data has binary request
                            # def get_tcp(n):
                            #    count = 1
                            #    f = file("milliyet.pcap", "rb")
                            #    reader = dpkt.pcap.Reader(f)
                            #    for ts, buf in reader:
                            #        if count == n:
                            #            f.close()
                            #            return buf
                            #        count += 1

                            data_li = tcp_data.split("\r\n")
                            tmp = []
                            for data in data_li:
                                try:
                                    data.encode("utf-8")
                                    tmp.append(data)
                                except:
                                    tmp.append("data that can not be encoded to utf-8")

                            tcp_data = " \n".join(tmp)

                        packet = PacketDetails.objects.create(ident=tcp_handler.ident, flow_hash=hash_value, timestamp=tcp_handler.timestamp,
                                                                length=tcp_handler.length, protocol=tcp_handler.proto,
                                                                src_ip=tcp_handler.src_ip,
                                                                dst_ip=tcp_handler.dst_ip, sport=tcp_handler.sport,
                                                                dport=tcp_handler.dport, data=str(tcp_data))
                        packets.append(packet)
                    # get the pcap object
                    p = Pcap.objects.get(hash_value=hash_handler.get_hash(os.path.join(upload_path, f)))
                    log.message("pcap for packet update detected: %s" % p)
                    # update its packets
                    p.packets = list(packets) # converting a queryset to list
                    p.save()
                    p_read_handler.close_file()

            if "udp" in output:
                log.message("protocol detected: %s" % "UDP")
                p_read_handler = PcapHandler()
                file_path = os.path.join(upload_path, pcap_name)
                p_read_handler.open_file(file_path)
                p_read_handler.open_pcap()
                udp_handler = UDPHandler()
                pcap = Pcap.objects.create(hash_value=hash_handler.get_hash(os.path.join(upload_path, pcap_name)), file_name=pcap_name, path=upload_path)
                pcap_list = list([pcap])
                if flow_file.pcaps:
                    pre_li = flow_file.pcaps
                    pre_li.extend(pcap_list)
                    flow_file.pcaps = pre_li
                else:
                    flow_file.pcaps = pcap_list
                flow_file.save()

                packets  = []
                for ts, buf in p_read_handler.get_reader():
                    udp = udp_handler.read_udp(ts, buf)
                    if udp:
                        udp_data = u"."
                        if udp_handler.data:
                            udp_data = udp_handler.data
                            try:
                                udp_data = udp_data.encode("utf-8")
                            except:
                                udp_data = "data that can not be encoded to utf-8"
                        packet = PacketDetails.objects.create(ident=udp_handler.ident, flow_hash=hash_value, timestamp=udp_handler.timestamp,
                                                            length = udp_handler.length,
                                                            protocol=udp_handler.proto, src_ip=udp_handler.src_ip,
                                                            dst_ip=udp_handler.dst_ip, sport=udp_handler.sport,
                                                            dport=udp_handler.dport, data=str(udp_data))
                        packets.append(packet)
                        # get the pcap object
                p = Pcap.objects.get(hash_value=hash_handler.get_hash(os.path.join(upload_path, pcap_name)))
                # update its packets
                p.packets = list(packets) # converting a queryset to list
                p.save()
                p_read_handler.close_file()


            # starting the bro related issues for the reassembled data
            output = traffic_detector_handler.detect_appproto(file_handler.file_path, upload_path)
            log.message("protocol detected: %s" % output)
            if output and "http" in output:
                log.message("protocol detected: %s" % "HTTP")
                # save the reassembled http session IPs to FlowDetails

                # this part is checking the http handler module name and importing the handler
                http_protocol_handler = settings.HTTP_HANDLER
                package = "ovizart.modules.traffic.parser.tcp"
                module_name = ".".join([package, http_protocol_handler])
                # from ovizart.modules.traffic.parser.tcp.x import handler as imported_module
                http_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                http_handler = http_handler_module.Handler()
                # define a get_flow_ips function for the custom handler if required
                # TODO: save the timestamps of the flows
                flow_ips = http_handler.get_flow_ips(path=upload_path)
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="http", timestamp = detail[4])
                    flow_detail_li.append(flow_detail)
                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()
                # then call functions that will save request and responses that will parse dat files, save the headers and files
                #http_handler.save_request(path=upload_path, hash_value=request.session['uploaded_hash'])
                #http_handler.save_response(path=upload_path, hash_value=request.session['uploaded_hash'])
                http_handler.save_request_response(path=upload_path, hash_value=request.session['uploaded_hash'])

            # dns realted issues starts here
            if output and "dns" in output:
                log.message("protocol detected: %s" % "DNS")
                dns_protocol_handler = settings.DNS_HANDLER
                package = "ovizart.modules.traffic.parser.udp"
                module_name = ".".join([package, dns_protocol_handler])
                # from ovizart.modules.traffic.parser.udp.x import handler as imported_module
                dns_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                dns_handler = dns_handler_module.Handler()
                # define a get_flow_ips function for the custom handler if required
                flow_ips = dns_handler.get_flow_ips(path=upload_path, file_name=request.session['uploaded_file_name'])
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="dns", timestamp = detail[4])
                    flow_detail_li.append(flow_detail)
                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()

                dns_handler.save_request_response()

            if output and "smtp" in output:
                log.message("protocol detected: %s" % "SMTP")
                smtp_protocol_handler = settings.SMTP_HANDLER
                package = "ovizart.modules.traffic.parser.tcp"
                module_name = ".".join([package, smtp_protocol_handler])
                # from ovizart.modules.traffic.parser.tcp.x import handler as imported_module
                smtp_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                smtp_handler = smtp_handler_module.Handler()
                # define a get_flow_ips function for the custom handler if required
                smtp_handler.set_flow(flow_file) # i need this, to get the timestamp from a packet belongs to the flow
                flow_ips = smtp_handler.get_flow_ips(path=upload_path, file_name=request.session['uploaded_file_name'])
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="smtp", timestamp = detail[4])
                    flow_detail_li.append(flow_detail)
                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()

                smtp_handler.save_request_response(upload_path=upload_path)

            else:
                log.message("protocol detected: %s" % "Unknown")
                unknown_protocol_handler = settings.UNKNOWN_HANDLER
                package = "ovizart.modules.traffic.parser"
                module_name = ".".join([package, unknown_protocol_handler])
                unknown_handler_module = getattr(__import__(module_name, fromlist=["handler"]), "handler")
                unknown_handler = unknown_handler_module.Handler()
                flow_ips = unknown_handler.get_flow_ips(path=upload_path, file_name=request.session['uploaded_file_name'], parent_hash_value=request.session['uploaded_hash'], user_id=user_id)
                flow_detail_li = []
                for detail in flow_ips:
                    flow_detail, create = FlowDetails.objects.get_or_create(parent_hash_value=request.session['uploaded_hash'], user_id=user_id, src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="unknown", timestamp = detail[4])
                    if create:
                        flow_detail_li.append(flow_detail)

                if flow_file.details:
                    pre_li = flow_file.details
                    pre_li.extend(flow_detail_li)
                    flow_file.details = pre_li
                else:
                    flow_file.details = flow_detail_li
                flow_file.save()



    else:
        form = UploadPcapForm()
        context['form'] = form

    request.session['message'] = False
    return render_to_response("pcap/upload.html",
            context_instance=RequestContext(request, context))
Example #15
class Handler(TcpHandler):
    def __init__(self):
        super(Handler, self).__init__()
        self.log = Logger("HTTP Protocol Handler", "DEBUG")
        self.log.message("HTTP protocol handler called")

#    def read_http(self, tcp):
#        request = self.check_request(tcp)
#        if request:
#            request_dict = {'method': request.method, 'uri': request.uri, 'headers': request.headers, 'version': request.version}
#            return {'request': request_dict}
#        else:
#            response = self.check_response(tcp)
#            if response:
#                response_dict = {'headers': response.headers, 'status': response.status, 'body': response.body, 'version': response.version}
#                return {'response': response_dict, 'tcp_id': tcp.id}
#            return False
#
#    def check_request(self, tcp):
#        data = tcp.data
#        try:
#            return dpkt.http.Request(data)
#        except dpkt.UnpackError:
#            return False

#    def check_response(self, tcp):
#        data = tcp.data
#        try:
#            return dpkt.http.Response(data)
#        except dpkt.UnpackError:
#            return False
#
#    def get_html(self, response_dict):
#        #response will be the dictionary response created after the read_http runs
#        html = None
#        headers = response_dict['headers']
#        body = response_dict['body']
#        if 'content-encoding' in headers and headers['content-encoding'] == 'gzip':
#            data = StringIO.StringIO(body)
#            gzipper = gzip.GzipFile(fileobj = data)
#            html = gzipper.read()
#        else:
#            html = body
#        return html
#
#    def save_html(self, html, path):
#        html_dir = "/".join([path, "html"])
#        if not os.path.exists(path):
#            os.mkdir(html_dir)
#        html_list = os.listdir(html_dir)
#        if not html_list:
#            stream_name = "0.html"
#        else:
#            # the html names will be under html directory with the increasing order as 0.html, 1.html for each flow
#            names = map(lambda x: int(x.split(".")[0]), html_list)
#            names.sort()
#            stream_name = str(names[-1] + 1) + ".html"
#        stream_path = "/".join([html_dir, stream_name])
#        htmlfile = open(stream_path, 'w')
#        htmlfile.write(html)
#        htmlfile.close()
#        return stream_path
#
#    def get_js(self, path, tcp):
#        # get the path of html file
#        base = os.path.dirname(path)
#        js_dir = "js"
#        js_dir_path = "/".join([base, js_dir])
#        if not os.path.exists(js_dir_path):
#            os.mkdir(js_dir_path)
#        doc = fromstring(path)
#        # first the header part
#        header = doc.header
#        scripts = header.cssselect('script')
#        for script in scripts:
#            # check whether it defines a src
#            items = script.items()
#            if items:
#                #[('src', 'index_files/adnet_async.js'), ('type', 'text/javascript')]
#                # i should do something for these files to, need the requested url
#                js_status = False
#                src_status = False
#                src = None
#                for item in items:
#                    if 'type' in item and 'text/javascript' in item:
#                        js_status = False
#                    if 'src' in item:
#                        src_status = True
#                        src = item[1]
#
#                if js_status and src_status:
#                    file_name = src.split("/")[-1]
#                    url = "/".join([tcp.dst_ip, src])
#                    u = urllib2.urlopen(url)
#                    path = "/".join([js_dir_path, file_name])
#                    localFile = open(path, 'w')
#                    localFile.write(u.read())
#                    localFile.close()
#
#            else:
#                # text between script headers
#                txt = script.text()
#                data = StringIO.StringIO(txt)
#                # create a file and save it
#                tmp = tempfile.NamedTemporaryFile(mode="w+", dir=js_dir_path, delete=False)
#                tmp.write(data)
#                tmp.close()
#
#    def read_http_log(self, path):
#        # first check whether there is an http.log created
#        result = []
#        full_path = "/".join([path, "http.log"])
#        if os.path.exists(full_path):
#            f = open(full_path, "r")
#            for line in f.readlines():
#                if line.startswith("#"):
#                    continue
#                else:
#                    data = line.split()
#                    # src ip, sport, dst ip, dport
#                    result.append(data[2:6])
#        else:
#            return False
#
#        return result

    def read_dat_files(self, path):
        result = []
        files = os.listdir(path)
        for f in files:
            f_path = "/".join([path, f])
            if os.path.isdir(f_path):
                continue
            #contents_192.168.1.5:42825-62.212.84.227:80_orig.dat
            name = f.split("_")
            extension = name[-1].split(".")[-1]
            if extension == "dat":
                communication = name[1].split("-")
                source = communication[0].split(":")
                destination = communication[1].split(":")
                source.extend(destination)
                result.append(source)
            else:
                continue

        return result

    def read_conn_log(self, path):
        result = dict()  # keys are the connection ids, values are the timestamps
        conn_log_path = "/".join([path, "conn.log"])
        f = open(conn_log_path, "r")
        for line in f.readlines():
            if line.startswith("#"): continue
            info = line.split()
            key = info[2:6]
            value = info[0]
            result[str(key)] = value

        return result


    def get_flow_ips(self, **args):
        # TODO: add reading the conn log and parse the time stamp for each
        flows =  self.read_dat_files(args['path'])
        ts = self.read_conn_log(args['path'])
        for flow in flows:
            timestamp = float(ts[str(flow[0:5])])
            dt = datetime.datetime.fromtimestamp(timestamp)
            flow.append(dt)

        return flows


    def save_request(self, **args):
        # get the IPs from the database

        try:
            flow = Flow.objects.get(hash_value=args['hash_value'])
            flow_details = flow.details
            for detail in flow_details:
                # create the orig file ex: contents_192.168.1.5:42825-62.212.84.227:80_orig.dat
                source_str = ":".join([detail.src_ip, str(detail.sport)])
                destination_str = ":".join([detail.dst_ip, str(detail.dport)])
                flow_str = "-".join([source_str, destination_str])
                orig_file = "_".join(["contents", flow_str,"orig.dat"])
                file_path = "/".join([args['path'], orig_file])
                file_path = str(file_path)

                strings = ["GET", "PUT", "POST"]
                file_handler = FileHandler()
                requests = []
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    requests.append(item[0])

                # a bit of a hack: find the empty lines, since each request is separated by an empty line
                empty_lines = []
                strings = ["\r\n\r\n"]
                search_li = file_handler.search(file_path, strings)
                if not search_li: continue
                for item in search_li:
                    empty_lines.append(item[0])

                for x in range(len(requests)):
                    # here i have the request header
                    data = file_handler.data
                    request = data[requests[x]:empty_lines[x]]
                    request_li = request.split("\n")

                    for entry in request_li:
                        # the first line is method and uri with version information
                        info = entry.split(":")
                        if len(info) == 1:
                            info = info[0].split()
                            method = info[0]
                            uri = info[1]
                            version = info[2].split("/")[1]

                            try:
                                http_details = HTTPDetails.objects.get(http_type="request", method=method, uri=uri, headers=request_li, version=version, flow_details=detail)
                            except:
                                http_details = HTTPDetails(http_type="request", method=method, uri=uri, headers=request_li, version=version, flow_details=detail)
                                http_details.save()
            return True

        except Exception, ex:
            return False
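A worked example of the file-name parsing in read_dat_files above, using the sample name from the inline comment:

# contents_192.168.1.5:42825-62.212.84.227:80_orig.dat
name = "contents_192.168.1.5:42825-62.212.84.227:80_orig.dat".split("_")
# name == ['contents', '192.168.1.5:42825-62.212.84.227:80', 'orig.dat']
communication = name[1].split("-")
source = communication[0].split(":")       # ['192.168.1.5', '42825']
destination = communication[1].split(":")  # ['62.212.84.227', '80']
source.extend(destination)
# source == ['192.168.1.5', '42825', '62.212.84.227', '80'], one entry of the result list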
Example #16
class Handler:
    def __init__(self, debug_mode="DEBUG"):
        self._logger = Logger(log_name="Pcap Handler", log_mode=debug_mode)
        self._logger.message("Pcap Handler initialized")
        self._pcap = None
        self._filter_type = None
        self._file_pointer = None

    def open_file(self, pcap_file, mode="rb"):
        try:
            self._file_pointer = file(pcap_file, mode)
            self._logger.set_log_level("DEBUG")
            self._logger.message(
                ("%s is opened at %s mode") % (pcap_file, mode))
        except:
            self._logger.set_log_level("ERROR")
            self._logger.message("Error at opening pcap file")

    def open_pcap(self, mode="r"):
        if mode == "r":
            self._pcap = dpkt.pcap.Reader(self._file_pointer)
            self._logger.set_log_level("DEBUG")
            self._logger.message("pcap reader is created")
        if mode == "w":
            self._pcap = dpkt.pcap.Writer(self._file_pointer)

    def write_pcap(self, buf, ts):
        self._pcap.writepkt(buf, ts)

    def close_file(self):
        self._file_pointer.close()

    def set_filter_type(self, t):
        self._filter_type = t
        self._logger.set_log_level("DEBUG")
        self._logger.message(("Filter type is set %s") % (t))

    def get_filter_type(self):
        return self._filter_type

    def get_pcap(self):
        return self._pcap

    def get_eth(self, buf):
        eth = dpkt.ethernet.Ethernet(buf)
        if eth.type == dpkt.ethernet.ETH_TYPE_IP:
            return eth
        else:
            self._logger.set_log_level("ERROR")
            self._logger.message("No Eth is returned")
            return False

    def get_ip(self, eth):
        ip = eth.data
        if ip.p == dpkt.ip.IP_PROTO_TCP:
            return ip
        else:
            self._logger.set_log_level("ERROR")
            self._logger.message("No IP is returned")
            return False

    def get_tcp(self, ip):
        tcp = ip.data
        #self._logger.message(("TCP is returned %s") % (tcp))
        return tcp

    def get_udp(self, ip):
        udp = ip.data
        return udp

    def get_reader(self):
        return self._pcap
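How the views above use this class for reading a capture (a sketch; the import path is hypothetical):

# hypothetical import path for the pcap Handler above
from ovizart.modules.traffic.pcap.handler import Handler as PcapHandler

reader = PcapHandler()
reader.open_file("capture.pcap")   # opened in "rb" mode by default
reader.open_pcap()                 # wraps the file pointer in a dpkt.pcap.Reader
for ts, buf in reader.get_reader():
    pass  # ts is the capture timestamp, buf is the raw frame
reader.close_file()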
Example #17
 def __init__(self):
     super(Handler, self).__init__()
     self.log = Logger("Base Protocol Handler", "DEBUG")
     self.log.message("base protocol handler called")
Example #18
class Handler():
    def __init__(self):
        self.log = Logger("DNS Protocol Handler", "DEBUG")
        self.log.message("DNS protocol handler called")
        self.dns_li = []
        self.flow_li = []

    def get_flow_ips(self, **args):
        path = args['path']
        file_name = args['file_name']
        # TODO: this handler should read the conn.log and get the flows there
        p_read_handler = PcapHandler()
        file_path = "/".join([path, file_name])
        p_read_handler.open_file(file_path)
        p_read_handler.open_pcap()
        udp_handler = UDPHandler()
        for ts, buf in p_read_handler.get_reader():
            udp = udp_handler.read_udp(ts, buf)
            if udp:
                try:
                    dns = dpkt.dns.DNS(udp.data)
                    self.flow_li.append([udp_handler.src_ip, udp_handler.sport, udp_handler.dst_ip, udp_handler.dport, udp_handler.timestamp])
                except IndexError:
                    continue  # dpkt does not parse this packet properly
                self.dns_li.append(dns)
        return self.flow_li

    def save_request_response(self, **args):
        # enumerate keeps the index aligned with flow_li even when a message is skipped
        for index, msg in enumerate(self.dns_li):
            if msg.rcode == dpkt.dns.DNS_RCODE_NOERR:
                try:
                    msg.qd[0].type
                except:
                    continue
                if msg.qd[0].type in REQUEST_FLAGS.keys():
                    detail = self.flow_li[index]
                    flow_detail = FlowDetails.objects.get(src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="dns", timestamp = detail[4])
                    try:
                        dns_request = DNSRequest(type=msg.qd[0].type, human_readable_type=REQUEST_FLAGS[msg.qd[0].type], value=msg.qd[0].name, flow_details=flow_detail)
                        dns_request.save()
                    except Exception, ex:
                        print ex
                for an in msg.an:
                    if an.type in RESPONSE_FLAGS.keys():
                        flow_detail = self.flow_li[index]
                        type = an.type
                        human_readable_type = REQUEST_FLAGS[type]
                        value = None
                        if type == dpkt.dns.DNS_SOA:
                            value = [an.mname, an.rname, str(an.serial),str(an.refresh), str(an.retry), str(an.expire), str(an.minimum) ]
                        if type == dpkt.dns.DNS_A:
                            value = [inet_ntoa(an.ip)]
                        if type == dpkt.dns.DNS_PTR:
                            value = [an.ptrname]
                        if type == dpkt.dns.DNS_NS:
                            value = [an.nsname]
                        if type == dpkt.dns.DNS_CNAME:
                            value = [an.cname]
                        if type == dpkt.dns.DNS_HINFO:
                            value = [" ".join(an.text)]
                        if type == dpkt.dns.DNS_MX:
                            value = [an.mxname]
                        if type == dpkt.dns.DNS_TXT:
                            value = " ".join(an.text)
                        if type == dpkt.dns.DNS_AAAA:
                            value = inet_ntop(AF_INET6,an.ip6)
                        detail = self.flow_li[index]
                        flow_detail = FlowDetails.objects.get(src_ip=detail[0], sport=int(detail[1]), dst_ip=detail[2], dport=int(detail[3]), protocol="dns", timestamp = detail[4])
                        dns_response = DNSResponse(type=type, human_readable_type=RESPONSE_FLAGS[type], value=value, flow_details = flow_detail)
                        dns_response.save()
        return True
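A usage sketch mirroring the DNS branch of the upload view above. The final component of the import path stands in for settings.DNS_HANDLER and is hypothetical, as is the example upload directory; FlowDetails rows for these flows must already exist, since save_request_response looks them up.

# package path as used in the upload view; the ".dns" module name is hypothetical
from ovizart.modules.traffic.parser.udp.dns import Handler as DnsHandler

dns_handler = DnsHandler()
# yields [src_ip, sport, dst_ip, dport, timestamp] for each parsed DNS-over-UDP packet
flow_ips = dns_handler.get_flow_ips(path="/tmp/uploads/01-01-13/abc",
                                    file_name="capture.pcap")
# stores DNSRequest/DNSResponse rows linked to the matching FlowDetails
dns_handler.save_request_response()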
Example #19
 def __init__(self, debug_mode="DEBUG"):
     self._logger = Logger(log_name="Pcap Handler", log_mode=debug_mode)
     self._logger.message("Pcap Handler initialized")
     self._pcap = None
     self._filter_type = None
     self._file_pointer = None
Example #20
class Handler:
    def __init__(self, handler, debug_mode="DEBUG"):
        self.pcap = handler.get_pcap()
        self.pcap_handler = handler
        self._logger = Logger(log_name="Flow Handler", log_mode=debug_mode)
        self._logger.message("Flow Handler initialized")

    def get_tcp_flows(self, filter_type="TCP"):
        flow = dict()
        flow_id = 0
        flow_num = 0
        direction = dict()  # 1 is one-directional, 2 is bidirectional; keys are the flow numbers
        index = dict()
        self.pcap_handler.set_filter_type(filter_type)
        for ts, buf in self.pcap:
            eth = self.pcap_handler.get_eth(buf)
            if eth:
                ip = self.pcap_handler.get_ip(eth)
            else:
                continue
            #src_ip = self.ip.src
            #dst_ip = self.ip.dst
            # for human readable ip
            # from socket import inet_ntoa
            # inet_ntoa(dst_ip)
            if self.pcap_handler.get_filter_type() == "TCP":
                if not ip:
                    continue
                tcp = self.pcap_handler.get_tcp(ip)
                forward_index = (ip.src, tcp.sport, ip.dst, tcp.dport)
                backward_index = (ip.dst, tcp.dport, ip.src, tcp.sport)
                if index.has_key(forward_index):
                    flow_num = index[forward_index]
                elif index.has_key(backward_index):
                    flow_num = index[backward_index]
                    direction[flow_num] = 2
                else:
                    index[forward_index] = flow_id
                    flow_num = flow_id
                    direction[flow_num] = 1

                if flow.has_key(flow_num):
                    flow[flow_num].append((buf, ts))
                else:
                    flow[flow_num] = [(buf, ts)]
            flow_id += 1
        return flow, direction

    def save_flow(self, flow, pcap_handler, save_path=""):
        random_key = generate_random_name(10)
        files = dict()
        for key, values in flow.iteritems():
            file_name = ".".join([random_key, str(key), "pcap"])
            full_file_path = "/".join([save_path, file_name])
            if files.has_key(save_path):
                files[save_path].append(file_name)
            else:
                files[save_path] = [file_name]
            pcap_handler.open_file(full_file_path, "w")
            pcap_handler.open_pcap("w")
            for value in values:
                pcap_handler.write_pcap(value[0], value[1])
            pcap_handler.close_file()
        return files
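A sketch of the flow-extraction sequence as the upload view above performs it; the import paths are hypothetical.

# hypothetical import paths for the two handlers
from ovizart.modules.traffic.pcap.handler import Handler as PcapHandler
from ovizart.modules.traffic.flow.handler import Handler as FlowHandler

read_handler = PcapHandler()
read_handler.open_file("capture.pcap")
read_handler.open_pcap()

flow_handler = FlowHandler(read_handler)
# flow maps a flow number to its [(buf, ts), ...] packets,
# direction maps a flow number to 1 (one-way) or 2 (bidirectional)
flow, direction = flow_handler.get_tcp_flows()

write_handler = PcapHandler()
# writes one pcap per flow under save_path and returns {save_path: [file names]}
files = flow_handler.save_flow(flow, write_handler, save_path="/tmp/flows")

read_handler.close_file()
write_handler.close_file()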
Example #21
 def __init__(self):
     self.log = Logger("DNS Protocol Handler", "DEBUG")
     self.log.message("DNS protocol handler called")
     self.dns_li = []
     self.flow_li = []
Example #22
class Handler():
    def __init__(self):
        self.log = Logger("DNS Protocol Handler", "DEBUG")
        self.log.message("DNS protocol handler called")
        self.dns_li = []
        self.flow_li = []

    def get_flow_ips(self, **args):
        path = args['path']
        file_name = args['file_name']
        # TODO: this handler should read the conn.log and get the flows there
        p_read_handler = PcapHandler()
        file_path = "/".join([path, file_name])
        p_read_handler.open_file(file_path)
        p_read_handler.open_pcap()
        udp_handler = UDPHandler()
        for ts, buf in p_read_handler.get_reader():
            udp = udp_handler.read_udp(ts, buf)
            if udp:
                try:
                    dns = dpkt.dns.DNS(udp.data)
                    self.flow_li.append([
                        udp_handler.src_ip, udp_handler.sport,
                        udp_handler.dst_ip, udp_handler.dport,
                        udp_handler.timestamp
                    ])
                except IndexError:
                    continue  # dpkt could not parse this packet as DNS; skip it
                self.dns_li.append(dns)
        return self.flow_li

    def save_request_response(self, **args):
        # dns_li and flow_li are parallel lists; enumerate keeps them in sync
        # even when a message is skipped.
        for index, msg in enumerate(self.dns_li):
            if msg.rcode == dpkt.dns.DNS_RCODE_NOERR:
                try:
                    msg.qd[0].type
                except IndexError:
                    continue  # no question section in this message
                if msg.qd[0].type in REQUEST_FLAGS:
                    detail = self.flow_li[index]
                    flow_detail = FlowDetails.objects.get(src_ip=detail[0],
                                                          sport=int(detail[1]),
                                                          dst_ip=detail[2],
                                                          dport=int(detail[3]),
                                                          protocol="dns",
                                                          timestamp=detail[4])
                    try:
                        dns_request = DNSRequest(
                            type=msg.qd[0].type,
                            human_readable_type=REQUEST_FLAGS[msg.qd[0].type],
                            value=msg.qd[0].name,
                            flow_details=flow_detail)
                        dns_request.save()
                    except Exception as ex:
                        print(ex)
                for an in msg.an:
                    if an.type in RESPONSE_FLAGS:
                        rtype = an.type
                        value = None
                        if rtype == dpkt.dns.DNS_SOA:
                            value = [
                                an.mname, an.rname,
                                str(an.serial),
                                str(an.refresh),
                                str(an.retry),
                                str(an.expire),
                                str(an.minimum)
                            ]
                        elif rtype == dpkt.dns.DNS_A:
                            value = [inet_ntoa(an.ip)]
                        elif rtype == dpkt.dns.DNS_PTR:
                            value = [an.ptrname]
                        elif rtype == dpkt.dns.DNS_NS:
                            value = [an.nsname]
                        elif rtype == dpkt.dns.DNS_CNAME:
                            value = [an.cname]
                        elif rtype == dpkt.dns.DNS_HINFO:
                            value = [" ".join(an.text)]
                        elif rtype == dpkt.dns.DNS_MX:
                            value = [an.mxname]
                        elif rtype == dpkt.dns.DNS_TXT:
                            value = " ".join(an.text)
                        elif rtype == dpkt.dns.DNS_AAAA:
                            value = inet_ntop(AF_INET6, an.ip6)
                        detail = self.flow_li[index]
                        flow_detail = FlowDetails.objects.get(
                            src_ip=detail[0],
                            sport=int(detail[1]),
                            dst_ip=detail[2],
                            dport=int(detail[3]),
                            protocol="dns",
                            timestamp=detail[4])
                        dns_response = DNSResponse(
                            type=rtype,
                            human_readable_type=RESPONSE_FLAGS[rtype],
                            value=value,
                            flow_details=flow_detail)
                        dns_response.save()
        return True
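
A short, hedged sketch of driving the DNS handler above end to end; the module name and the pcap location are assumptions, and matching FlowDetails rows (protocol="dns") must already exist for each returned tuple:

# Usage sketch -- module name and paths are assumptions.
from dns_handler import Handler as DNSHandler   # assumed import path

dns_handler = DNSHandler()
# Collect (src_ip, sport, dst_ip, dport, timestamp) for every UDP/DNS packet.
flows = dns_handler.get_flow_ips(path="/tmp/uploads", file_name="capture.pcap")
# Persist DNSRequest / DNSResponse rows keyed to their FlowDetails records.
dns_handler.save_request_response()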
Beispiel #23
0
 def __init__(self):
     super(Handler, self).__init__()
     self.log = Logger("HTTP Protocol Handler", "DEBUG")
     self.log.message("HTTP protocol handler called")
Beispiel #25
0
 def __init__(self, handler, debug_mode="DEBUG"):
     self.pcap = handler.get_pcap()
     self.pcap_handler = handler
     self._logger = Logger(log_name="Flow Handler", log_mode=debug_mode)
     self._logger.message("Flow Handler initialized")
Beispiel #26
0
 def __init__(self):
     self.file_path = None
     self.file_name = None
     self.stream = None
     self.data = None
     self.log = Logger("File Handler", "DEBUG")
Beispiel #29
0
class Handler:

    def __init__(self, debug_mode="DEBUG"):
        self._logger = Logger(log_name="Pcap Handler", log_mode=debug_mode)
        self._logger.message("Pcap Handler initialized")
        self._pcap = None
        self._filter_type = None
        self._file_pointer = None

    def open_file(self, pcap_file, mode="rb"):
        try:
            self._file_pointer = open(pcap_file, mode)
            self._logger.set_log_level("DEBUG")
            self._logger.message("%s opened in %s mode" % (pcap_file, mode))
        except IOError as ex:
            self._logger.set_log_level("ERROR")
            self._logger.message("Error opening pcap file %s: %s" % (pcap_file, ex))

    def open_pcap(self, mode="r"):
        if mode == "r":
            self._pcap = dpkt.pcap.Reader(self._file_pointer)
            self._logger.set_log_level("DEBUG")
            self._logger.message("pcap reader is created")
        elif mode == "w":
            self._pcap = dpkt.pcap.Writer(self._file_pointer)
            self._logger.set_log_level("DEBUG")
            self._logger.message("pcap writer is created")

    def write_pcap(self, buf, ts):
        self._pcap.writepkt(buf, ts)

    def close_file(self):
        self._file_pointer.close()

    def set_filter_type(self, t):
        self._filter_type = t
        self._logger.set_log_level("DEBUG")
        self._logger.message(("Filter type is set %s") % (t))

    def get_filter_type(self):
        return self._filter_type

    def get_pcap(self):
        return self._pcap

    def get_eth(self, buf):
        eth = dpkt.ethernet.Ethernet(buf)
        if eth.type == dpkt.ethernet.ETH_TYPE_IP:
            return eth
        else:
            self._logger.set_log_level("ERROR")
            self._logger.message("No Eth is returned")
            return False

    def get_ip(self, eth):
        ip = eth.data
        if ip.p == dpkt.ip.IP_PROTO_TCP:
            return ip
        else:
            self._logger.set_log_level("ERROR")
            self._logger.message("No IP is returned")
            return False

    def get_tcp(self, ip):
        tcp = ip.data
        #self._logger.message(("TCP is returned %s") % (tcp))
        return tcp

    def get_udp(self, ip):
        udp = ip.data
        return udp

    def get_reader(self):
        return self._pcap
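
A hedged sketch of reading packets with the Pcap Handler above, using only the methods defined in the class; the capture path is an assumption:

# Usage sketch -- the capture path is an assumption.
pcap_handler = Handler()                     # the Pcap Handler shown above
pcap_handler.open_file("/tmp/capture.pcap")  # default mode "rb"
pcap_handler.open_pcap("r")

for ts, buf in pcap_handler.get_reader():
    eth = pcap_handler.get_eth(buf)          # False unless the frame carries IPv4
    if not eth:
        continue
    ip = pcap_handler.get_ip(eth)            # False unless the payload is TCP
    if not ip:
        continue
    tcp = pcap_handler.get_tcp(ip)
    print("%f: %d -> %d (%d bytes)" % (ts, tcp.sport, tcp.dport, len(tcp.data)))

pcap_handler.close_file()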
Beispiel #30
0
class Handler():
    def __init__(self):
        self.log = Logger("SMTP Protocol Handler", "DEBUG")
        self.log.message("SMTP protocol handler called")
        self.file_name_li = []
        self.flow = None
        self.toProcess = dict()
        self.reportRoot = None
        self.streamcounter = 0

    def get_flow_ips(self, **args):
        path = args['path']
        file_name = args['file_name']
        self.reportRoot = path
        full_path = "/".join([path, file_name])
        cmd = " ".join(["tcpflow -v -r", full_path])
        output = subprocess.Popen(cmd,
                                  shell=True,
                                  stdout=subprocess.PIPE,
                                  stdin=subprocess.PIPE,
                                  stderr=subprocess.PIPE,
                                  cwd=path).communicate()[1]
        result = []
        for line in output.split("\n"):
            if "new flow" in line:
                # the flow files tcpflow created are the ones we are looking for
                ip = line.split(":")[1].strip()
                # test whether this is an smtp flow
                smtp_flow_file_path = "/".join([path, ip])
                if not self.decode_SMTP(smtp_flow_file_path):
                    continue
                self.file_name_li.append(ip)
                src_ip, sport, dst_ip, dport = self.parse_aFile(ip)

                packet = None
                found = False
                for pcap in self.flow.pcaps:
                    # re-fetch the object, otherwise pcap.packets does not return the data
                    pcap = Pcap.objects.get(id=pcap.id)
                    for packet in pcap.packets:
                        if packet.src_ip == src_ip and packet.sport == sport and packet.dst_ip == dst_ip and packet.dport == dport:
                            found = True
                            break
                    if found:
                        break

                if found:
                    timestamp = packet.timestamp
                    result.append([src_ip, sport, dst_ip, dport, timestamp])
                    result.append([dst_ip, dport, src_ip, sport, timestamp])

        return result

    def parse_aFile(self, ip):
        # tcpflow names flow files like "010.000.000.001.12345-192.168.001.001.00025"
        ip_info = ip.split("-")
        src = ip_info[0].split(".")
        dst = ip_info[1].split(".")
        src_ip = ".".join(map(lambda x: str(int(x)), src[:4]))
        sport = int(src[4])
        dst_ip = ".".join(map(lambda x: str(int(x)), dst[:4]))
        dport = int(dst[4])

        return src_ip, sport, dst_ip, dport

    def set_flow(self, flow):
        self.flow = flow

    def create_process_dic(self, path):
        for f in self.file_name_li:
            info = dict()
            # build a fresh per-file path; do not overwrite `path` between iterations
            file_path = "/".join([path, f])
            with open(file_path) as fp:
                info["raw"] = fp.read().split("\r\n")
            self.toProcess[f] = info

    def save_request_response(self, **args):
        upload_path = args['upload_path']
        self.create_process_dic(upload_path)
        for f in self.file_name_li:
            # both these functions should be reviewed
            smtp_details = self.process_SMTP(f)
            self.report_SMTP(f, smtp_details)

    def process_SMTP(self, aFile):
        in_auth = False  # currently inside an AUTH LOGIN exchange
        in_data = False  # currently inside a DATA section
        for i in self.toProcess[aFile]['raw']:

            if in_auth and i.startswith("MAIL FROM"):
                in_auth = False
            if in_data and i == ".":
                in_data = False

            if in_auth:
                self.toProcess[aFile]['logindata'].append(i)
            if in_data:
                self.toProcess[aFile]['msgdata'].append(i)

            if i == "AUTH LOGIN":
                in_auth = True
                self.toProcess[aFile]['logindata'] = []
            if i == "DATA":
                in_data = True
                self.toProcess[aFile]['msgdata'] = []
            if i.startswith("MAIL FROM:"):
                self.toProcess[aFile]['msg_from'] = i[11:]
            if i.startswith("RCPT TO:"):
                self.toProcess[aFile]['rcpt_to'] = i[9:]
        src_ip, sport, dst_ip, dport = self.parse_aFile(aFile)
        flow_details = FlowDetails.objects.get(src_ip=src_ip,
                                               sport=sport,
                                               dst_ip=dst_ip,
                                               dport=dport)
        # use defaults: a flow may lack AUTH LOGIN, MAIL FROM, RCPT TO or DATA
        smtp_details = SMTPDetails.objects.create(
            login_data=self.toProcess[aFile].get('logindata', []),
            msg_from=self.toProcess[aFile].get('msg_from', ''),
            rcpt_to=self.toProcess[aFile].get('rcpt_to', ''),
            raw=self.toProcess[aFile]['raw'],
            msgdata=self.toProcess[aFile].get('msgdata', []),
            flow_details=flow_details)
        return smtp_details

    def report_SMTP(self, aFile, smtp_details):
        self.log.message("Found SMTP Session data at %s" % (aFile))

        if self.toProcess[aFile].has_key("logindata"):
            self.log.message(
                "SMTP AUTH Login: %s" %
                (base64.decodestring(self.toProcess[aFile]['logindata'][0])))
            self.log.message(
                "SMTP AUTH Password: %s" %
                (base64.decodestring(self.toProcess[aFile]['logindata'][1])))
        if self.toProcess[aFile].has_key('msg_from'):
            self.log.message("SMTP MAIL FROM: %s" %
                             (self.toProcess[aFile]['msg_from']))
        if self.toProcess[aFile].has_key("rcpt_to"):
            self.log.message("SMTP RCPT TO: %s" %
                             (self.toProcess[aFile]['rcpt_to']))
        if self.toProcess[aFile].has_key('msgdata'):
            self.streamcounter += 1
            if not os.path.exists(
                    os.path.join(self.reportRoot, "smtp-messages",
                                 str(self.streamcounter))):
                os.makedirs(
                    os.path.join(self.reportRoot, "smtp-messages",
                                 str(self.streamcounter)))

            x = "\r\n".join(self.toProcess[aFile]['msgdata'])
            msg = email.message_from_string(x)
            f = open(
                os.path.join(self.reportRoot, "smtp-messages",
                             str(self.streamcounter), "%s.msg" % (aFile)), "w")
            f.write(x)
            f.close()
            self.log.message("Found email Messages")
            self.log.message("\tWriting to file: %s" %
                             (os.path.join(self.reportRoot, "smtp-messages",
                                           str(self.streamcounter), "%s.msg" %
                                           (aFile))))
            self.log.message(" \tMD5 of msg: %s" %
                             (hashlib.md5(x).hexdigest()))
            counter = 1
            # The great docs at http://docs.python.org/library/email-examples.html
            # show this easy way of breaking up a mail msg
            for part in msg.walk():
                if part.get_content_maintype() == 'multipart':
                    continue
                filename = part.get_filename()
                if not filename:
                    ext = mimetypes.guess_extension(part.get_content_type())
                    if not ext:
                        ext = '.bin'
                    filename = 'part-%03d%s' % (counter, ext)
                part_data = part.get_payload(decode=True)
                part_hash = hashlib.md5()
                part_hash.update(part_data)
                self.log.message("\t\t- Found Attachment")
                self.log.message(
                    " \t\t\t- Writing to filename: %s " %
                    (os.path.join(self.reportRoot, "smtp-messages",
                                  str(self.streamcounter), filename)))
                f = open(
                    os.path.join(self.reportRoot, "smtp-messages",
                                 str(self.streamcounter), filename), "wb")
                f.write(part_data)
                attach_path = smtp_details.attachment_path
                if not attach_path:
                    smtp_details.attachment_path = [
                        os.path.join(self.reportRoot, "smtp-messages",
                                     str(self.streamcounter), filename)
                    ]
                else:
                    attach_path.append(
                        os.path.join(self.reportRoot, "smtp-messages",
                                     str(self.streamcounter), filename))
                    smtp_details.attachment_path = attach_path
                smtp_details.save()
                f.close()
                self.log.message(" \t\t\t- Type of Attachement: %s" %
                                 (part.get_content_type()))
                self.log.message(" \t\t\t- MDS of Attachement: %s" %
                                 (part_hash.hexdigest()))
                if filename.endswith(".zip") or filename.endswith(".docx"):
                    self.log.message(
                        "\t\t\t\t- ZIP Archive attachment extracting")
                    if not os.path.exists(
                            os.path.join(self.reportRoot, "smtp-messages",
                                         str(self.streamcounter),
                                         "%s.unzipped" % (filename))):
                        os.makedirs(
                            os.path.join(self.reportRoot, "smtp-messages",
                                         str(self.streamcounter),
                                         "%s.unzipped" % (filename)))
                    zfp = os.path.join(self.reportRoot, "smtp-messages",
                                       str(self.streamcounter),
                                       "%s.unzipped" % (filename))
                    zf = zipfile.ZipFile(
                        os.path.join(self.reportRoot, "smtp-messages",
                                     str(self.streamcounter), filename))
                    for name in zf.namelist():
                        try:
                            (path,
                             fname) = os.path.split(os.path.join(zfp, name))
                            if not os.path.exists(path):
                                os.makedirs(path)
                            f = open(os.path.join(path, fname), 'wb')
                            data = zf.read(name)
                            f.write(data)
                            f.close()
                            self.log.message(" Found file")
                            self.log.message(" Writing to filename: %s" %
                                             (os.path.join(path, fname)))
                            self.log.message(
                                " Type of file: %s" % (mimetypes.guess_type(
                                    os.path.join(path, fname))[0]))
                            self.log.message(" MD5 of File: %s" %
                                             (hashlib.md5(data).hexdigest()))
                        except Exception as ex:
                            self.log.message(ex)