Code Example #1
File: views.py  Project: maurakilleen/crits_services
def pyew_port(request):

    sc = manager.get_config('Pyew')
    port = str(sc['port'])
    secure = str(sc['secure'])
    data = {'port': port, 'secure': secure}
    return HttpResponse(json.dumps(data), mimetype="application/json")
Code Example #2
File: views.py  Project: 9b/crits_services
def pyew_port(request):

    sc = manager.get_config('Pyew')
    port = str(sc['port'])
    secure = str(sc['secure'])
    data = {'port': port,
            'secure': secure}
    return HttpResponse(json.dumps(data), mimetype="application/json")
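
Code Examples #1 and #2 are the same view taken from two forks: both read the Pyew service configuration and return the port and secure flag as JSON. As a rough illustration of how a client might consume that response, here is a hypothetical sketch; the URL and hostname are placeholders, not values from either project, and building a WebSocket URL from the fields is only one plausible use:

# Hypothetical consumer of the pyew_port JSON response (URL and host are placeholders).
import json
import urllib2

resp = urllib2.urlopen('https://crits.example.com/services/pyew/port/')
cfg = json.loads(resp.read())

# 'secure' is returned as the string 'True' or 'False' by the view above.
scheme = 'wss' if cfg['secure'] == 'True' else 'ws'
print '%s://crits.example.com:%s/' % (scheme, cfg['port'])
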
Code Example #3
File: pyew_wsh.py  Project: 9b/crits_services
def start_pyew_shell(request, id_, token):

    # Make sure we can find pyew
    sc = manager.get_config('Pyew')
    pyew = str(sc['pyew'])

    if not os.path.exists(pyew):
        text = "\nPyew not found"
        request.ws_stream.send_message(base64.b64encode(text),
                                       binary=False)
        sys.exit(1)

    # Find CRITs user by token
    query = {'unsupported_attrs.pyew_token': token}
    user = CRITsUser.objects(__raw__=query).first()
    if not user:
        text = "\nCould not validate user"
        request.ws_stream.send_message(base64.b64encode(text),
                                       binary=False)
        sys.exit(1)

    # Remove this one-time use token
    ua = user.unsupported_attrs
    delattr(ua, 'pyew_token')
    user.unsupported_attrs = ua
    try:
        user.save()
    except:
        pass

    # Make sure we have a sample to work with that this user has access to
    sample = Sample.objects(id=id_, source__name__in=user.sources).first()
    if not sample:
        text = "\nNo Sample found"
        request.ws_stream.send_message(base64.b64encode(text),
                                       binary=False)
        sys.exit(1)
    sample_data = sample.filedata.read()
    if not sample_data:
        text = "\nCould not get Sample from GridFS: %s" % id_
        request.ws_stream.send_message(base64.b64encode(text),
                                       binary=False)
        sys.exit(1)

    # write Sample to disk
    # temp_sample is the sample to read
    try:
        temp_sample = tempfile.NamedTemporaryFile(delete=False)
        sample_name = temp_sample.name
        temp_sample.write(sample_data)
        temp_sample.close()
    except Exception, e:
        text = "\nError writing file to disk: %s" % e
        request.ws_stream.send_message(base64.b64encode(text),
                                       binary=False)
        sys.exit(1)
Code Example #4
def start_pyew_shell(request, id_, token):

    # Make sure we can find pyew
    sc = manager.get_config('Pyew')
    pyew = str(sc['pyew'])

    if not os.path.exists(pyew):
        text = "\nPyew not found"
        request.ws_stream.send_message(base64.b64encode(text), binary=False)
        sys.exit(1)

    # Find CRITs user by token
    query = {'unsupported_attrs.pyew_token': token}
    user = CRITsUser.objects(__raw__=query).first()
    if not user:
        text = "\nCould not validate user"
        request.ws_stream.send_message(base64.b64encode(text), binary=False)
        sys.exit(1)

    # Remove this one-time use token
    ua = user.unsupported_attrs
    delattr(ua, 'pyew_token')
    user.unsupported_attrs = ua
    try:
        user.save()
    except:
        pass

    # Make sure we have a sample to work with that this user has access to
    sample = Sample.objects(id=id_, source__name__in=user.sources).first()
    if not sample:
        text = "\nNo Sample found"
        request.ws_stream.send_message(base64.b64encode(text), binary=False)
        sys.exit(1)
    sample_data = sample.filedata.read()
    if not sample_data:
        text = "\nCould not get Sample from GridFS: %s" % id_
        request.ws_stream.send_message(base64.b64encode(text), binary=False)
        sys.exit(1)

    # write Sample to disk
    # temp_sample is the sample to read
    try:
        temp_sample = tempfile.NamedTemporaryFile(delete=False)
        sample_name = temp_sample.name
        temp_sample.write(sample_data)
        temp_sample.close()
    except Exception, e:
        text = "\nError writing file to disk: %s" % e
        request.ws_stream.send_message(base64.b64encode(text), binary=False)
        sys.exit(1)
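
The error paths in Code Examples #3 and #4 repeat the same three steps: base64-encode the message, push it down the WebSocket stream, and exit. If you were adapting this code, that pattern could be factored into a small helper; the sketch below is hypothetical and not part of either project:

# Hypothetical helper consolidating the repeated error-reporting pattern above.
import base64
import sys

def fail(request, text):
    """Send an error message over the WebSocket stream and terminate."""
    request.ws_stream.send_message(base64.b64encode(text), binary=False)
    sys.exit(1)

# Usage inside start_pyew_shell would then look like:
#     if not os.path.exists(pyew):
#         fail(request, "\nPyew not found")
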
Code Example #5
File: forms.py  Project: alshech/crits_services
    def __init__(self, username, item, *args, **kwargs):
        """
        Initialize the form.
        Populates form fields based on context object (item) and its related items.
        The way the form fields are populated ensures that only STIXifyable / CybOXable
        options are provided.
        """
        super(TAXIIForm, self).__init__(*args, **kwargs)
        sc = manager.get_config('taxii_service')

        # Avoid options that cause failure: set recipients to intersection of
        # user's sources and the sources that have TAXII feeds configured
        user_srcs = user_sources(username)
        taxii_srcs = [crtfile.split(',')[0] for crtfile in sc['certfiles']]
        self.fields['rcpts'].choices = [(n, n) for n in set(user_srcs).intersection(taxii_srcs)]

        # populate all of the multi choice fields with valid options
        # from the context CRITs object's related items.
        for _type in get_supported_types(): # TODO the hardcoded args to collect_objects should be revisited
            collected = collect_objects(item._meta['crits_type'], item.id, 1, 100, 100, [_type], user_srcs)
            field = forms.MultipleChoiceField(required=False, label=_type)
            field.choices = filter_and_format_choices(collected, item, _type)
            self.fields[_type] = field
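
The recipient choices built in Code Example #5 come from intersecting the user's sources with the sources named in the service's certfiles entries; each entry is a comma-separated source,feed,certfile string, as Code Example #7 below confirms when it unpacks all three fields. A standalone sketch of that intersection with made-up configuration values:

# Illustration only: hypothetical certfiles entries and user sources.
certfiles = [
    'SourceA, feed-a, /etc/taxii/source_a.pem',
    'SourceB, feed-b, /etc/taxii/source_b.pem',
]
user_srcs = ['SourceA', 'SourceC']

taxii_srcs = [crtfile.split(',')[0] for crtfile in certfiles]
choices = [(n, n) for n in set(user_srcs).intersection(taxii_srcs)]
print choices  # [('SourceA', 'SourceA')]
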
Code Example #6
File: handlers.py  Project: alshech/crits_services
def execute_taxii_agent(hostname=None, https=None, feed=None, keyfile=None,
                        certfile=None, start=None, end=None, analyst=None,
                        method=None):
    ret = {
            'Certificate': [],
            'Domain': [],
            'Email': [],
            'Event': [],
            'Indicator': [],
            'IP': [],
            'PCAP': [],
            'RawData': [],
            'Sample': [],
            'successes': 0,
            'failures': [],
            'status': False,
            'reason': ''
          }

    sc = manager.get_config('taxii_service')
    # XXX: Validate these!
    if not hostname:
        hostname = str(sc['hostname'])
    if not keyfile:
        keyfile = str(sc['keyfile'])
    if not certfile:
        certfile = str(sc['certfile'])
    if not feed:
        feed = str(sc['data_feed'])
    if https == None:
        https = sc['https']
    create_events = sc['create_events']


    # Last document's end time is our start time.
    if not start:
        last = taxii.Taxii.get_last()
        if last:
            start = pytz.utc.localize(last.end)

    # If start is a string, convert it to a datetime
    # YYYY-MM-DD HH:MM:SS
    if isinstance(start, str):
        start = pytz.utc.localize(parse(start, fuzzy=True))

    # store the current time as the time of this request
    runtime = datetime.now(tzutc())

    # End time is always now, unless specified.
    if not end:
        end = runtime

    # If end is a string, convert it to a datetime
    # YYYY-MM-DD HH:MM:SS
    if isinstance(end, str):
        end = pytz.utc.localize(parse(end, fuzzy=True))

    # compare start and end to make sure:
    # 1) start time is before end time
    # 2) end time is not in the future
    if (start != None and start >= end) and end > runtime:
        ret['reason'] = "Bad timestamp(s)"
        return ret

    client = tc.HttpClient()
    if https:
        client.setUseHttps(True)
        client.setAuthType(tc.HttpClient.AUTH_CERT)
        client.setAuthCredentials({'key_file': keyfile, 'cert_file': certfile})

    if settings.HTTP_PROXY:
        proxy = settings.HTTP_PROXY
        if not proxy.startswith('http://'):
            proxy = 'http://' + proxy
        client.setProxy(proxy, proxy_type=tc.HttpClient.PROXY_HTTPS)

    crits_taxii = taxii.Taxii()
    crits_taxii.runtime = runtime
    crits_taxii.end = end

    # Poll using 1.1 then 1.0 if that fails.
    poll_msg = tm11.PollRequest(message_id=tm11.generate_message_id(),
                                collection_name=feed,
                                poll_parameters=tm11.PollRequest.PollParameters(),
                                exclusive_begin_timestamp_label=start,
                                inclusive_end_timestamp_label=end)

    response = client.callTaxiiService2(hostname, '/poll/', t.VID_TAXII_XML_11,
                                        poll_msg.to_xml())
    taxii_msg = t.get_message_from_http_response(response, poll_msg.message_id)

    if response.getcode() != 200 or taxii_msg.message_type == tm11.MSG_STATUS_MESSAGE:
        # Check if this is a TAXII 1.0 server and try again
        if response.info().getheader('X-TAXII-Content-Type') == t.VID_TAXII_XML_10:
            poll_msg = tm.PollRequest(message_id=tm.generate_message_id(),
                                    feed_name=feed,
                                    exclusive_begin_timestamp_label=start,
                                    inclusive_end_timestamp_label=end)

            response = client.callTaxiiService2(hostname, '/poll/', t.VID_TAXII_XML_10,
                                            poll_msg.to_xml())
            taxii_msg = t.get_message_from_http_response(response, poll_msg.message_id)
            if response.getcode() != 200 or taxii_msg.message_type == tm.MSG_STATUS_MESSAGE:
                ret['reason'] = "%s: %s" % (taxii_msg.status_type,
                                            taxii_msg.message)
                return ret
        else:
            ret['reason'] = "%s: %s" % (taxii_msg.status_type,
                                        taxii_msg.message)
            return ret


    valid = tm.validate_xml(taxii_msg.to_xml())
    if valid != True:
        ret['reason'] = "Invalid XML: %s" % valid
        return ret

    if taxii_msg.message_type != tm.MSG_POLL_RESPONSE:
        ret['reason'] = "No poll response. Unexpected message type: %s" % taxii_msg.message_type
        return ret

    ret['status'] = True

    if not taxii_msg.content_blocks:
        crits_taxii.save()
        return ret

    mid = taxii_msg.message_id
    for content_block in taxii_msg.content_blocks:
        data = parse_content_block(content_block, keyfile, certfile)
        if not data:
            ret['failures'].append(('No data found in content block', 'Data'))
            continue

        objs = import_standards_doc(data, analyst, method, ref=mid,
                                    make_event=create_events)

        for k in objs['imported']:
            ret['successes'] += 1
            ret[k[0]].append(k[1])
        for k in objs['failed']:
            ret['failures'].append(k)


    crits_taxii.save()
    return ret
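
The dictionary built at the top of Code Example #6 is the poller's entire interface: status and reason report the transport-level outcome, successes counts imported objects, failures collects per-object errors, and the per-type lists (Indicator, Sample, and so on) hold what was created. A rough sketch of a caller, using only keys the function itself defines; the argument values are placeholders:

# Hypothetical caller of execute_taxii_agent; argument values are placeholders.
result = execute_taxii_agent(hostname='taxii.example.com',
                             https=True,
                             feed='my_feed',
                             analyst='some_analyst',
                             method='TAXII poll')
if not result['status']:
    print 'Poll failed: %s' % result['reason']
else:
    print '%d objects imported, %d failures' % (result['successes'],
                                                len(result['failures']))
    for item in result['Indicator']:
        print 'New Indicator: %s' % item
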
Code Example #7
File: handlers.py  Project: alshech/crits_services
def run_taxii_service(analyst, obj, rcpts, preview, relation_choices=[], confirmed=False):
    """
    :param analyst: The analyst triggering this TAXII service call.
    :param obj: The context object being shared.
    :param rcpts: The list of sources to which the TAXII message is being sent.
    :param preview: If True, generate and return the STIX document rather than sending it via TAXII.
    :param relation_choices: The list of items related to obj that have been chosen for sharing.
    :param confirmed: True if the user has accepted and approved releasability updates.
    """
    ret = {
            'success': False, # tells client whether any message was sent successfully
            'rcpts': [], # list of sources to which the message was sent successfully
            'failed_rcpts': [], # list of sources to which the message failed to be sent
          }

    if not obj: # no item (shouldn't occur unless someone is really trying to break things.)
        ret['reason'] = "No object found."
        return ret

    if not rcpts: # no sources selected in TAXII form (validation prevents this, anyway)
        ret['reason'] = "No recipients selected."
        return ret

    # If dealing with an event context, make sure at least one related item is
    # selected. Events have no real sharing value without related information.
    if obj._meta['crits_type'] == Event._meta['crits_type'] and len(relation_choices) == 0:
        ret['reason'] = "Need at least one related item to send."
        return ret

    # Get config and grab some stuff we need.
    sc = manager.get_config('taxii_service')
    hostname = sc['hostname']
    https = sc['https']
    keyfile = sc['keyfile']
    certfile = sc['certfile']
    certfiles = sc['certfiles']

    # collect the list of destination data feeds for the message
    destination_feeds = []
    for crtfile in certfiles:
        (source, feed, filepath) = crtfile.split(',')
        src = source.strip()
        if src in rcpts:
            destination_feeds.append((src, feed.strip(), filepath.strip()))

    if not destination_feeds or len(destination_feeds) != len(rcpts):
        # TAXII form ensures that this shouldn't happen, but just in case...
        ret['reason'] = "Misconfigured TAXII service -- contact an administrator."
        return ret

    # The minimum required info has been provided by user via the TAXII form.
    # Form configuration and validation ensures the form is valid.
    # The TAXII service has also been confirmed to have config information
    # for each selected recipient.
    #
    # NOTE: this does not guarantee that the message will send to
    # each/any recipient feed successfully.

    # Convert object and chosen related items to STIX/CybOX
    stix_msg = obj.to_stix(relation_choices, bin_fmt="base64")
    stix_doc = stix_msg['stix_obj']

    # if doing a preview of content, return content now
    if preview:
        ret['preview'] = stix_doc.to_xml()
        return ret
    elif not confirmed: # if user has not accepted responsibility for releasability
        release = verify_releasability(rcpts, stix_msg['final_objects'], analyst, False)
        if release: # if releasability needs to change
            ret['release_changes'] = release
            return ret # make user confirm changes, instead of sending messages

    #TODO: this doesn't confirm that 'hostname' is a TAXII server...
    if not resolve_taxii_server(hostname):
        ret['reason'] = "Cannot contact TAXII Server at: %s" % hostname
        return ret

    client = tc.HttpClient()
    if https:
        client.setUseHttps(True)
        client.setAuthType(tc.HttpClient.AUTH_CERT)
        client.setAuthCredentials({'key_file': keyfile, 'cert_file': certfile})

    if settings.HTTP_PROXY:
        proxy = settings.HTTP_PROXY
        if not proxy.startswith('http://'):
            proxy = 'http://' + proxy
        client.setProxy(proxy, proxy_type=tc.HttpClient.PROXY_HTTPS)

    # generate and send inbox messages
    # one message per feed, with appropriate TargetFeed header specified
    # Store each TAXII message in a list.
    for feed in destination_feeds:
        rcpt = feed[0]
        # Create encrypted block
        encrypted_block = encrypt_block(
            tm.ContentBlock(
                content_binding = t.CB_STIX_XML_111,
                content = stix_doc.to_xml()).to_xml(),
            feed[2])
        # Try TAXII 1.1 first:
        try_10 = False
        current_status_type = None
        failed = True
        result = gen_send(tm11, client, encrypted_block, hostname,
                          t.VID_TAXII_XML_11,
                          dcn = [feed[1]],
                          url = "/services/inbox/")
        if len(result) == 2:
            res = result[1]
            if res.status_type == tm11.ST_SUCCESS:
                failed = False
                ret['rcpts'].append(rcpt)
            else:
                try_10 = True
                current_status_type = "<br>tm11: " + res.status_type
        else:
            try_10 = True
            current_status_type = "<br>tm11: " + result[0]

        # Try TAXII 1.0 since 1.1 seems to have failed.
        if try_10:
            result = gen_send(tm, client, encrypted_block, hostname,
                            t.VID_TAXII_XML_10,
                            eh = {'TargetFeed': feed[1]},
                            url = "/inbox/")
            if len(result) == 2:
                res = result[1]
                if res.status_type == tm11.ST_SUCCESS:
                    failed = False
                    ret['rcpts'].append(rcpt)
                else:
                    err = "tm10: " + res.status_type
                    current_status_type += "<br>%s" % err
            else:
                err = "tm10: " + result[0]
                current_status_type += "<br>%s" % err

        if failed:
            ret['failed_rcpts'].append((rcpt, current_status_type))

    if ret['rcpts']: # update releasability for successful TAXII messages
        verify_releasability(ret['rcpts'], stix_msg['final_objects'], analyst, True)

    ret['success'] = True
    return ret
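
Code Example #7 expects its caller to drive a small three-step flow: request a preview first, then attempt a send with confirmed=False so any proposed releasability changes can be reviewed, and finally resend with confirmed=True. A rough sketch of such a driver, written as a standalone function so the shared object is just a parameter; it is hypothetical, not part of the project:

# Hypothetical wrapper around run_taxii_service showing the preview/confirm flow.
def share_via_taxii(analyst, obj, rcpts, relation_choices=None):
    relation_choices = relation_choices or []

    # 1) Preview the STIX document without sending anything.
    ret = run_taxii_service(analyst, obj, rcpts, True, relation_choices)
    if 'preview' not in ret:
        return ret  # ret['reason'] explains what was missing

    # 2) Attempt the send; surface any proposed releasability changes.
    ret = run_taxii_service(analyst, obj, rcpts, False, relation_choices, False)
    if 'release_changes' in ret:
        # In the real UI the analyst reviews these; here we simply accept them.
        ret = run_taxii_service(analyst, obj, rcpts, False, relation_choices, True)
    return ret  # ret['rcpts'] / ret['failed_rcpts'] list the per-source results
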
Code Example #8
def execute_taxii_agent(hostname=None,
                        feed=None,
                        keyfile=None,
                        certfile=None,
                        start=None,
                        end=None,
                        analyst=None,
                        method=None):
    ret = {
        'events': [],
        'samples': [],
        'emails': [],
        'indicators': [],
        'successes': 0,
        'failures': 0,
        'status': False,
        'reason': ''
    }

    sc = manager.get_config('taxii_service')
    # XXX: Validate these!
    if not hostname:
        hostname = str(sc['hostname'])
    if not keyfile:
        keyfile = str(sc['keyfile'])
    if not certfile:
        certfile = str(sc['certfile'])
    if not feed:
        feed = str(sc['data_feed'])

    # Last document's end time is our start time.
    if not start:
        last = taxii.Taxii.get_last()
        if last:
            start = pytz.utc.localize(last.end)

    # If start is a string, convert it to a datetime
    # YYYY-MM-DD HH:MM:SS
    if isinstance(start, str):
        start = pytz.utc.localize(parse(start, fuzzy=True))

    # store the current time as the time of this request
    runtime = datetime.now(tzutc())

    # End time is always now, unless specified.
    if not end:
        end = runtime

    # If end is a string, convert it to a datetime
    # YYYY-MM-DD HH:MM:SS
    if isinstance(end, str):
        end = pytz.utc.localize(parse(end, fuzzy=True))

    # compare start and end to make sure:
    # 1) start time is before end time
    # 2) end time is not in the future
    if (start != None and start >= end) and end > runtime:
        ret['reason'] = "Bad timestamp(s)"
        return ret

    client = tc.HttpClient()
    client.setUseHttps(True)
    client.setAuthType(tc.HttpClient.AUTH_CERT)
    client.setAuthCredentials({'key_file': keyfile, 'cert_file': certfile})

    if settings.HTTP_PROXY:
        proxy = settings.HTTP_PROXY
        if not proxy.startswith('http://'):
            proxy = 'http://' + proxy
        client.setProxy(proxy, proxy_type=tc.HttpClient.PROXY_HTTPS)

    crits_taxii = taxii.Taxii()
    crits_taxii.runtime = runtime
    crits_taxii.end = end

    poll_msg = tm.PollRequest(message_id=tm.generate_message_id(),
                              feed_name=feed,
                              exclusive_begin_timestamp_label=start,
                              inclusive_end_timestamp_label=end)
    response = client.callTaxiiService2(hostname, '/poll/', t.VID_TAXII_XML_10,
                                        poll_msg.to_xml())

    if response.getcode() != 200:
        ret['reason'] = "Response is not 200 OK"
        return ret

    taxii_msg = t.get_message_from_http_response(response, poll_msg.message_id)

    valid = tm.validate_xml(taxii_msg.to_xml())
    if valid != True:
        ret['reason'] = valid
        return ret

    if taxii_msg.message_type != tm.MSG_POLL_RESPONSE:
        ret['reason'] = "No poll response"
        return ret

    ret['status'] = True

    if not taxii_msg.content_blocks:
        crits_taxii.save()
        return ret

    mid = taxii_msg.message_id
    for content_block in taxii_msg.content_blocks:
        data = parse_content_block(content_block, keyfile, certfile)
        if not data:
            ret['failures'] += 1
            continue

        objs = import_standards_doc(data,
                                    analyst,
                                    method,
                                    ref=mid,
                                    make_event=True)

        ret['successes'] += 1

        for k in ["events", "samples", "emails", "indicators"]:
            for i in objs[k]:
                ret[k].append(i)

    crits_taxii.save()
    return ret
Code Example #9
File: handlers.py  Project: 9b/crits_services
def execute_taxii_agent(hostname=None, feed=None, keyfile=None, certfile=None, start=None, end=None, analyst=None, method=None):
    ret = {
            'events': [],
            'samples': [],
            'emails': [],
            'indicators': [],
            'successes': 0,
            'failures': 0,
            'status': False,
            'reason': ''
          }

    sc = manager.get_config('taxii_service')
    # XXX: Validate these!
    if not hostname:
        hostname = str(sc['hostname'])
    if not keyfile:
        keyfile = str(sc['keyfile'])
    if not certfile:
        certfile = str(sc['certfile'])
    if not feed:
        feed = str(sc['data_feed'])

    # Last document's end time is our start time.
    if not start:
        last = taxii.Taxii.get_last()
        if last:
            start = pytz.utc.localize(last.end)

    # If start is a string, convert it to a datetime
    # YYYY-MM-DD HH:MM:SS
    if isinstance(start, str):
        start = pytz.utc.localize(parse(start, fuzzy=True))

    # store the current time as the time of this request
    runtime = datetime.now(tzutc())

    # End time is always now, unless specified.
    if not end:
        end = runtime

    # If end is a string, convert it to a datetime
    # YYYY-MM-DD HH:MM:SS
    if isinstance(end, str):
        end = pytz.utc.localize(parse(end, fuzzy=True))

    # compare start and end to make sure:
    # 1) start time is before end time
    # 2) end time is not in the future
    if (start != None and start >= end) and end > runtime:
        ret['reason'] = "Bad timestamp(s)"
        return ret 

    client = tc.HttpClient()
    client.setUseHttps(True)
    client.setAuthType(tc.HttpClient.AUTH_CERT)
    client.setAuthCredentials({'key_file': keyfile, 'cert_file': certfile})

    if settings.HTTP_PROXY:
        proxy = settings.HTTP_PROXY
        if not proxy.startswith('http://'):
            proxy = 'http://' + proxy
        client.setProxy(proxy, proxy_type=tc.HttpClient.PROXY_HTTPS)

    crits_taxii = taxii.Taxii()
    crits_taxii.runtime = runtime
    crits_taxii.end = end

    poll_msg = tm.PollRequest(message_id=tm.generate_message_id(),
                              feed_name=feed,
                              exclusive_begin_timestamp_label=start,
                              inclusive_end_timestamp_label=end)
    response = client.callTaxiiService2(hostname, '/poll/', t.VID_TAXII_XML_10,
                                        poll_msg.to_xml())

    if response.getcode() != 200:
        ret['reason'] = "Response is not 200 OK"
        return ret

    taxii_msg = t.get_message_from_http_response(response, poll_msg.message_id)

    valid = tm.validate_xml(taxii_msg.to_xml())
    if valid != True:
        ret['reason'] = valid
        return ret

    if taxii_msg.message_type != tm.MSG_POLL_RESPONSE:
        ret['reason'] = "No poll response"
        return ret

    ret['status'] = True

    if not taxii_msg.content_blocks:
        crits_taxii.save()
        return ret

    mid = taxii_msg.message_id
    for content_block in taxii_msg.content_blocks:
        data = parse_content_block(content_block, keyfile, certfile)
        if not data:
            ret['failures'] += 1
            continue

        objs = import_standards_doc(data, analyst, method, ref=mid, make_event=True)

        ret['successes'] += 1

        for k in ["events", "samples", "emails", "indicators"]:
            for i in objs[k]:
                ret[k].append(i)

    crits_taxii.save()
    return ret
Code Example #10
def chopshop_carver(pcap_md5, options, analyst):
    # Make sure we can find ChopShop
    sc = manager.get_config('ChopShop')
    shop_path = "%s/shop" % str(sc['basedir'])
    if not os.path.exists(shop_path):
        return {'success': False, 'message': "ChopShop shop path does not exist."}

    sys.path.append(shop_path)
    import ChopLib as CL
    if StrictVersion(str(CL.VERSION)) < StrictVersion('4.0'):
        return {'success': False, 'message': 'Need ChopShop 4.0 or newer'}

    # Until we have an smtp_extractor in ChopShop we have to resort to
    # (ab)using payloads to dump the entire TCP stream and letting
    # handle_eml() process everything. We also use the payloads module
    # for handling raw carves. If a user wants to do SMTP and raw
    # simultaneously it won't work because we can't distinguish one
    # payloads module from another.
    if options.get('raw', False) and options.get('smtp', False):
        return {'success': False, 'message': "Can not process SMTP and raw simultaneously."}

    # Make sure we have a PCAP to work with
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return {'success': False, 'message': "No PCAP found."}
    pcap_data = pcap.filedata.read()
    if not pcap_data:
        return {'success': False, 'message': "Could not get PCAP from GridFS: %s" %  pcap_md5}

    source = pcap['source'][0]['name'] # XXX: This kind of sucks...

    # Create module string to pass to ChopShop
    modules = []
    if options.get('http_resp', False) or options.get('http_req', False):
        modules.append("http | http_extractor")

    if options.get('smtp', False) or options.get('raw', False):
        # ChopShop really needs an smtp_extractor, but there's no good
        # capability to do that yet. Maybe one day I'll build one. :)
        # For now, just use payloads and let handle_eml() sort it out.
        #
        # Raw carving works exactly the same way, just post-processed
        # differently.
        modules.append("payloads -b")

    if not modules:
        return {'success': False, 'message': "No modules specified."}

    mod_string = ';'.join(mod for mod in modules)

    from ChopLib import ChopLib
    from ChopUi import ChopUi

    choplib = ChopLib()
    chopui = ChopUi()

    choplib.base_dir = str(sc['basedir'])

    choplib.modules = mod_string

    chopui.jsonout = jsonhandler
    choplib.jsonout = True

    # ChopShop (because of pynids) needs to read a file off disk.
    # Write the pcap data to a temporary file.
    temp_pcap = tempfile.NamedTemporaryFile(delete=False)
    temp_pcap.write(pcap_data)
    temp_pcap.close()

    choplib.filename = temp_pcap.name
    chopui.bind(choplib)
    chopui.start()

    if chopui.jsonclass == None:
        os.unlink(temp_pcap.name)
        return {'success': False, 'message': 'Lost race condition in chopui. Try again.'}

    # ChopUI must be started before the jsonhandler class is instantiated.
    # Tell the class what we are looking for now that it exists.
    chopui.jsonclass.parse_options(options)

    choplib.start()

    while chopui.is_alive():
        time.sleep(.1)

    chopui.join()
    choplib.finish()
    choplib.join()

    os.unlink(temp_pcap.name)

    message = ''

    # Grab any carved HTTP bodies.
    for (md5_digest, (name, blob)) in chopui.jsonclass.http_files.items():
        if handle_file(name, blob, source, parent_md5=pcap_md5, user=analyst, method='ChopShop Filecarver', md5_digest=md5_digest, parent_type='PCAP'):
            # Specifically not using name here as I don't want to deal
            # with sanitizing it
            message += "Saved HTTP body: <a href=\"%s\">%s</a><br />" % (reverse('crits.samples.views.detail', args=[md5_digest]), md5_digest)
        else:
            message += "Failed to save file %s." % md5_digest

    # Grab any carved SMTP returns.
    for blob in chopui.jsonclass.smtp_returns.values():
        ret = handle_eml(blob, source, None, analyst, 'ChopShop FileCarver', 'PCAP', pcap.id)
        if not ret['status']:
            message += ret['reason']
            continue

        message += "Saved email: <a href=\"%s\">%s</a><br />%i attachment(s)<br />" % (reverse('crits.emails.views.email_detail', args=[ret['object'].id]), ret['object'].id, len(ret['attachments'].keys()))

        for md5_digest in ret['attachments'].keys():
            message += "<a href=\"%s\">%s</a><br />" % (reverse('crits.samples.views.detail', args=[md5_digest]), md5_digest)

    # Handle raw returns.
    for id_, blob in chopui.jsonclass.raw_returns.items():
        md5_digest = handle_file(id_, blob, source, parent_md5=pcap_md5, user=analyst, method='ChopShop Filecarver', parent_type='PCAP')
        if md5_digest:
            message += "Saved raw %s: <a href=\"%s\">%s</a><br />" % (id_, reverse('crits.samples.views.detail', args=[md5_digest]), md5_digest)
        else:
            message += "Failed to save raw %s." % md5_digest

    # It's possible to have no files here if nothing matched.
    # Still return True as there were no problems.
    if not message:
        message = 'No files found.'
    return {'success': True, 'message': message}
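
Code Example #10 drives ChopShop from a dictionary of carving options; the keys it inspects are http_req, http_resp, smtp and raw, and as its comment explains, smtp and raw cannot be combined. A minimal sketch of a caller; the PCAP MD5 and analyst name are placeholders:

# Hypothetical invocation of chopshop_carver; values are placeholders.
options = {
    'http_req': True,   # carve bodies from HTTP requests
    'http_resp': True,  # carve bodies from HTTP responses
    'smtp': False,      # extract emails from SMTP streams
    'raw': False,       # dump raw TCP payloads (mutually exclusive with smtp)
}
result = chopshop_carver('0123456789abcdef0123456789abcdef', options, 'some_analyst')
if result['success']:
    print result['message']  # HTML links to any carved samples or emails
else:
    print 'Carving failed: %s' % result['message']
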
Code Example #11
File: handlers.py  Project: 9b/crits_services
def pcap_tcpdump(pcap_md5, form, analyst):
    flag_list = []
    cleaned_data = form.cleaned_data

    # Make sure we can find tcpdump
    sc = manager.get_config('MetaCap')
    tcpdump_bin = str(sc['tcpdump'])
    if not os.path.exists(tcpdump_bin):
        tcpdump_output = "Could not find tcpdump!"
        return tcpdump_output

    # Make sure we have a PCAP to work with
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return "No PCAP found"
    pcap_data = pcap.filedata.read()
    if not pcap_data:
        return "Could not get PCAP from GridFS: %s" %  pcap_md5

    # Use the filename if it's there, otherwise the md5.
    # This is used for the description of the carved sample.
    if pcap.filename:
        pcap_filename = pcap.filename
    else:
        pcap_filename = pcap_md5

    # Setup tcpdump arguments
    if cleaned_data['sequence']:
        flag_list.append("-S")
    if cleaned_data['timestamp']:
        flag_list.append("%s" % cleaned_data['timestamp'])
    if cleaned_data['verbose']:
        flag_list.append("%s" % cleaned_data['verbose'])
    if cleaned_data['data']:
        flag_list.append("%s" % cleaned_data['data'])
    # force -nN
    flag_list.append("-nN")
    # if we need to carve
    if cleaned_data['carve']:
        if not cleaned_data['bpf']:
            return "Must supply a BPF filter to carve."
        new_pcap = tempfile.NamedTemporaryFile(delete=False)
        flag_list.append("-w")
        flag_list.append(new_pcap.name)

    if cleaned_data['bpf']:
        flag_list.append('%s' % str(cleaned_data['bpf'].replace('"', '')))

    # write PCAP to disk
    # temp_out collects stdout and stderr
    # temp_pcap is the pcap to read
    # new_pcap is the pcap being written if carving
    temp_out = tempfile.NamedTemporaryFile(delete=False)
    temp_pcap = tempfile.NamedTemporaryFile(delete=False)
    pcap_name = temp_pcap.name
    temp_pcap.write(pcap_data)
    temp_pcap.close()
    args = [tcpdump_bin, '-r', temp_pcap.name] + flag_list
    tcpdump = Popen(args, stdout=temp_out, stderr=STDOUT)
    tcpdump.communicate()
    out_name = temp_out.name
    temp_out.seek(0)
    tcpdump_output = ''
    for line in iter(temp_out):
        tcpdump_output += "%s" % line
    temp_out.close()

    #delete temp files
    os.unlink(pcap_name)
    os.unlink(out_name)

    if cleaned_data['carve']:
        new_pcap_data = new_pcap.read()
        if len(new_pcap_data) > 24: # pcap-ng will change this.
            m = hashlib.md5()
            m.update(new_pcap_data)
            md5 = m.hexdigest()
            org = get_user_organization(analyst)
            result = handle_pcap_file("%s.pcap" % md5,
                                      new_pcap_data,
                                      org,
                                      user=analyst,
                                      description="%s of %s" % (cleaned_data['bpf'], pcap_filename),
                                      parent_id=pcap.id,
                                      parent_type="PCAP",
                                      method="MetaCap Tcpdumper")
            if result['success']:
                tcpdump_output = "<a href=\"%s\">View new pcap.</a>" % reverse('crits.pcaps.views.pcap_details', args=[result['md5']])
            else:
                tcpdump_output = result['message']
        else:
            tcpdump_output = "No packets matched the filter."

        os.unlink(new_pcap.name)

    return tcpdump_output
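
Code Example #11 assembles a tcpdump argument list from the form's cleaned data (sequence, timestamp, verbose, data, carve and bpf), always forces -nN, and adds -w <tempfile> only when carving. The following standalone sketch mirrors that assembly with hard-coded, hypothetical form values (the exact flag strings stored by the form are an assumption) to show what ends up on the command line:

# Illustration only: the flag values below are hypothetical form selections.
cleaned_data = {
    'sequence': True,      # adds -S (absolute TCP sequence numbers)
    'timestamp': '-tttt',  # timestamp flag chosen in the form
    'verbose': '-vv',
    'data': '-X',
    'carve': False,
    'bpf': 'tcp port 80',
}

flag_list = []
if cleaned_data['sequence']:
    flag_list.append('-S')
for key in ('timestamp', 'verbose', 'data'):
    if cleaned_data[key]:
        flag_list.append('%s' % cleaned_data[key])
flag_list.append('-nN')
if cleaned_data['bpf']:
    flag_list.append(cleaned_data['bpf'].replace('"', ''))

print ['tcpdump', '-r', '/tmp/example.pcap'] + flag_list
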
Code Example #12
File: handlers.py  Project: 9b/crits_services
def pcap_pdml_html(pcap_md5, analyst):
    # check to see if there is a File object with the source reference of
    # 'tshark_pdml.html'. If there is, return it.
    # If not, generate it, save it, and return it.
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return "No PCAP found"
    else:
        coll = settings.COL_OBJECTS
        pdml_obj = None
        pdml_html = None
        for obj in pcap.obj:
            for source in obj.source:
                for instance in source.instances:
                    if instance.reference == 'tshark_pdml.html':
                        pdml_obj = obj
        if not pdml_obj:
            sc = manager.get_config('MetaCap')
            tshark_bin = str(sc['tshark'])
            if not os.path.exists(tshark_bin):
                pdml_html = "Could not find tshark!"
                return {'html': pdml_html}

            pcap_data = pcap.filedata.read()
            if not pcap_data:
                pdml_html =  "Could not get PCAP from GridFS: %s" %  pcap_md5
                return {'html': pdml_html}

            # write PCAP to disk
            temp_pcap = tempfile.NamedTemporaryFile(delete=False)
            pcap_name = temp_pcap.name
            temp_pcap.write(pcap_data)
            temp_pcap.close()

            # use tshark to generate a pdml file
            temp_pdml = tempfile.NamedTemporaryFile(delete=False)
            args = [tshark_bin, "-n", "-r", pcap_name, "-T", "pdml"]
            tshark = Popen(args, stdout=temp_pdml, stderr=PIPE)
            tshark_out, tshark_err = tshark.communicate()
            if tshark.returncode != 0:
                return {'html': "%s, %s" % (tshark_out,tshark_err)}
            pdml_name = temp_pdml.name
            temp_pdml.seek(0)

            # transform PDML into HTML
            xsl_file = None
            for d in settings.SERVICE_DIRS:
                try:
                    file_dir = "%s/metacap_service" % d
                    xsl_file = open('%s/pdml2html.xsl' % file_dir, 'r')
                except IOError:
                    pass
            if not xsl_file:
                return {'html': 'Could not find XSL.'}

            parser = etree.XMLParser()
            parser.resolvers.add(FileResolver())
            save_pdml = False
            try:
                xml_input = etree.parse(temp_pdml, parser)
                xslt_root = etree.parse(xsl_file, parser)
                transform = etree.XSLT(xslt_root)
                pdml_html = str(transform(xml_input))
                save_pdml = True
            except Exception:
                return {'html': 'Could not parse/transform PDML output!'}

            temp_pdml.close()

            # delete PDML file
            os.unlink(pdml_name)
            os.unlink(pcap_name)

            #  save pdml_html as an object for this PCAP
            if save_pdml:
                fn = put_file_gridfs('tshark_pdml.html', pdml_html, collection=coll)
                if fn:
                    m = hashlib.md5()
                    m.update(pdml_html)
                    md5 = m.hexdigest()
                    pcap.add_object("Artifact",
                                    "File",
                                    md5,
                                    get_user_organization(analyst),
                                    "MetaCap",
                                    'tshark_pdml.html',
                                    analyst)
                    pcap.save()
        else:
            # get file from gridfs and return it
            obj_md5 = pdml_obj.value
            pdml_html = get_file_gridfs(obj_md5, collection=coll)
            if not pdml_html:
                return {'html': 'No file found in GridFS'}
        if not pdml_obj:
            pcap_objects = pcap.sort_objects()
            return {'html': pdml_html, 'objects': pcap_objects, 'id': pcap.id}
        else:
            return {'html': pdml_html}
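
At its core, Code Example #12 is a two-step pipeline: tshark renders the PCAP as PDML, then lxml applies the service's pdml2html.xsl stylesheet to turn the PDML into HTML. Stripped of the CRITs bookkeeping, the same pipeline looks roughly like the sketch below; the file paths are placeholders:

# Standalone sketch of the tshark -> PDML -> HTML pipeline; paths are placeholders.
from subprocess import Popen, PIPE
from lxml import etree

pcap_name = '/tmp/example.pcap'
xsl_path = '/path/to/metacap_service/pdml2html.xsl'

tshark = Popen(['tshark', '-n', '-r', pcap_name, '-T', 'pdml'],
               stdout=PIPE, stderr=PIPE)
pdml, err = tshark.communicate()
if tshark.returncode != 0:
    raise RuntimeError(err)

xslt_root = etree.parse(xsl_path)
transform = etree.XSLT(xslt_root)
pdml_html = str(transform(etree.fromstring(pdml)))
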