def _process_pcap(self, pcap, scandate):
    """
    Add Pcap file to CRITs.

    Args:
        pcap (binary): pcap data
        scandate (str): scan date from when pcap was collected

    TODO: Add an error check
    """
    self._info("Adding PCAP and creating relationship to %s"
               % (str(self.obj.id)))
    self._notify()
    # Name the stored pcap after its own md5 so duplicates are obvious.
    pcap_md5 = md5(pcap).hexdigest()
    result = handle_pcap_file("%s.pcap" % pcap_md5,
                              pcap,
                              self.obj.source,
                              user=self.current_task.username,
                              description='Created %s' % (scandate),
                              related_id=str(self.obj.id),
                              related_type="Sample",
                              method=self.name,
                              reference=None,
                              relationship=RelationshipTypes.RELATED_TO)
    self._add_result("pcap added", pcap_md5, {'md5': pcap_md5})
def _process_pcap(self, pcap, scandate):
    """
    Add Pcap file to CRITs.

    Args:
        pcap (binary): pcap data
        scandate (str): scan date from when pcap was collected

    TODO: Add an error check
    """
    self._info("Adding PCAP and creating relationship to %s"
               % (str(self.obj.id)))
    self._notify()
    pcap_md5 = md5(pcap).hexdigest()
    # Store the pcap under an md5-derived file name, sourced from and
    # related to the top-level Sample this task is servicing. The method
    # used for acquiring the PCAP is this service's name; no external
    # reference is recorded.
    result = handle_pcap_file("%s.pcap" % pcap_md5,
                              pcap,
                              self.obj.source,
                              user=self.current_task.username,
                              description='Created %s' % (scandate),
                              related_id=str(self.obj.id),
                              related_type="Sample",
                              method=self.name,
                              reference=None,
                              relationship='Related_To')
    self._add_result("pcap added", pcap_md5, {'md5': pcap_md5})
def run(self, argv): parser = OptionParser() parser.add_option("-d", "--description", action="store", dest="description", type="string", default="", help="PCAP Description") parser.add_option("-f", "--file", action="store", dest="filename", type="string", help="scanned FILENAME") parser.add_option("-s", "--source", action="store", dest="source", type="string", help="source") parser.add_option("-m", "--method", action="store", dest="method", type="string", help="source method") parser.add_option("-r", "--reference", action="store", dest="reference", type="string", help="source reference") parser.add_option("-p", "--parent", action="store", dest="parent", type="string", default="", help="parent md5") parser.add_option("-P", "--parent-type", action="store", dest="parent_type", type="string", default="PCAP", help="parent type (Sample, PCAP...)") parser.add_option("-t", "--tlp", action="store", dest="tlp", type="string", default="red", help="TLP of data") (opts, args) = parser.parse_args(argv) if not opts.filename: parser.error("File name not provided") filename = opts.filename if not opts.source: parser.error("Source not provided") source = opts.source if not self.user.has_access_to(PCAPACL.WRITE): print "[-] User does not have permission to add PCAP" return description = opts.description parent = opts.parent parent_type = opts.parent_type user = self.user method = opts.method or "Command line add_pcap_file.py" reference = opts.reference tlp = opts.tlp f = open(filename, 'rb') data = f.read() f.close() (dirname, fname) = os.path.split(filename) print parent print parent_type status = handle_pcap_file(fname, data, source, user, description, related_md5=parent, related_type=parent_type, method=method, reference=reference, tlp=tlp) if status['success']: md5 = hashlib.md5(data).hexdigest() print "[+] Added %s (MD5: %s)" % (filename, md5) else: print "[-] %s returned error: %s" % (filename, status['message'])
def obj_create(self, bundle, **kwargs):
    """
    Handles creating PCAPs through the API.

    :param bundle: Bundle containing the information to create the PCAP.
    :type bundle: Tastypie Bundle object.
    :returns: HttpResponse.
    """
    content = {"return_code": 1, "type": "PCAP"}
    analyst = bundle.request.user.username
    file_ = bundle.data.get("filedata", None)
    if not file_:
        content["message"] = "Upload type of 'file' but no file uploaded."
        self.crits_response(content)
    filedata = file_.read()
    filename = str(file_)

    # Pull all optional fields off the bundle in one pass.
    fields = {}
    for key in ("source", "method", "reference", "description",
                "relationship", "related_id", "related_md5",
                "related_type", "bucket_list", "ticket"):
        fields[key] = bundle.data.get(key, None)

    result = handle_pcap_file(filename,
                              filedata,
                              fields["source"],
                              analyst,
                              fields["description"],
                              related_id=fields["related_id"],
                              related_md5=fields["related_md5"],
                              related_type=fields["related_type"],
                              method=fields["method"],
                              reference=fields["reference"],
                              relationship=fields["relationship"],
                              bucket_list=fields["bucket_list"],
                              ticket=fields["ticket"])

    if result.get("message"):
        content["message"] = result.get("message")
    if result.get("id"):
        content["url"] = reverse("api_dispatch_detail",
                                 kwargs={"resource_name": "pcaps",
                                         "api_name": "v1",
                                         "pk": result.get("id")})
        content["id"] = result.get("id")
    if result["success"]:
        content["return_code"] = 0
    self.crits_response(content)
def obj_create(self, bundle, **kwargs):
    """
    Handles creating PCAPs through the API.

    :param bundle: Bundle containing the information to create the PCAP.
    :type bundle: Tastypie Bundle object.
    :returns: HttpResponse.
    """
    analyst = bundle.request.user.username
    get = bundle.data.get  # shorthand for the optional bundle fields
    content = {'return_code': 1, 'type': 'PCAP'}

    file_ = get('filedata', None)
    if not file_:
        content['message'] = "Upload type of 'file' but no file uploaded."
        self.crits_response(content)
    filedata = file_.read()
    filename = str(file_)

    result = handle_pcap_file(filename,
                              filedata,
                              get('source', None),
                              analyst,
                              get('description', None),
                              related_id=get('related_id', None),
                              related_md5=get('related_md5', None),
                              related_type=get('related_type', None),
                              method=get('method', None),
                              reference=get('reference', None),
                              relationship=get('relationship', None),
                              bucket_list=get('bucket_list', None),
                              ticket=get('ticket', None))

    message = result.get('message')
    if message:
        content['message'] = message
    new_id = result.get('id')
    if new_id:
        content['url'] = reverse('api_dispatch_detail',
                                 kwargs={'resource_name': 'pcaps',
                                         'api_name': 'v1',
                                         'pk': new_id})
        content['id'] = new_id
    if result['success']:
        content['return_code'] = 0
    self.crits_response(content)
def upload_pcap(request):
    """
    Add a new PCAP to CRITs.

    :param request: Django request object (Required)
    :type request: :class:`django.http.HttpRequest`
    :returns: :class:`django.http.HttpResponse`
    """
    # Guard clauses: only a valid POSTed form reaches the upload path.
    if request.method != 'POST':
        return render_to_response('error.html',
                                  {'error': "Expected POST."},
                                  RequestContext(request))
    form = UploadPcapForm(request.user, request.POST, request.FILES)
    if not form.is_valid():
        return render_to_response('file_upload_response.html',
                                  {'response': json.dumps({'success': False,
                                                           'form': form.as_table()})},
                                  RequestContext(request))

    cleaned_data = form.cleaned_data
    filedata = request.FILES['filedata']
    filename = filedata.name
    data = filedata.read()  # XXX: Should be using chunks here.

    status = handle_pcap_file(filename,
                              data,
                              cleaned_data.get('source_name'),
                              request.user,
                              cleaned_data.get('description', ''),
                              related_id=cleaned_data.get('related_id', ''),
                              related_type=cleaned_data.get('related_type', ''),
                              relationship=cleaned_data.get('relationship_type', ''),
                              method=cleaned_data.get('source_method', '') or 'Upload',
                              reference=cleaned_data.get('source_reference', ''),
                              tlp=cleaned_data.get('source_tlp'),
                              bucket_list=cleaned_data.get(form_consts.Common.BUCKET_LIST_VARIABLE_NAME),
                              ticket=cleaned_data.get(form_consts.Common.TICKET_VARIABLE_NAME))

    if status['success']:
        response = {'message': 'PCAP uploaded successfully! <a href="%s">View PCAP</a>'
                               % reverse('crits.pcaps.views.pcap_details',
                                         args=[status['md5']]),
                    'success': True}
    else:
        response = {'success': False, 'message': status['message']}
    return render_to_response('file_upload_response.html',
                              {'response': json.dumps(response)},
                              RequestContext(request))
def _process_pcap(self, pcap):
    """Store a pcap produced by this service and relate it to the TLO."""
    self._debug("Processing PCAP.")
    self._notify()
    username = self.current_task.username
    # The user's organization becomes the source of the new PCAP.
    org = get_user_organization(username)
    digest = md5(pcap).hexdigest()
    result = handle_pcap_file("%s.pcap" % digest,
                              pcap,
                              org,
                              user=username,
                              related_id=str(self.obj.id),
                              related_type=self.obj._meta['crits_type'],
                              method=self.name)
    self._add_result("pcap_added", digest, {'md5': digest})
def _process_pcap(self, pcap):
    """Store a pcap produced by this service as a child of the parent TLO."""
    self._debug("Processing PCAP.")
    self._notify()
    org = get_user_organization(self.current_task.username)
    digest = md5(pcap).hexdigest()
    # NOTE(review): parent_type is hardcoded to "PCAP" — presumably this
    # service only runs against PCAP objects; confirm before reusing.
    result = handle_pcap_file("%s.pcap" % digest,
                              pcap,
                              org,
                              user=self.current_task.username,
                              parent_id=str(self.obj.id),
                              parent_type="PCAP",
                              method=self.name)
    self._add_result("pcap_added", digest, {'md5': digest})
def upload_pcap(request):
    """
    Add a new PCAP to CRITs.

    :param request: Django request object (Required)
    :type request: :class:`django.http.HttpRequest`
    :returns: :class:`django.http.HttpResponse`
    """
    # Guard clauses: only a valid POSTed form reaches the upload path.
    if request.method != 'POST':
        return render_to_response('error.html',
                                  {'error': "Expected POST."},
                                  RequestContext(request))
    form = UploadPcapForm(request.user, request.POST, request.FILES)
    if not form.is_valid():
        return render_to_response('file_upload_response.html',
                                  {'response': json.dumps({'success': False,
                                                           'form': form.as_table()})},
                                  RequestContext(request))

    cleaned_data = form.cleaned_data
    filedata = request.FILES['filedata']
    filename = filedata.name
    data = filedata.read()  # XXX: Should be using chunks here.

    status = handle_pcap_file(filename,
                              data,
                              cleaned_data.get('source'),
                              request.user.username,
                              cleaned_data.get('description', ''),
                              parent_id=cleaned_data.get('parent_id', ''),
                              parent_type=cleaned_data.get('parent_type', ''),
                              method='Upload',
                              bucket_list=cleaned_data.get(form_consts.Common.BUCKET_LIST_VARIABLE_NAME),
                              ticket=cleaned_data.get(form_consts.Common.TICKET_VARIABLE_NAME))

    if status['success']:
        response = {'message': 'PCAP uploaded successfully! <a href="%s">View PCAP</a>'
                               % reverse('crits.pcaps.views.pcap_details',
                                         args=[status['md5']]),
                    'success': True}
    else:
        response = {'success': False, 'message': status['message']}
    return render_to_response('file_upload_response.html',
                              {'response': json.dumps(response)},
                              RequestContext(request))
def run(self, argv): parser = OptionParser() parser.add_option("-d", "--description", action="store", dest="description", type="string", default="", help="PCAP Description") parser.add_option("-f", "--file", action="store", dest="filename", type="string", help="scanned FILENAME") parser.add_option("-s", "--source", action="store", dest="source", type="string", help="source") parser.add_option("-p", "--parent", action="store", dest="parent", type="string", default="", help="parent md5") parser.add_option("-P", "--parent-type", action="store", dest="parent_type", type="string", default="PCAP", help="parent type (Sample, PCAP...)") parser.add_option("-u", "--user", action="store", dest="user", type="string", default="", help="user") (opts, args) = parser.parse_args(argv) if not opts.filename: parser.error("File name not provided") filename = opts.filename if not opts.source: parser.error("Source not provided") source = opts.source description = opts.description parent = opts.parent parent_type = opts.parent_type user = opts.user method = "Command line add_pcap_file.py" f = open(filename, 'rb') data = f.read() f.close() (dirname, fname) = os.path.split(filename) status = handle_pcap_file(fname, data, source, user, description, parent_md5=parent, parent_type=parent_type, method=method) if status['success']: md5 = hashlib.md5(data).hexdigest() print "[+] Added %s (MD5: %s)" % (filename, md5) else: print "[-] %s returned error: %s" % (filename, status['message'])
def obj_create(self, bundle, **kwargs):
    """
    Handles creating PCAPs through the API.

    :param bundle: Bundle containing the information to create the PCAP.
    :type bundle: Tastypie Bundle object.
    :returns: Bundle object.
    :raises BadRequest: If filedata is not provided or creation fails.
    """
    analyst = bundle.request.user.username
    file_ = bundle.data.get('filedata', None)
    if not file_:
        raise BadRequest("Upload type of 'file' but no file uploaded.")
    filedata = file_.read()
    filename = str(file_)
    source = bundle.data.get('source', None)
    method = bundle.data.get('method', None)
    # BUG FIX: description was previously read from the 'reference' key,
    # silently dropping any 'description' the API client supplied.
    description = bundle.data.get('description', None)
    relationship = bundle.data.get('relationship', None)
    parent_id = bundle.data.get('related_id', None)
    parent_md5 = bundle.data.get('related_md5', None)
    parent_type = bundle.data.get('related_type', None)
    bucket_list = bundle.data.get('bucket_list', None)
    ticket = bundle.data.get('ticket', None)
    result = handle_pcap_file(filename,
                              filedata,
                              source,
                              analyst,
                              description,
                              parent_id=parent_id,
                              parent_md5=parent_md5,
                              parent_type=parent_type,
                              method=method,
                              relationship=relationship,
                              bucket_list=bucket_list,
                              ticket=ticket)
    if result['success']:
        return bundle
    else:
        raise BadRequest(result['message'])
def obj_create(self, bundle, **kwargs):
    """
    Handles creating PCAPs through the API.

    :param bundle: Bundle containing the information to create the PCAP.
    :type bundle: Tastypie Bundle object.
    :returns: Bundle object.
    :raises BadRequest: If filedata is not provided or creation fails.
    """
    analyst = bundle.request.user.username
    file_ = bundle.data.get('filedata', None)
    if not file_:
        raise BadRequest("Upload type of 'file' but no file uploaded.")
    filedata = file_.read()
    filename = str(file_)
    source = bundle.data.get('source', None)
    method = bundle.data.get('method', None)
    # BUG FIX: description was previously read from the 'reference' key,
    # silently dropping any 'description' the API client supplied.
    description = bundle.data.get('description', None)
    relationship = bundle.data.get('relationship', None)
    parent_id = bundle.data.get('related_id', None)
    parent_md5 = bundle.data.get('related_md5', None)
    parent_type = bundle.data.get('related_type', None)
    bucket_list = bundle.data.get('bucket_list', None)
    ticket = bundle.data.get('ticket', None)
    result = handle_pcap_file(filename,
                              filedata,
                              source,
                              analyst,
                              description,
                              parent_id=parent_id,
                              parent_md5=parent_md5,
                              parent_type=parent_type,
                              method=method,
                              relationship=relationship,
                              bucket_list=bucket_list,
                              ticket=ticket)
    if result['success']:
        return bundle
    else:
        raise BadRequest(result['message'])
def _process_pcap(self, pcap):
    """Store a pcap in CRITs if the running user holds PCAP write access."""
    self._debug("Processing PCAP.")
    self._notify()
    user = self.current_task.user
    org = get_user_organization(user)
    # Bail out early when the user lacks the PCAP write ACL.
    if not user.has_access_to(PCAPACL.WRITE):
        self._info("User does not have permission to add PCAP to CRITs")
        self._add_result("PCAP Processing Canceled",
                         "User does not have permission to add PCAP to CRITs")
        return
    digest = md5(pcap).hexdigest()
    result = handle_pcap_file("%s.pcap" % digest,
                              pcap,
                              org,
                              user=user,
                              related_id=str(self.obj.id),
                              related_type=self.obj._meta['crits_type'],
                              method=self.name)
    self._add_result("pcap_added", digest, {'md5': digest})
def process_pcap(self, pcap):
    """Save the service-produced pcap and relate it to the current TLO."""
    pcap_md5 = hashlib.md5(pcap).hexdigest()
    filename = "{}.pcap".format(self.obj.filename)
    # NOTE: using the TLO's own source here is inconsistent with the rest
    # of the code; due to a bug, get_user_organization(self.current_task.user)
    # raises an exception, so self.obj.source is used instead.
    ret = handle_pcap_file(filename,
                           pcap,
                           source_name=self.obj.source,
                           method=self.name,
                           tlp=self.obj.tlp,
                           reference=self.obj.filename,
                           user=str(self.current_task.user),
                           related_id=str(self.obj.id),
                           related_type=self.obj._meta['crits_type'])
    self._add_result("PCAPs", filename, {'md5': pcap_md5})
    self._notify()
def parse_cybox_object(self, cbx_obj, description='', ind_id=None):
    """
    Parse a CybOX object form a STIX doc. An object can contain
    multiple related_objects, which in turn can have their own
    related_objects, so this handles those recursively.

    Dispatches on the CybOX object's property type (Address, DomainName,
    HTTPSession, WhoisEntry, Artifact, File, EmailMessage, else Indicator)
    and creates the matching CRITs TLO unless self.preview is set, in
    which case no writes occur and results are previewed only.

    :param cbx_obj: The CybOX object to parse.
    :type cbx_obj: A CybOX object.
    :param description: Parent-level (e.g. Observable) description.
    :type description: str
    :param ind_id: The ID of a parent STIX Indicator.
    :type ind_id: str
    """
    # check for missing attributes
    if not cbx_obj or not cbx_obj.properties:
        if cbx_obj.idref: # just a reference, so nothing to parse
            return
        else:
            cbx_id = getattr(cbx_obj, 'id_', 'None')
            self.failed.append(("No valid object_properties was found!",
                                "Observable (%s)" % cbx_id,
                                cbx_id)) # note for display in UI
            return

    # Don't parse if already been parsed
    # This is for artifacts that are related to CybOX File Objects
    if cbx_obj.id_ in self.parsed:
        return

    try: # try to create CRITs object from Cybox Object
        analyst = self.source_instance.analyst
        item = cbx_obj.properties
        val = cbx_obj.id_
        if isinstance(item, Address) and not ind_id:
            if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                 'ipv4-netmask', 'ipv6-addr',
                                 'ipv6-net', 'ipv6-netmask'):
                imp_type = "IP"
                for value in item.address_value.values:
                    val = str(value).strip()
                    if self.preview:
                        res = None
                    else:
                        iptype = get_crits_ip_type(item.category)
                        if iptype:
                            res = ip_add_update(val,
                                                iptype,
                                                [self.source],
                                                analyst=analyst,
                                                is_add_indicator=True)
                        else:
                            res = {'success': False, 'reason': 'No IP Type'}
                    self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        # Separate 'if' (not elif): an Address above never matches these.
        if (not ind_id and (isinstance(item, DomainName) or
            (isinstance(item, URI) and item.type_ == 'Domain Name'))):
            imp_type = "Domain"
            for val in item.value.values:
                if self.preview:
                    res = None
                else:
                    res = upsert_domain(str(val), [self.source],
                                        username=analyst)
                self.parse_res(imp_type, str(val), cbx_obj, res, ind_id)
        elif isinstance(item, HTTPSession):
            imp_type = "RawData"
            val = cbx_obj.id_
            try:
                c_req = item.http_request_response[0].http_client_request
                hdr = c_req.http_request_header
                if hdr.raw_header:
                    # Raw header present: store it as RawData.
                    data = hdr.raw_header.value
                    title = "HTTP Header from STIX: %s" % self.package.id_
                    method = self.source_instance.method
                    ref = self.source_instance.reference
                    if self.preview:
                        res = None
                        val = title
                    else:
                        res = handle_raw_data_file(data,
                                                   self.source.name,
                                                   user=analyst,
                                                   description=description,
                                                   title=title,
                                                   data_type="HTTP Header",
                                                   tool_name="STIX",
                                                   tool_version=None,
                                                   method=method,
                                                   reference=ref)
                else:
                    # No raw header: fall back to a User-Agent Indicator.
                    imp_type = "Indicator"
                    ind_type = "HTTP Request Header Fields - User-Agent"
                    val = hdr.parsed_header.user_agent.value
                    val = ','.join(val) if isinstance(val, list) else val
                    if self.preview:
                        res = None
                    else:
                        res = handle_indicator_ind(val,
                                                   self.source,
                                                   ind_type,
                                                   IndicatorThreatTypes.UNKNOWN,
                                                   IndicatorAttackTypes.UNKNOWN,
                                                   analyst,
                                                   add_relationship=True,
                                                   description=description)
            except:
                # Broad catch: any unexpected HTTPSession shape is reported
                # as unsupported rather than aborting the whole parse.
                msg = "Unsupported use of 'HTTPSession' object."
                res = {'success': False, 'reason': msg}
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, WhoisEntry):
            # Not sure where else to put this
            imp_type = "RawData"
            val = cbx_obj.id_
            if item.remarks:
                data = item.remarks.value
                title = "WHOIS Entry from STIX: %s" % self.package.id_
                if self.preview:
                    res = None
                    val = title
                else:
                    res = handle_raw_data_file(data,
                                               self.source.name,
                                               user=analyst,
                                               description=description,
                                               title=title,
                                               data_type="Text",
                                               tool_name="WHOIS",
                                               tool_version=None,
                                               method=self.source_instance.method,
                                               reference=self.source_instance.reference)
            else:
                msg = "Unsupported use of 'WhoisEntry' object."
                res = {'success': False, 'reason': msg}
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, Artifact):
            # Not sure if this is right, and I believe these can be
            # encoded in a couple different ways.
            imp_type = "RawData"
            val = cbx_obj.id_
            rawdata = item.data.decode('utf-8')
            # TODO: find out proper ways to determine title, datatype,
            # tool_name, tool_version
            title = "Artifact for Event: STIX Document %s" % self.package.id_
            if self.preview:
                res = None
                val = title
            else:
                res = handle_raw_data_file(rawdata,
                                           self.source.name,
                                           user=analyst,
                                           description=description,
                                           title=title,
                                           data_type="Text",
                                           tool_name="STIX",
                                           tool_version=None,
                                           method=self.source_instance.method,
                                           reference=self.source_instance.reference)
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif (isinstance(item, File) and
              item.custom_properties and
              item.custom_properties[0].name == "crits_type" and
              item.custom_properties[0]._value == "Certificate"):
            # CRITs-flagged File: actually a Certificate TLO.
            imp_type = "Certificate"
            val = str(item.file_name)
            data = None
            if self.preview:
                res = None
            else:
                for rel_obj in item.parent.related_objects:
                    if isinstance(rel_obj.properties, Artifact):
                        data = rel_obj.properties.data
                        # mark related artifact consumed so the recursive
                        # walk does not parse it a second time
                        self.parsed.append(rel_obj.id_)
                res = handle_cert_file(val, data, self.source,
                                       user=analyst,
                                       description=description)
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, File) and self.has_network_artifact(item):
            # File with a related network-type Artifact: treat as PCAP.
            imp_type = "PCAP"
            val = str(item.file_name)
            data = None
            if self.preview:
                res = None
            else:
                for rel_obj in item.parent.related_objects:
                    if (isinstance(rel_obj.properties, Artifact) and
                        rel_obj.properties.type_ == Artifact.TYPE_NETWORK):
                        data = rel_obj.properties.data
                        self.parsed.append(rel_obj.id_)
                res = handle_pcap_file(val,
                                       data,
                                       self.source,
                                       user=analyst,
                                       description=description)
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, File):
            imp_type = "Sample"
            md5 = item.md5
            if md5:
                md5 = md5.lower()
            val = str(item.file_name or md5)
            # add sha1/sha256/ssdeep once handle_file supports it
            size = item.size_in_bytes
            data = None
            if item.file_path:
                path = "File Path: " + str(item.file_path)
                description += "\n" + path
            for rel_obj in item.parent.related_objects:
                if (isinstance(rel_obj.properties, Artifact) and
                    rel_obj.properties.type_ == Artifact.TYPE_FILE):
                    data = rel_obj.properties.data
                    self.parsed.append(rel_obj.id_)
            if not md5 and not data and val and val != "None":
                # filename only: store as a Win File Indicator instead
                imp_type = "Indicator"
                if self.preview:
                    res = None
                else:
                    res = handle_indicator_ind(val,
                                               self.source,
                                               "Win File",
                                               IndicatorThreatTypes.UNKNOWN,
                                               IndicatorAttackTypes.UNKNOWN,
                                               analyst,
                                               add_domain=True,
                                               add_relationship=True,
                                               description=description)
            elif md5 or data:
                if self.preview:
                    res = None
                else:
                    res = handle_file(val,
                                      data,
                                      self.source,
                                      user=analyst,
                                      md5_digest=md5,
                                      is_return_only_md5=False,
                                      size=size,
                                      description=description)
            else:
                val = cbx_obj.id_
                msg = "CybOX 'File' object has no MD5, data, or filename"
                res = {'success': False, 'reason': msg}
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, EmailMessage):
            imp_type = 'Email'
            id_list = []
            data = {}
            val = cbx_obj.id_
            get_attach = False
            data['raw_body'] = str(item.raw_body)
            data['raw_header'] = str(item.raw_header)
            data['helo'] = str(item.email_server)
            if item.header:
                data['subject'] = str(item.header.subject)
                if item.header.date:
                    data['date'] = item.header.date.value
                val = "Date: %s, Subject: %s" % (data.get('date', 'None'),
                                                 data['subject'])
                data['message_id'] = str(item.header.message_id)
                data['sender'] = str(item.header.sender)
                data['reply_to'] = str(item.header.reply_to)
                data['x_originating_ip'] = str(item.header.x_originating_ip)
                data['x_mailer'] = str(item.header.x_mailer)
                data['boundary'] = str(item.header.boundary)
                data['from_address'] = str(item.header.from_)
                if item.header.to:
                    data['to'] = [str(r) for r in item.header.to.to_list()]
            if data.get('date'): # Email TLOs must have a date
                data['source'] = self.source.name
                data['source_method'] = self.source_instance.method
                data['source_reference'] = self.source_instance.reference
                if self.preview:
                    res = None
                else:
                    res = handle_email_fields(data, analyst, "STIX")
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
                if not self.preview and res.get('status'):
                    id_list.append(cbx_obj.id_) # save ID for atchmnt rels
                    get_attach = True
            else: # Can't be an Email TLO, so save fields
                for x, key in enumerate(data):
                    if data[key] and data[key] != "None":
                        if key in ('raw_header', 'raw_body'):
                            if key == 'raw_header':
                                title = "Email Header from STIX Email: %s"
                                d_type = "Email Header"
                            else:
                                title = "Email Body from STIX Email: %s"
                                d_type = "Email Body"
                            imp_type = 'RawData'
                            title = title % cbx_obj.id_
                            if self.preview:
                                res = None
                            else:
                                res = handle_raw_data_file(data[key],
                                                           self.source,
                                                           analyst,
                                                           description,
                                                           title,
                                                           d_type,
                                                           "STIX",
                                                           self.stix_version)
                            self.parse_res(imp_type, title, cbx_obj,
                                           res, ind_id)
                        elif key == 'to':
                            imp_type = 'Target'
                            for y, addr in enumerate(data[key]):
                                tgt_dict = {'email_address': addr}
                                if self.preview:
                                    res = None
                                else:
                                    res = upsert_target(tgt_dict, analyst)
                                    if res['success']:
                                        get_attach = True
                                # synthesize a unique per-recipient ID so
                                # each Target gets its own parse result
                                tmp_obj = copy(cbx_obj)
                                tmp_obj.id_ = '%s-%s-%s' % (cbx_obj.id_, x, y)
                                self.parse_res(imp_type, addr, tmp_obj,
                                               res, ind_id)
                                self.ind2obj.setdefault(cbx_obj.id_,
                                                        []).append(tmp_obj.id_)
                                id_list.append(tmp_obj.id_)
                        else:
                            imp_type = 'Indicator'
                            if key in ('sender', 'reply_to', 'from_address'):
                                ind_type = "Address - e-mail"
                            elif 'ip' in key:
                                ind_type = "Address - ipv4-addr"
                            elif key == 'raw_body':
                                ind_type = "Email Message"
                            else:
                                ind_type = "String"
                            if self.preview:
                                res = None
                            else:
                                res = handle_indicator_ind(data[key],
                                                           self.source,
                                                           ind_type,
                                                           IndicatorThreatTypes.UNKNOWN,
                                                           IndicatorAttackTypes.UNKNOWN,
                                                           analyst,
                                                           add_domain=True,
                                                           add_relationship=True,
                                                           description=description)
                                if res['success']:
                                    get_attach = True
                            tmp_obj = copy(cbx_obj)
                            tmp_obj.id_ = '%s-%s' % (cbx_obj.id_, x)
                            self.parse_res(imp_type, data[key], tmp_obj,
                                           res, ind_id)
                            self.ind2obj.setdefault(cbx_obj.id_,
                                                    []).append(tmp_obj.id_)
                            id_list.append(tmp_obj.id_)
            if not self.preview:
                # Setup relationships between all Email attributes
                for oid in id_list:
                    for oid2 in id_list:
                        if oid != oid2:
                            self.relationships.append((oid,
                                                       RelationshipTypes.RELATED_TO,
                                                       oid2,
                                                       "High"))
                # Should check for attachments and add them here.
                if get_attach and item.attachments:
                    for attach in item.attachments:
                        rel_id = attach.to_dict()['object_reference']
                        for oid in id_list:
                            self.relationships.append((oid,
                                                       RelationshipTypes.CONTAINS,
                                                       rel_id,
                                                       "High"))
        else: # try to parse all other possibilities as Indicator
            imp_type = "Indicator"
            val = cbx_obj.id_
            c_obj = make_crits_object(item)
            # Ignore what was already caught above
            if (ind_id or c_obj.object_type not in IPTypes.values()):
                ind_type = c_obj.object_type
                for val in [str(v).strip() for v in c_obj.value if v]:
                    if ind_type:
                        # handle domains mislabeled as URLs
                        if c_obj.object_type == 'URI' and '/' not in val:
                            ind_type = "Domain"
                        if self.preview:
                            res = None
                        else:
                            res = handle_indicator_ind(val,
                                                       self.source,
                                                       ind_type,
                                                       IndicatorThreatTypes.UNKNOWN,
                                                       IndicatorAttackTypes.UNKNOWN,
                                                       analyst,
                                                       add_domain=True,
                                                       add_relationship=True,
                                                       description=description)
                        self.parse_res(imp_type, val, cbx_obj, res, ind_id)
    except Exception, e: # probably caused by cybox object we don't handle
        self.failed.append((e.message,
                            "%s (%s)" % (imp_type, val),
                            cbx_obj.id_)) # note for display in UI
def parse_observables(self, observables):
    """
    Parse list of observables in STIX doc.

    Flattens observable compositions into their component observables,
    then dispatches on each object's property type (Address, DomainName,
    Artifact, File, EmailMessage, else Indicator) to create the matching
    CRITs TLO. Failures are collected in self.failed for UI display.

    :param observables: List of STIX observables.
    :type observables: List of STIX observables.
    """
    analyst = self.source_instance.analyst
    for obs in observables: # for each STIX observable
        if obs.observable_composition:
            object_list = obs.observable_composition.observables
        else:
            object_list = [obs]
        for obs_comp in object_list:
            if not obs_comp.object_ or not obs_comp.object_.properties:
                self.failed.append(("No valid object_properties was found!",
                                    type(obs_comp).__name__,
                                    obs_comp.id_)) # note for display in UI
                continue
            try: # try to create CRITs object from observable
                item = obs_comp.object_.properties
                if isinstance(item, Address):
                    if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                         'ipv4-netmask', 'ipv6-addr',
                                         'ipv6-net', 'ipv6-netmask',
                                         'ipv6-subnet'):
                        imp_type = "IP"
                        for value in item.address_value.values:
                            ip = str(value).strip()
                            iptype = get_crits_ip_type(item.category)
                            if iptype:
                                res = ip_add_update(ip,
                                                    iptype,
                                                    [self.source],
                                                    analyst=analyst,
                                                    id=self.package.id_)
                                self.parse_res(imp_type, obs, res)
                # Separate 'if' (not elif): Address never matches DomainName.
                if isinstance(item, DomainName):
                    imp_type = "Domain"
                    for value in item.value.values:
                        res = upsert_domain(str(value),
                                            [self.source],
                                            username=analyst,
                                            id=self.package.id_)
                        self.parse_res(imp_type, obs, res)
                elif isinstance(item, Artifact):
                    # Not sure if this is right, and I believe these can be
                    # encoded in a couple different ways.
                    imp_type = "RawData"
                    rawdata = item.data.decode('utf-8')
                    description = "None"
                    # TODO: find out proper ways to determine title, datatype,
                    # tool_name, tool_version
                    title = "Artifact for Event: STIX Document %s" % self.package.id_
                    res = handle_raw_data_file(rawdata,
                                               self.source.name,
                                               user=analyst,
                                               description=description,
                                               title=title,
                                               data_type="Text",
                                               tool_name="STIX",
                                               tool_version=None,
                                               method=self.source_instance.method,
                                               reference=self.source_instance.reference)
                    self.parse_res(imp_type, obs, res)
                elif (isinstance(item, File) and
                      item.custom_properties and
                      item.custom_properties[0].name == "crits_type" and
                      item.custom_properties[0]._value == "Certificate"):
                    # CRITs-flagged File: actually a Certificate TLO.
                    imp_type = "Certificate"
                    description = "None"
                    filename = str(item.file_name)
                    data = None
                    for obj in item.parent.related_objects:
                        if isinstance(obj.properties, Artifact):
                            data = obj.properties.data
                    res = handle_cert_file(filename,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                    self.parse_res(imp_type, obs, res)
                elif isinstance(item, File) and self.has_network_artifact(item):
                    # File with a related network-type Artifact: treat as PCAP.
                    imp_type = "PCAP"
                    description = "None"
                    filename = str(item.file_name)
                    data = None
                    for obj in item.parent.related_objects:
                        if (isinstance(obj.properties, Artifact) and
                            obj.properties.type_ == Artifact.TYPE_NETWORK):
                            data = obj.properties.data
                    res = handle_pcap_file(filename,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                    self.parse_res(imp_type, obs, res)
                elif isinstance(item, File):
                    imp_type = "Sample"
                    filename = str(item.file_name)
                    md5 = item.md5
                    data = None
                    for obj in item.parent.related_objects:
                        if (isinstance(obj.properties, Artifact) and
                            obj.properties.type_ == Artifact.TYPE_FILE):
                            data = obj.properties.data
                    res = handle_file(filename,
                                      data,
                                      self.source,
                                      user=analyst,
                                      md5_digest=md5,
                                      is_return_only_md5=False,
                                      id=self.package.id_)
                    self.parse_res(imp_type, obs, res)
                    if item.extracted_features:
                        self.parse_filenames(item.extracted_features,
                                             res['object'].id)
                elif isinstance(item, EmailMessage):
                    imp_type = "Email"
                    data = {}
                    data['source'] = self.source.name
                    data['source_method'] = self.source_instance.method
                    data['source_reference'] = self.source_instance.reference
                    data['raw_body'] = str(item.raw_body)
                    data['raw_header'] = str(item.raw_header)
                    data['helo'] = str(item.email_server)
                    if item.header:
                        data['message_id'] = str(item.header.message_id)
                        data['subject'] = str(item.header.subject)
                        data['sender'] = str(item.header.sender)
                        data['reply_to'] = str(item.header.reply_to)
                        data['x_originating_ip'] = str(item.header.x_originating_ip)
                        data['x_mailer'] = str(item.header.x_mailer)
                        data['boundary'] = str(item.header.boundary)
                        data['from_address'] = str(item.header.from_)
                        data['date'] = item.header.date.value
                        if item.header.to:
                            data['to'] = [str(r) for r in item.header.to]
                        if item.header.cc:
                            data['cc'] = [str(r) for r in item.header.cc]
                    res = handle_email_fields(data,
                                              analyst,
                                              "STIX",
                                              id=self.package.id_)
                    # Should check for attachments and add them here.
                    self.parse_res(imp_type, obs, res)
                    if res.get('status') and item.attachments:
                        for attach in item.attachments:
                            rel_id = attach.to_dict()['object_reference']
                            self.relationships.append((obs.id_,
                                                       "Contains",
                                                       rel_id,
                                                       "High"))
                else: # try to parse all other possibilities as Indicator
                    imp_type = "Indicator"
                    obj = make_crits_object(item)
                    if obj.object_type == 'Address':
                        # This was already caught above
                        continue
                    else:
                        ind_type = obj.object_type
                        for value in obj.value:
                            if value and ind_type:
                                res = handle_indicator_ind(value.strip(),
                                                           self.source,
                                                           ind_type,
                                                           IndicatorThreatTypes.UNKNOWN,
                                                           IndicatorAttackTypes.UNKNOWN,
                                                           analyst,
                                                           add_domain=True,
                                                           add_relationship=True)
                                self.parse_res(imp_type, obs, res)
            except Exception, e: # probably caused by cybox object we don't handle
                self.failed.append((e.message,
                                    type(item).__name__,
                                    item.parent.id_)) # note for display in UI
def parse_observables(self, observables):
    """
    Parse list of observables in STIX doc.

    Each observable is dispatched on its CybOX property type (Address,
    DomainName, Artifact, File, EmailMessage, ...) and turned into the
    corresponding CRITs TLO via the matching handle_* / upsert_* helper.
    Per-item results are recorded through self.parse_res(); items that
    cannot be parsed are appended to self.failed for display in the UI.

    :param observables: List of STIX observables.
    :type observables: List of STIX observables.
    """

    analyst = self.source_instance.analyst

    for obs in observables: # for each STIX observable
        # Observables without object properties cannot be parsed at all.
        if not obs.object_ or not obs.object_.properties:
            self.failed.append(("No valid object_properties was found!",
                                type(obs).__name__,
                                obs.id_)) # note for display in UI
            continue
        try: # try to create CRITs object from observable
            item = obs.object_.properties
            if isinstance(item, Address):
                if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                     'ipv4-netmask', 'ipv6-addr',
                                     'ipv6-net', 'ipv6-netmask'):
                    imp_type = "IP"
                    for value in item.address_value.values:
                        ip = str(value).strip()
                        iptype = get_crits_ip_type(item.category)
                        if iptype:
                            res = ip_add_update(ip,
                                                iptype,
                                                [self.source],
                                                analyst=analyst,
                                                is_add_indicator=True)
                            self.parse_res(imp_type, obs, res)
            # NOTE(review): this is a plain `if`, not `elif`, so an Address
            # item also falls through this second chain; it ends up in the
            # final `else` branch, which skips it via the explicit
            # object_type == 'Address' `continue` below.
            if isinstance(item, DomainName):
                imp_type = "Domain"
                for value in item.value.values:
                    res = upsert_domain(str(value),
                                        [self.source],
                                        username=analyst)
                    self.parse_res(imp_type, obs, res)
            elif isinstance(item, Artifact):
                # Not sure if this is right, and I believe these can be
                # encoded in a couple different ways.
                imp_type = "RawData"
                rawdata = item.data.decode('utf-8')
                description = "None"
                # TODO: find out proper ways to determine title, datatype,
                #       tool_name, tool_version
                title = "Artifact for Event: STIX Document %s" % self.package.id_
                res = handle_raw_data_file(rawdata,
                                           self.source.name,
                                           user=analyst,
                                           description=description,
                                           title=title,
                                           data_type="Text",
                                           tool_name="STIX",
                                           tool_version=None,
                                           method=self.source_instance.method,
                                           reference=self.source_instance.reference)
                self.parse_res(imp_type, obs, res)
            elif (isinstance(item, File) and
                  item.custom_properties and
                  item.custom_properties[0].name == "crits_type" and
                  item.custom_properties[0]._value == "Certificate"):
                # File objects flagged via the custom "crits_type" property
                # are imported as Certificates, with the binary payload taken
                # from a related Artifact object.
                imp_type = "Certificate"
                description = "None"
                filename = str(item.file_name)
                data = None
                for obj in item.parent.related_objects:
                    if isinstance(obj.properties, Artifact):
                        data = obj.properties.data
                res = handle_cert_file(filename,
                                       data,
                                       self.source,
                                       user=analyst,
                                       description=description)
                self.parse_res(imp_type, obs, res)
            elif isinstance(item, File) and self.has_network_artifact(item):
                # A File with a related network-type Artifact is a PCAP.
                imp_type = "PCAP"
                description = "None"
                filename = str(item.file_name)
                data = None
                for obj in item.parent.related_objects:
                    if (isinstance(obj.properties, Artifact) and
                        obj.properties.type_ == Artifact.TYPE_NETWORK):
                        data = obj.properties.data
                res = handle_pcap_file(filename,
                                       data,
                                       self.source,
                                       user=analyst,
                                       description=description)
                self.parse_res(imp_type, obs, res)
            elif isinstance(item, File):
                # Any other File becomes a Sample; payload (if any) comes
                # from a related file-type Artifact.
                imp_type = "Sample"
                filename = str(item.file_name)
                md5 = item.md5
                data = None
                for obj in item.parent.related_objects:
                    if (isinstance(obj.properties, Artifact) and
                        obj.properties.type_ == Artifact.TYPE_FILE):
                        data = obj.properties.data
                res = handle_file(filename,
                                  data,
                                  self.source,
                                  user=analyst,
                                  md5_digest=md5,
                                  is_return_only_md5=False)
                self.parse_res(imp_type, obs, res)
            elif isinstance(item, EmailMessage):
                imp_type = "Email"
                data = {}
                data['source'] = self.source.name
                data['source_method'] = self.source_instance.method
                data['source_reference'] = self.source_instance.reference
                data['raw_body'] = str(item.raw_body)
                data['raw_header'] = str(item.raw_header)
                data['helo'] = str(item.email_server)
                if item.header:
                    data['message_id'] = str(item.header.message_id)
                    data['subject'] = str(item.header.subject)
                    data['sender'] = str(item.header.sender)
                    data['reply_to'] = str(item.header.reply_to)
                    data['x_originating_ip'] = str(item.header.x_originating_ip)
                    data['x_mailer'] = str(item.header.x_mailer)
                    data['boundary'] = str(item.header.boundary)
                    data['from_address'] = str(item.header.from_)
                    data['date'] = item.header.date.value
                    if item.header.to:
                        data['to'] = [str(r) for r in item.header.to.to_list()]
                res = handle_email_fields(data, analyst, "STIX")
                # Should check for attachments and add them here.
                self.parse_res(imp_type, obs, res)
                if res.get('status') and item.attachments:
                    # Record a relationship from the email to each attachment
                    # referenced by the STIX document.
                    for attach in item.attachments:
                        rel_id = attach.to_dict()['object_reference']
                        self.relationships.append((obs.id_,
                                                   "Contains",
                                                   rel_id,
                                                   "High"))
            else: # try to parse all other possibilities as Indicator
                imp_type = "Indicator"
                obj = make_crits_object(item)
                if obj.object_type == 'Address':
                    # This was already caught above
                    continue
                else:
                    ind_type = obj.object_type
                    for value in obj.value:
                        if value and ind_type:
                            res = handle_indicator_ind(value.strip(),
                                                       self.source,
                                                       ind_type,
                                                       IndicatorThreatTypes.UNKNOWN,
                                                       IndicatorAttackTypes.UNKNOWN,
                                                       analyst,
                                                       add_domain=True,
                                                       add_relationship=True)
                            self.parse_res(imp_type, obs, res)
        except Exception, e: # probably caused by cybox object we don't handle
            self.failed.append((e.message,
                                type(item).__name__,
                                item.parent.id_)) # note for display in UI
def add_object(type_, id_, object_type, source, method, reference, user,
               value=None, file_=None, add_indicator=False, get_objects=True,
               obj=None, is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}, **kwargs):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param user: The user adding this object.
    :type user: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param obj: The CRITs top-level object we are adding objects to.
                This is an optional parameter used mainly for performance
                reasons (by not querying mongo if we already have the
                top level-object).
    :type obj: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :param is_sort_relationships: Return all relationships and meta, sorted.
    :type is_sort_relationships: bool
    :param is_validate_only: Validate, but do not add to TLO.
    :type is_validate_only: bool
    :param is_validate_locally: Validate, but do not add b/c there is no TLO.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """

    results = {}
    if id_ == None:
        id_ = ""
    # Look the TLO up only when the caller did not supply it.
    if obj == None:
        obj = class_from_id(type_, id_)

    # Deferred import avoids a circular dependency with the indicator handlers.
    from crits.indicators.handlers import validate_indicator_value
    if value is not None:
        (value, error) = validate_indicator_value(value, object_type)
        if error:
            return {"success": False, "message": error}

    if is_validate_locally: # no obj provided
        results['success'] = True
        return results

    if not obj:
        results['message'] = "TLO could not be found"
        results['success'] = False
        return results

    try:
        # Object count before the add; compared afterwards to detect whether
        # the embedded object was actually appended (i.e. not a duplicate).
        cur_len = len(obj.obj)
        if file_:
            # For uploads: the object's value becomes the file MD5 and the
            # reference becomes the original filename.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename
        obj.add_object(object_type, value, source, method, reference, user)

        if is_validate_only == False:
            obj.save(username=user)

        new_len = len(obj.obj)
        if new_len > cur_len:
            if not is_validate_only:
                results['message'] = "Object added successfully!"
            results['success'] = True
            if file_:
                # do we have a pcap? (libpcap / pcapng magic bytes)
                if data[:4] in ('\xa1\xb2\xc3\xd4',
                                '\xd4\xc3\xb2\xa1',
                                '\x0a\x0d\x0d\x0a'):
                    handle_pcap_file(filename,
                                     data,
                                     source,
                                     user=user,
                                     related_id=id_,
                                     related_type=type_)
                else:
                    #XXX: MongoEngine provides no direct GridFS access so we
                    #     need to use pymongo directly.
                    col = settings.COL_OBJECTS
                    grid = mongo_connector("%s.files" % col)
                    # Store the payload only once per unique MD5.
                    if grid.find({'md5': md5sum}).count() == 0:
                        put_file(filename, data, collection=col)
            if add_indicator and is_validate_only == False:
                from crits.indicators.handlers import handle_indicator_ind
                campaign = obj.campaign if hasattr(obj, 'campaign') else None
                ind_res = handle_indicator_ind(value,
                                               source,
                                               object_type,
                                               IndicatorThreatTypes.UNKNOWN,
                                               IndicatorAttackTypes.UNKNOWN,
                                               user,
                                               method,
                                               reference,
                                               add_domain=True,
                                               campaign=campaign,
                                               cache=cache)
                if ind_res['success']:
                    ind = ind_res['object']
                    forge_relationship(class_=obj,
                                       right_class=ind,
                                       rel_type=RelationshipTypes.RELATED_TO,
                                       user=user,
                                       get_rels=is_sort_relationships)
                else:
                    results['message'] = "Object was added, but failed to add Indicator." \
                                         "<br>Error: " + ind_res.get('message')
            if is_sort_relationships == True:
                if file_ or add_indicator:
                    # does this line need to be here?
                    # obj.reload()
                    results['relationships'] = obj.sort_relationships(user, meta=True)
                else:
                    results['relationships'] = obj.sort_relationships(user, meta=True)
        else:
            results['message'] = "Object already exists! [Type: " + object_type + "][Value: " + value + "] "
            results['success'] = False
        if (get_objects):
            results['objects'] = obj.sort_objects()
        results['id'] = str(obj.id)
        return results
    except ValidationError, e:
        return {'success': False, 'message': str(e)}
def add_object(type_, id_, object_type, source, method, reference, tlp, user,
               value=None, file_=None, add_indicator=False, get_objects=True,
               tlo=None, is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache=None, **kwargs):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param tlp: The TLP for the indicator created from this object.
    :type tlp: str
    :param user: The user adding this object.
    :type user: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param tlo: The CRITs top-level object we are adding objects to.
                This is an optional parameter used mainly for performance
                reasons (by not querying mongo if we already have the
                top level-object).
    :type tlo: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :param is_sort_relationships: Return all relationships and meta, sorted
    :type is_sort_relationships: bool
    :param is_validate_only: Validate, but do not add to TLO.
    :type is_validate_only: bool
    :param is_validate_locally: Validate, but do not add b/c there is no TLO.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations. Defaults to a fresh dict.
    :type cache: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """

    # FIX: the previous signature used a mutable default (cache={}), which is
    # created once at definition time and shared across every call. Use a
    # None sentinel and create a fresh dict per call instead; passing an
    # explicit cache dict behaves exactly as before.
    if cache is None:
        cache = {}

    # if object_type is a validated indicator type, then validate value
    if value:
        from crits.indicators.handlers import validate_indicator_value
        (value, error) = validate_indicator_value(value, object_type)
        if error:
            return {"success": False, "message": error}

    if is_validate_locally: # no TLO provided
        return {"success": True}

    # Resolve the TLO only if the caller did not supply one.
    if not tlo:
        if type_ and id_:
            tlo = class_from_id(type_, id_)
        if not tlo:
            return {'success': False, 'message': "Failed to find TLO"}

    try:
        if file_:
            # For uploads: the object's value becomes the file MD5 and the
            # reference becomes the original filename.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename

        ret = tlo.add_object(object_type, value, source, method,
                             reference, user)
        if not ret['success']:
            msg = '%s! [Type: "%s"][Value: "%s"]'
            return {"success": False,
                    "message": msg % (ret['message'], object_type, value)}
        else:
            results = {'success': True}

        if not is_validate_only: # save the object
            tlo.update(add_to_set__obj=ret['object'])
            results['message'] = "Object added successfully"

        if file_:
            # do we have a pcap?
            if detect_pcap(data):
                handle_pcap_file(filename,
                                 data,
                                 source,
                                 user=user,
                                 related_id=id_,
                                 related_type=type_)
            else:
                #XXX: MongoEngine provides no direct GridFS access so we
                # need to use pymongo directly.
                col = settings.COL_OBJECTS
                grid = mongo_connector("%s.files" % col)
                # Store the payload only once per unique MD5.
                if grid.find({'md5': md5sum}).count() == 0:
                    put_file(filename, data, collection=col)

        if add_indicator and not is_validate_only:
            campaign = tlo.campaign if hasattr(tlo, 'campaign') else None
            from crits.indicators.handlers import handle_indicator_ind
            ind_res = handle_indicator_ind(value, source, object_type,
                                           IndicatorThreatTypes.UNKNOWN,
                                           IndicatorAttackTypes.UNKNOWN,
                                           user,
                                           source_method=method,
                                           source_reference=reference,
                                           source_tlp=tlp,
                                           add_domain=True,
                                           campaign=campaign,
                                           cache=cache)
            if ind_res['success']:
                forge_relationship(class_=tlo,
                                   right_class=ind_res['object'],
                                   rel_type=RelationshipTypes.RELATED_TO,
                                   user=user)
            else:
                msg = "Object added, but failed to add Indicator.<br>Error: %s"
                results['message'] = msg % ind_res.get('message')

        if is_sort_relationships == True:
            results['relationships'] = tlo.sort_relationships(user, meta=True)

        if get_objects:
            results['objects'] = tlo.sort_objects()

        results['id'] = str(tlo.id)
        return results
    except ValidationError as e:
        return {'success': False, 'message': str(e)}
def add_object(type_, id_, object_type, source, method, reference, tlp, user,
               value=None, file_=None, add_indicator=False, get_objects=True,
               tlo=None, is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}, **kwargs):
    """
    Attach an embedded object to a top-level object (TLO) in the database.

    :param type_: The top-level object type.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param tlp: The TLP for the indicator created from this object.
    :type tlp: str
    :param user: The user adding this object.
    :type user: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param tlo: The CRITs top-level object being modified; optional, mainly
                a performance aid so mongo is not queried again.
    :type tlo: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :param is_sort_relationships: Return all relationships and meta, sorted
    :type is_sort_relationships: bool
    :param is_validate_only: Validate, but do not add to TLO.
    :type is_validate_only: bool
    :param is_validate_locally: Validate, but do not add b/c there is no TLO.
    :type is_validate_locally: bool
    :param cache: Cached data for bulk-operation performance.
    :type cache: dict
    :returns: dict with keys "success" (boolean), "message" (str),
              "objects" (list), "relationships" (list)
    """

    # When the object type is a validated indicator type, normalize and
    # validate the supplied value before touching the database.
    if value:
        from crits.indicators.handlers import validate_indicator_value
        value, error = validate_indicator_value(value, object_type)
        if error:
            return {"success": False, "message": error}

    # Local-only validation: there is no TLO to attach to, so we are done.
    if is_validate_locally:
        return {"success": True}

    # Resolve the TLO if the caller did not hand one in.
    if not tlo:
        if type_ and id_:
            tlo = class_from_id(type_, id_)
        if not tlo:
            return {'success': False, 'message': "Failed to find TLO"}

    try:
        if file_:
            # Uploads: the embedded object's value is the file's MD5 and
            # the reference is replaced by the original filename.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename

        status = tlo.add_object(object_type, value, source, method,
                                reference, user)
        if not status['success']:
            template = '%s! [Type: "%s"][Value: "%s"]'
            return {"success": False,
                    "message": template % (status['message'],
                                           object_type, value)}

        outcome = {'success': True}

        if not is_validate_only:
            # Persist the embedded object atomically on the TLO document.
            tlo.update(add_to_set__obj=status['object'])
            outcome['message'] = "Object added successfully"

        if file_:
            if detect_pcap(data):
                # PCAP payloads are promoted to first-class PCAP TLOs.
                handle_pcap_file(filename,
                                 data,
                                 source,
                                 user=user,
                                 related_id=id_,
                                 related_type=type_)
            else:
                #XXX: MongoEngine provides no direct GridFS access so we
                # need to use pymongo directly.
                col = settings.COL_OBJECTS
                grid = mongo_connector("%s.files" % col)
                if grid.find({'md5': md5sum}).count() == 0:
                    put_file(filename, data, collection=col)

        if add_indicator and not is_validate_only:
            related_campaign = tlo.campaign if hasattr(tlo, 'campaign') else None
            from crits.indicators.handlers import handle_indicator_ind
            indicator_result = handle_indicator_ind(value, source, object_type,
                                                    IndicatorThreatTypes.UNKNOWN,
                                                    IndicatorAttackTypes.UNKNOWN,
                                                    user,
                                                    source_method=method,
                                                    source_reference=reference,
                                                    source_tlp=tlp,
                                                    add_domain=True,
                                                    campaign=related_campaign,
                                                    cache=cache)
            if indicator_result['success']:
                forge_relationship(class_=tlo,
                                   right_class=indicator_result['object'],
                                   rel_type=RelationshipTypes.RELATED_TO,
                                   user=user)
            else:
                err_fmt = "Object added, but failed to add Indicator.<br>Error: %s"
                outcome['message'] = err_fmt % indicator_result.get('message')

        if is_sort_relationships == True:
            outcome['relationships'] = tlo.sort_relationships(user, meta=True)

        if get_objects:
            outcome['objects'] = tlo.sort_objects()

        outcome['id'] = str(tlo.id)
        return outcome
    except ValidationError as e:
        return {'success': False, 'message': str(e)}
def add_object(type_, oid, object_type, name, source, method, reference,
               analyst, value=None, file_=None, add_indicator=False,
               get_objects=True, obj=None, is_sort_relationships=False,
               is_validate_only=False, is_validate_locally=False, cache={}):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param oid: The ObjectId of the top-level object.
    :type oid: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param name: The name of the ObjectType being added.
    :type name: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param analyst: The user adding this object.
    :type analyst: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param obj: The CRITs top-level object we are adding objects to.
                This is an optional parameter used mainly for performance
                reasons (by not querying mongo if we already have the
                top level-object).
    :type obj: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :param is_sort_relationships: Return all relationships and meta, sorted.
    :type is_sort_relationships: bool
    :param is_validate_only: Only validate, do not add.
    :type is_validate_only: bool
    :param is_validate_locally: Only validate, do not add.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """

    results = {}
    if oid == None:
        oid = ""
    # Look up the TLO only if the caller did not supply it.
    if obj == None:
        obj = class_from_id(type_, oid)
    if not obj:
        if is_validate_locally == True:
            # TODO: Perform some form of validation
            results['success'] = True
            return results
        else:
            results['message'] = "Could not find item to add object to."
            results['success'] = False
            return results

    # Inline value validation for URL / Address object types; everything
    # else is accepted as-is.
    if name == "URL" and "://" not in value.split('.')[0]:
        return {"success": False,
                "message": "URI - URL must contain protocol prefix (e.g. http://, https://, ftp://)"}
    elif object_type == "Address":
        if "ipv4" in name:
            try:
                validate_ipv4_address(value)
            except DjangoValidationError:
                return {"success": False, "message": "Invalid IPv4 address. "}
        elif "ipv6" in name:
            try:
                validate_ipv6_address(value)
            except DjangoValidationError:
                return {"success": False, "message": "Invalid IPv6 address. "}
        elif "cidr" in name:
            try:
                # CIDR sanity checks: must contain a prefix length, 0-128
                # overall, and no more than /32 for dotted (IPv4) networks.
                if '/' not in value:
                    raise ValidationError("")
                cidr_parts = value.split('/')
                if int(cidr_parts[1]) < 0 or int(cidr_parts[1]) > 128:
                    raise ValidationError("")
                if ':' not in cidr_parts[0] and int(cidr_parts[1]) > 32:
                    raise ValidationError("")
                validate_ipv46_address(cidr_parts[0])
            except (ValidationError, ValueError) as cidr_error:
                return {"success": False, "message": "Invalid CIDR address. "}

    try:
        # Object count before the add; compared afterwards to detect whether
        # the embedded object was actually appended (i.e. not a duplicate).
        cur_len = len(obj.obj)
        if file_:
            # For uploads: the object's value becomes the file MD5 and the
            # reference becomes the original filename.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename
        obj.add_object(object_type, name, value, source, method, reference,
                       analyst)
        if is_validate_only == False:
            obj.save(username=analyst)
        new_len = len(obj.obj)
        if new_len > cur_len:
            results['message'] = "Object added successfully!"
            results['success'] = True
            if file_:
                # do we have a pcap? (libpcap / pcapng magic bytes)
                if data[:4] in ('\xa1\xb2\xc3\xd4',
                                '\xd4\xc3\xb2\xa1',
                                '\x0a\x0d\x0d\x0a'):
                    handle_pcap_file(filename,
                                     data,
                                     source,
                                     user=analyst,
                                     related_id=oid,
                                     related_type=type_)
                else:
                    #XXX: MongoEngine provides no direct GridFS access so we
                    # need to use pymongo directly.
                    col = settings.COL_OBJECTS
                    grid = mongo_connector("%s.files" % col)
                    # Store the payload only once per unique MD5.
                    if grid.find({'md5': md5sum}).count() == 0:
                        put_file(filename, data, collection=col)
            if add_indicator and is_validate_only == False:
                from crits.indicators.handlers import handle_indicator_ind
                # Compose "type - name" for the indicator when they differ.
                if object_type != name:
                    object_type = "%s - %s" % (object_type, name)
                campaign = obj.campaign if hasattr(obj, 'campaign') else None
                ind_res = handle_indicator_ind(value,
                                               source,
                                               reference,
                                               object_type,
                                               analyst,
                                               method,
                                               add_domain=True,
                                               campaign=campaign,
                                               cache=cache)
                if ind_res['success']:
                    ind = ind_res['object']
                    forge_relationship(left_class=obj,
                                       right_class=ind,
                                       rel_type="Related_To",
                                       analyst=analyst,
                                       get_rels=is_sort_relationships)
                else:
                    results['message'] = "Object was added, but failed to add Indicator." \
                                         "<br>Error: " + ind_res.get('message')
            if is_sort_relationships == True:
                if file_ or add_indicator:
                    # does this line need to be here?
                    # obj.reload()
                    results['relationships'] = obj.sort_relationships(
                        analyst, meta=True)
                else:
                    results['relationships'] = obj.sort_relationships(
                        analyst, meta=True)
        else:
            results[
                'message'] = "Object already exists! [Type: " + object_type + "][Value: " + value + "] "
            results['success'] = False
        if (get_objects):
            results['objects'] = obj.sort_objects()
        results['id'] = str(obj.id)
        return results
    except ValidationError, e:
        return {'success': False, 'message': str(e)}
def parse_cybox_object(self, cbx_obj, description='', ind_id=None):
    """
    Parse a CybOX object form a STIX doc. An object can contain
    multiple related_objects, which in turn can have their own
    related_objects, so this handles those recursively.

    :param cbx_obj: The CybOX object to parse.
    :type cbx_obj: A CybOX object.
    :param description: Parent-level (e.g. Observable) description.
    :type description: str
    :param ind_id: The ID of a parent STIX Indicator.
    :type ind_id: str
    """

    # check for missing attributes
    if not cbx_obj or not cbx_obj.properties:
        if cbx_obj.idref: # just a reference, so nothing to parse
            return
        else:
            cbx_id = getattr(cbx_obj, 'id_', 'None')
            self.failed.append(("No valid object_properties was found!",
                                "Observable (%s)" % cbx_id,
                                cbx_id)) # note for display in UI
            return

    # Don't parse if already been parsed
    # This is for artifacts that are related to CybOX File Objects
    if cbx_obj.id_ in self.parsed:
        return

    try: # try to create CRITs object from Cybox Object
        analyst = self.source_instance.analyst
        item = cbx_obj.properties
        val = cbx_obj.id_
        # Addresses are imported as IPs unless a parent Indicator exists
        # (in which case the Indicator branch below handles them).
        if isinstance(item, Address) and not ind_id:
            if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                 'ipv4-netmask', 'ipv6-addr',
                                 'ipv6-net', 'ipv6-netmask'):
                imp_type = "IP"
                for value in item.address_value.values:
                    val = str(value).strip()
                    if self.preview:
                        res = None
                    else:
                        iptype = get_crits_ip_type(item.category)
                        if iptype:
                            res = ip_add_update(val,
                                                iptype,
                                                [self.source],
                                                analyst=analyst,
                                                is_add_indicator=True)
                        else:
                            res = {'success': False, 'reason': 'No IP Type'}
                    self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        # NOTE(review): plain `if`, not `elif` — an Address item also runs
        # through this second chain and can land in the final `else`,
        # where IPTypes membership filters it back out.
        if (not ind_id and (isinstance(item, DomainName) or
            (isinstance(item, URI) and item.type_ == 'Domain Name'))):
            imp_type = "Domain"
            for val in item.value.values:
                if self.preview:
                    res = None
                else:
                    res = upsert_domain(str(val),
                                        [self.source],
                                        username=analyst)
                self.parse_res(imp_type, str(val), cbx_obj, res, ind_id)
        elif isinstance(item, HTTPSession):
            # Raw headers become RawData; otherwise the User-Agent field
            # becomes an Indicator.
            imp_type = "RawData"
            val = cbx_obj.id_
            try:
                c_req = item.http_request_response[0].http_client_request
                hdr = c_req.http_request_header
                if hdr.raw_header:
                    data = hdr.raw_header.value
                    title = "HTTP Header from STIX: %s" % self.package.id_
                    method = self.source_instance.method
                    ref = self.source_instance.reference
                    if self.preview:
                        res = None
                        val = title
                    else:
                        res = handle_raw_data_file(data,
                                                   self.source.name,
                                                   user=analyst,
                                                   description=description,
                                                   title=title,
                                                   data_type="HTTP Header",
                                                   tool_name="STIX",
                                                   tool_version=None,
                                                   method=method,
                                                   reference=ref)
                else:
                    imp_type = "Indicator"
                    ind_type = "HTTP Request Header Fields - User-Agent"
                    val = hdr.parsed_header.user_agent.value
                    val = ','.join(val) if isinstance(val, list) else val
                    if self.preview:
                        res = None
                    else:
                        res = handle_indicator_ind(val,
                                                   self.source,
                                                   ind_type,
                                                   IndicatorThreatTypes.UNKNOWN,
                                                   IndicatorAttackTypes.UNKNOWN,
                                                   analyst,
                                                   add_relationship=True,
                                                   description=description)
            # NOTE(review): bare except — any attribute error in the
            # HTTPSession layout is reported as "unsupported use".
            except:
                msg = "Unsupported use of 'HTTPSession' object."
                res = {'success': False, 'reason': msg}
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, WhoisEntry):
            # No sure where else to put this
            imp_type = "RawData"
            val = cbx_obj.id_
            if item.remarks:
                data = item.remarks.value
                title = "WHOIS Entry from STIX: %s" % self.package.id_
                if self.preview:
                    res = None
                    val = title
                else:
                    res = handle_raw_data_file(data,
                                               self.source.name,
                                               user=analyst,
                                               description=description,
                                               title=title,
                                               data_type="Text",
                                               tool_name="WHOIS",
                                               tool_version=None,
                                               method=self.source_instance.method,
                                               reference=self.source_instance.reference)
            else:
                msg = "Unsupported use of 'WhoisEntry' object."
                res = {'success': False, 'reason': msg}
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, Artifact):
            # Not sure if this is right, and I believe these can be
            # encoded in a couple different ways.
            imp_type = "RawData"
            val = cbx_obj.id_
            rawdata = item.data.decode('utf-8')
            # TODO: find out proper ways to determine title, datatype,
            #       tool_name, tool_version
            title = "Artifact for Event: STIX Document %s" % self.package.id_
            if self.preview:
                res = None
                val = title
            else:
                res = handle_raw_data_file(rawdata,
                                           self.source.name,
                                           user=analyst,
                                           description=description,
                                           title=title,
                                           data_type="Text",
                                           tool_name="STIX",
                                           tool_version=None,
                                           method=self.source_instance.method,
                                           reference=self.source_instance.reference)
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif (isinstance(item, File) and
              item.custom_properties and
              item.custom_properties[0].name == "crits_type" and
              item.custom_properties[0]._value == "Certificate"):
            # File flagged via custom "crits_type" property -> Certificate;
            # payload comes from a related Artifact object.
            imp_type = "Certificate"
            val = str(item.file_name)
            data = None
            if self.preview:
                res = None
            else:
                for rel_obj in item.parent.related_objects:
                    if isinstance(rel_obj.properties, Artifact):
                        data = rel_obj.properties.data
                        # mark the artifact as consumed so it is not
                        # re-parsed on its own
                        self.parsed.append(rel_obj.id_)
                res = handle_cert_file(val, data, self.source,
                                       user=analyst,
                                       description=description)
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, File) and self.has_network_artifact(item):
            # File with a related network-type Artifact -> PCAP.
            imp_type = "PCAP"
            val = str(item.file_name)
            data = None
            if self.preview:
                res = None
            else:
                for rel_obj in item.parent.related_objects:
                    if (isinstance(rel_obj.properties, Artifact) and
                        rel_obj.properties.type_ == Artifact.TYPE_NETWORK):
                        data = rel_obj.properties.data
                        self.parsed.append(rel_obj.id_)
                res = handle_pcap_file(val, data, self.source,
                                       user=analyst,
                                       description=description)
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, File):
            imp_type = "Sample"
            md5 = item.md5
            if md5:
                md5 = md5.lower()
            val = str(item.file_name or md5)
            # add sha1/sha256/ssdeep once handle_file supports it
            size = item.size_in_bytes
            data = None
            if item.file_path:
                path = "File Path: " + str(item.file_path)
                description += "\n" + path
            for rel_obj in item.parent.related_objects:
                if (isinstance(rel_obj.properties, Artifact) and
                    rel_obj.properties.type_ == Artifact.TYPE_FILE):
                    data = rel_obj.properties.data
                    self.parsed.append(rel_obj.id_)
            # Filename only (no hash, no payload) -> import as a
            # "Win File" Indicator instead of a Sample.
            if not md5 and not data and val and val != "None":
                imp_type = "Indicator"
                if self.preview:
                    res = None
                else:
                    res = handle_indicator_ind(val,
                                               self.source,
                                               "Win File",
                                               IndicatorThreatTypes.UNKNOWN,
                                               IndicatorAttackTypes.UNKNOWN,
                                               analyst,
                                               add_domain=True,
                                               add_relationship=True,
                                               description=description)
            elif md5 or data:
                if self.preview:
                    res = None
                else:
                    res = handle_file(val, data, self.source,
                                      user=analyst,
                                      md5_digest=md5,
                                      is_return_only_md5=False,
                                      size=size,
                                      description=description)
            else:
                val = cbx_obj.id_
                msg = "CybOX 'File' object has no MD5, data, or filename"
                res = {'success': False, 'reason': msg}
            self.parse_res(imp_type, val, cbx_obj, res, ind_id)
        elif isinstance(item, EmailMessage):
            imp_type = 'Email'
            id_list = []
            data = {}
            val = cbx_obj.id_
            get_attach = False
            data['raw_body'] = str(item.raw_body)
            data['raw_header'] = str(item.raw_header)
            data['helo'] = str(item.email_server)
            if item.header:
                data['subject'] = str(item.header.subject)
                if item.header.date:
                    data['date'] = item.header.date.value
                val = "Date: %s, Subject: %s" % (data.get('date', 'None'),
                                                 data['subject'])
                data['message_id'] = str(item.header.message_id)
                data['sender'] = str(item.header.sender)
                data['reply_to'] = str(item.header.reply_to)
                data['x_originating_ip'] = str(item.header.x_originating_ip)
                data['x_mailer'] = str(item.header.x_mailer)
                data['boundary'] = str(item.header.boundary)
                data['from_address'] = str(item.header.from_)
                if item.header.to:
                    data['to'] = [str(r) for r in item.header.to.to_list()]
            if data.get('date'): # Email TLOs must have a date
                data['source'] = self.source.name
                data['source_method'] = self.source_instance.method
                data['source_reference'] = self.source_instance.reference
                if self.preview:
                    res = None
                else:
                    res = handle_email_fields(data, analyst, "STIX")
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
                if not self.preview and res.get('status'):
                    id_list.append(cbx_obj.id_) # save ID for atchmnt rels
                    get_attach = True
            else: # Can't be an Email TLO, so save fields
                # NOTE(review): enumerate() over a dict — in Python 2 the
                # iteration order (and therefore the synthesized tmp_obj
                # IDs "<id>-<x>") is arbitrary.
                for x, key in enumerate(data):
                    if data[key] and data[key] != "None":
                        if key in ('raw_header', 'raw_body'):
                            if key == 'raw_header':
                                title = "Email Header from STIX Email: %s"
                                d_type = "Email Header"
                            else:
                                title = "Email Body from STIX Email: %s"
                                d_type = "Email Body"
                            imp_type = 'RawData'
                            title = title % cbx_obj.id_
                            if self.preview:
                                res = None
                            else:
                                res = handle_raw_data_file(data[key],
                                                           self.source,
                                                           analyst,
                                                           description,
                                                           title,
                                                           d_type,
                                                           "STIX",
                                                           self.stix_version)
                            self.parse_res(imp_type, title, cbx_obj,
                                           res, ind_id)
                        elif key == 'to':
                            imp_type = 'Target'
                            for y, addr in enumerate(data[key]):
                                tgt_dict = {'email_address': addr}
                                if self.preview:
                                    res = None
                                else:
                                    res = upsert_target(tgt_dict, analyst)
                                    if res['success']:
                                        get_attach = True
                                tmp_obj = copy(cbx_obj)
                                tmp_obj.id_ = '%s-%s-%s' % (cbx_obj.id_, x, y)
                                self.parse_res(imp_type, addr, tmp_obj,
                                               res, ind_id)
                                self.ind2obj.setdefault(cbx_obj.id_,
                                                        []).append(tmp_obj.id_)
                                id_list.append(tmp_obj.id_)
                        else:
                            imp_type = 'Indicator'
                            if key in ('sender', 'reply_to', 'from_address'):
                                ind_type = "Address - e-mail"
                            elif 'ip' in key:
                                ind_type = "Address - ipv4-addr"
                            elif key == 'raw_body':
                                ind_type = "Email Message"
                            else:
                                ind_type = "String"
                            if self.preview:
                                res = None
                            else:
                                res = handle_indicator_ind(data[key],
                                                           self.source,
                                                           ind_type,
                                                           IndicatorThreatTypes.UNKNOWN,
                                                           IndicatorAttackTypes.UNKNOWN,
                                                           analyst,
                                                           add_domain=True,
                                                           add_relationship=True,
                                                           description=description)
                                if res['success']:
                                    get_attach = True
                            tmp_obj = copy(cbx_obj)
                            tmp_obj.id_ = '%s-%s' % (cbx_obj.id_, x)
                            self.parse_res(imp_type, data[key], tmp_obj,
                                           res, ind_id)
                            self.ind2obj.setdefault(cbx_obj.id_,
                                                    []).append(tmp_obj.id_)
                            id_list.append(tmp_obj.id_)
            if not self.preview:
                # Setup relationships between all Email attributes
                for oid in id_list:
                    for oid2 in id_list:
                        if oid != oid2:
                            self.relationships.append((oid,
                                                       RelationshipTypes.RELATED_TO,
                                                       oid2,
                                                       "High"))
            # Should check for attachments and add them here.
            if get_attach and item.attachments:
                for attach in item.attachments:
                    rel_id = attach.to_dict()['object_reference']
                    for oid in id_list:
                        self.relationships.append((oid,
                                                   RelationshipTypes.CONTAINS,
                                                   rel_id,
                                                   "High"))
        else: # try to parse all other possibilities as Indicator
            imp_type = "Indicator"
            val = cbx_obj.id_
            c_obj = make_crits_object(item)
            # Ignore what was already caught above
            if (ind_id or c_obj.object_type not in IPTypes.values()):
                ind_type = c_obj.object_type
                for val in [str(v).strip() for v in c_obj.value if v]:
                    if ind_type:
                        # handle domains mislabeled as URLs
                        if c_obj.object_type == 'URI' and '/' not in val:
                            ind_type = "Domain"
                        if self.preview:
                            res = None
                        else:
                            res = handle_indicator_ind(val,
                                                       self.source,
                                                       ind_type,
                                                       IndicatorThreatTypes.UNKNOWN,
                                                       IndicatorAttackTypes.UNKNOWN,
                                                       analyst,
                                                       add_domain=True,
                                                       add_relationship=True,
                                                       description=description)
                        self.parse_res(imp_type, val, cbx_obj, res, ind_id)
    except Exception, e: # probably caused by cybox object we don't handle
        self.failed.append((e.message,
                            "%s (%s)" % (imp_type, val),
                            cbx_obj.id_)) # note for display in UI
def add_object(type_, oid, object_type, name, source, method, reference, analyst, value=None, file_=None, add_indicator=False, get_objects=True, obj=None, is_sort_relationships=False, is_validate_only=False, is_validate_locally=False, cache={}): """ Add an object to the database. :param type_: The top-level object type. :type type_: str :param oid: The ObjectId of the top-level object. :type oid: str :param object_type: The type of the ObjectType being added. :type object_type: str :param name: The name of the ObjectType being added. :type name: str :param source: The name of the source adding this object. :type source: str :param method: The method for this object. :type method: str :param reference: The reference for this object. :type reference: str :param analyst: The user adding this object. :type analyst: str :param value: The value of the object. :type value: str :param file_: The file if the object is a file upload. :type file_: file handle. :param add_indicator: Also add an indicator for this object. :type add_indicator: bool :param get_objects: Return the formatted list of objects when completed. :type get_object: bool :param obj: The CRITs top-level object we are adding objects to. This is an optional parameter used mainly for performance reasons (by not querying mongo if we already have the top level-object). :type obj: :class:`crits.core.crits_mongoengine.CritsBaseAttributes` :param is_validate_only: Only validate, do not add. :type is_validate_only: bool :param is_validate_locally: Only validate, do not add. :type is_validate_locally: bool :param cache: Cached data, typically for performance enhancements during bulk operations. 
:type cache: dict :returns: dict with keys: "success" (boolean), "message" (str), "objects" (list), "relationships" (list) """ results = {} if oid == None: oid = "" if obj == None: obj = class_from_id(type_, oid) if not obj: if is_validate_locally == True: # TODO: Perform some form of validation results['success'] = True return results else: results['message'] = "Could not find item to add object to." results['success'] = False return results if name == "URL" and "://" not in value.split('.')[0]: return {"success" : False, "message" : "URI - URL must contain protocol prefix (e.g. http://, https://, ftp://)"} elif object_type == "Address": if "ipv4" in name: try: validate_ipv4_address(value) except DjangoValidationError: return {"success" : False, "message" : "Invalid IPv4 address. "} elif "ipv6" in name: try: validate_ipv6_address(value) except DjangoValidationError: return {"success" : False, "message" : "Invalid IPv6 address. "} elif "cidr" in name: try: if '/' not in value: raise ValidationError("") cidr_parts = value.split('/') if int(cidr_parts[1]) < 0 or int(cidr_parts[1]) > 128: raise ValidationError("") if ':' not in cidr_parts[0] and int(cidr_parts[1]) > 32: raise ValidationError("") validate_ipv46_address(cidr_parts[0]) except (ValidationError, ValueError) as cidr_error: return {"success" : False, "message" : "Invalid CIDR address. "} try: cur_len = len(obj.obj) if file_: data = file_.read() filename = file_.name md5sum = md5(data).hexdigest() value = md5sum reference = filename obj.add_object(object_type, name, value, source, method, reference, analyst) if is_validate_only == False: obj.save(username=analyst) new_len = len(obj.obj) if new_len > cur_len: results['message'] = "Object added successfully!" results['success'] = True if file_: # do we have a pcap? 
if data[:4] in ('\xa1\xb2\xc3\xd4', '\xd4\xc3\xb2\xa1', '\x0a\x0d\x0d\x0a'): handle_pcap_file(filename, data, source, user=analyst, related_id=oid, related_type=type_) else: #XXX: MongoEngine provides no direct GridFS access so we # need to use pymongo directly. col = settings.COL_OBJECTS grid = mongo_connector("%s.files" % col) if grid.find({'md5': md5sum}).count() == 0: put_file(filename, data, collection=col) if add_indicator and is_validate_only == False: from crits.indicators.handlers import handle_indicator_ind if object_type != name: object_type = "%s - %s" % (object_type, name) campaign = obj.campaign if hasattr(obj, 'campaign') else None ind_res = handle_indicator_ind(value, source, reference, object_type, analyst, method, add_domain=True, campaign=campaign, cache=cache) if ind_res['success']: ind = ind_res['object'] forge_relationship(left_class=obj, right_class=ind, rel_type="Related_To", analyst=analyst, get_rels=is_sort_relationships) else: results['message'] = "Object was added, but failed to add Indicator." \ "<br>Error: " + ind_res.get('message') if is_sort_relationships == True: if file_ or add_indicator: # does this line need to be here? # obj.reload() results['relationships'] = obj.sort_relationships(analyst, meta=True) else: results['relationships'] = obj.sort_relationships(analyst, meta=True) else: results['message'] = "Object already exists! [Type: " + object_type + "][Value: " + value + "] " results['success'] = False if (get_objects): results['objects'] = obj.sort_objects() results['id'] = str(obj.id) return results except ValidationError, e: return {'success': False, 'message': str(e)}
def obj_create(self, bundle, **kwargs):
    """
    Handles creating PCAPs through the API.

    :param bundle: Bundle containing the information to create the PCAP.
    :type bundle: Tastypie Bundle object.
    :returns: HttpResponse.
    """
    user = bundle.request.user
    upload = bundle.data.get('filedata', None)

    # Failure is the default; only flipped to 0 on a successful add.
    content = {'return_code': 1, 'type': 'PCAP'}
    if not upload:
        content['message'] = "Upload type of 'file' but no file uploaded."
        self.crits_response(content)

    pcap_bytes = upload.read()
    pcap_name = str(upload)

    fields = bundle.data
    source = fields.get('source', None)
    method = fields.get('method', None)
    reference = fields.get('reference', None)
    tlp = fields.get('tlp', 'amber')
    description = fields.get('description', None)
    relationship = fields.get('relationship', None)
    related_id = fields.get('related_id', None)
    related_md5 = fields.get('related_md5', None)
    related_type = fields.get('related_type', None)
    bucket_list = fields.get('bucket_list', None)
    ticket = fields.get('ticket', None)

    if not user.has_access_to(PCAPACL.WRITE):
        content['message'] = 'User does not have permission to create Object.'
        self.crits_response(content)

    result = handle_pcap_file(pcap_name, pcap_bytes, source, user,
                              description,
                              related_id=related_id,
                              related_md5=related_md5,
                              related_type=related_type,
                              method=method,
                              reference=reference,
                              tlp=tlp,
                              relationship=relationship,
                              bucket_list=bucket_list,
                              ticket=ticket)

    # Pass the handler's message/id back to the API caller; include a
    # detail URL when a new PCAP document was created.
    message = result.get('message')
    if message:
        content['message'] = message
    new_id = result.get('id')
    if new_id:
        content['url'] = reverse('api_dispatch_detail',
                                 kwargs={'resource_name': 'pcaps',
                                         'api_name': 'v1',
                                         'pk': new_id})
        content['id'] = new_id
    if result['success']:
        content['return_code'] = 0
    self.crits_response(content)
def pcap_tcpdump(pcap_md5, form, analyst):
    """
    Run tcpdump against a PCAP stored in CRITs and return the result text.

    The PCAP is read out of GridFS, written to a temporary file, and fed to
    the tcpdump binary configured for the MetaCap service with flags built
    from the submitted form. When 'carve' is selected (which requires a BPF
    filter), tcpdump writes a filtered PCAP which is added back to CRITs via
    handle_pcap_file, and an HTML link to it is returned instead of output.

    :param pcap_md5: MD5 of the PCAP to operate on.
    :param form: Form whose ``cleaned_data`` holds the tcpdump options.
    :param analyst: The user running the command.
    :returns: str -- tcpdump output, an HTML link to the carved PCAP,
              or an error message.
    """
    flag_list = []
    cleaned_data = form.cleaned_data

    # Make sure we can find tcpdump
    sc = get_config('MetaCap')
    tcpdump_bin = str(sc['tcpdump'])
    if not os.path.exists(tcpdump_bin):
        tcpdump_output = "Could not find tcpdump!"
        return tcpdump_output

    # Make sure we have a PCAP to work with
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return "No PCAP found"
    pcap_data = pcap.filedata.read()
    if not pcap_data:
        return "Could not get PCAP from GridFS: %s" % pcap_md5

    # Use the filename if it's there, otherwise the md5.
    # This is used for the description of the carved sample.
    if pcap.filename:
        pcap_filename = pcap.filename
    else:
        pcap_filename = pcap_md5

    # Setup tcpdump arguments
    if cleaned_data['sequence']:
        flag_list.append("-S")
    if cleaned_data['timestamp']:
        flag_list.append("%s" % cleaned_data['timestamp'])
    if cleaned_data['verbose']:
        flag_list.append("%s" % cleaned_data['verbose'])
    if cleaned_data['data']:
        flag_list.append("%s" % cleaned_data['data'])
    # force -nN
    flag_list.append("-nN")
    # if we need to carve
    if cleaned_data['carve']:
        if not cleaned_data['bpf']:
            return "Must supply a BPF filter to carve."
        # tcpdump will write (-w) the carved packets into this file
        new_pcap = tempfile.NamedTemporaryFile(delete=False)
        flag_list.append("-w")
        flag_list.append(new_pcap.name)
    if cleaned_data['bpf']:
        # strip double quotes so the filter goes through as one argv token
        flag_list.append('%s' % str(cleaned_data['bpf'].replace('"', '')))

    # write PCAP to disk
    # temp_out collects stdout and stderr
    # temp_pcap is the pcap to read
    # new_pcap is the pcap being written if carving
    temp_out = tempfile.NamedTemporaryFile(delete=False)
    temp_pcap = tempfile.NamedTemporaryFile(delete=False)
    pcap_name = temp_pcap.name
    temp_pcap.write(pcap_data)
    temp_pcap.close()
    args = [tcpdump_bin, '-r', temp_pcap.name] + flag_list
    tcpdump = Popen(args, stdout=temp_out, stderr=STDOUT)
    tcpdump.communicate()
    out_name = temp_out.name
    temp_out.seek(0)
    tcpdump_output = ''
    for line in iter(temp_out):
        tcpdump_output += "%s" % line
    temp_out.close()

    #delete temp files
    os.unlink(pcap_name)
    os.unlink(out_name)

    if cleaned_data['carve']:
        new_pcap_data = new_pcap.read()
        # 24 bytes is presumably a header-only capture with no packets
        # -- TODO confirm; the original author noted the format caveat:
        if len(new_pcap_data) > 24: # pcap-ng will change this.
            m = hashlib.md5()
            m.update(new_pcap_data)
            md5 = m.hexdigest()
            org = get_user_organization(analyst)
            result = handle_pcap_file("%s.pcap" % md5,
                                      new_pcap_data,
                                      org,
                                      user=analyst,
                                      description="%s of %s" % (cleaned_data['bpf'], pcap_filename),
                                      parent_id=pcap.id,
                                      parent_type="PCAP",
                                      method="MetaCap Tcpdumper")
            if result['success']:
                tcpdump_output = "<a href=\"%s\">View new pcap.</a>" % reverse('crits.pcaps.views.pcap_details', args=[result['md5']])
            else:
                tcpdump_output = result['message']
        else:
            tcpdump_output = "No packets matched the filter."
        os.unlink(new_pcap.name)
    return tcpdump_output
def add_object(type_, id_, object_type, source, method, reference, user,
               value=None, file_=None, add_indicator=False, get_objects=True,
               tlo=None, is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}, **kwargs):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param id_: The ObjectId of the top-level object.
    :type id_: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param user: The user adding this object.
    :type user: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param tlo: The CRITs top-level object we are adding objects to.
               This is an optional parameter used mainly for performance
               reasons (by not querying mongo if we already have the top
               level-object).
    :type tlo: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :param is_sort_relationships: Return sorted relationships when completed.
    :type is_sort_relationships: bool
    :param is_validate_only: Validate, but do not add to TLO.
    :type is_validate_only: bool
    :param is_validate_locally: Validate, but do not add b/c there is no TLO.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """
    results = {}
    obj = tlo
    if id_ == None:
        id_ = ""
    if obj == None:
        obj = class_from_id(type_, id_)

    # Normalize/validate the value the same way indicator values are checked.
    from crits.indicators.handlers import validate_indicator_value
    if value is not None:
        (value, error) = validate_indicator_value(value, object_type)
        if error:
            return {"success": False, "message": error}

    if is_validate_locally: # no obj provided
        results['success'] = True
        return results

    if not obj:
        results['message'] = "TLO could not be found"
        results['success'] = False
        return results

    try:
        # add_object() silently de-dupes; a successful add is detected by
        # comparing the object-list length before and after.
        cur_len = len(obj.obj)
        if file_:
            # File uploads are keyed by MD5; the filename becomes the
            # source reference.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename
        obj.add_object(object_type, value, source, method, reference, user)
        if is_validate_only == False:
            obj.save(username=user)
        new_len = len(obj.obj)
        if new_len > cur_len:
            if not is_validate_only:
                results['message'] = "Object added successfully!"
            results['success'] = True
            if file_:
                # do we have a pcap?
                if data[:4] in ('\xa1\xb2\xc3\xd4',
                                '\xd4\xc3\xb2\xa1',
                                '\x0a\x0d\x0d\x0a'):
                    handle_pcap_file(filename, data, source, user=user,
                                     related_id=id_, related_type=type_)
                else:
                    #XXX: MongoEngine provides no direct GridFS access so we
                    # need to use pymongo directly.
                    col = settings.COL_OBJECTS
                    grid = mongo_connector("%s.files" % col)
                    if grid.find({'md5': md5sum}).count() == 0:
                        put_file(filename, data, collection=col)
            if add_indicator and is_validate_only == False:
                from crits.indicators.handlers import handle_indicator_ind
                campaign = obj.campaign if hasattr(obj, 'campaign') else None
                ind_res = handle_indicator_ind(value,
                                               source,
                                               object_type,
                                               IndicatorThreatTypes.UNKNOWN,
                                               IndicatorAttackTypes.UNKNOWN,
                                               user,
                                               method,
                                               reference,
                                               add_domain=True,
                                               campaign=campaign,
                                               cache=cache)
                if ind_res['success']:
                    ind = ind_res['object']
                    forge_relationship(class_=obj,
                                       right_class=ind,
                                       rel_type=RelationshipTypes.RELATED_TO,
                                       user=user,
                                       get_rels=is_sort_relationships)
                else:
                    results['message'] = "Object was added, but failed to add Indicator." \
                                         "<br>Error: " + ind_res.get('message')
            if is_sort_relationships == True:
                if file_ or add_indicator:
                    # does this line need to be here?
                    # obj.reload()
                    results['relationships'] = obj.sort_relationships(user, meta=True)
                else:
                    results['relationships'] = obj.sort_relationships(user, meta=True)
        else:
            results['message'] = "Object already exists! [Type: " + object_type + "][Value: " + value + "] "
            results['success'] = False
        if (get_objects):
            results['objects'] = obj.sort_objects()
        results['id'] = str(obj.id)
        return results
    except ValidationError, e:
        return {'success': False, 'message': str(e)}
def add_object(type_, oid, object_type, name, source, method, reference,
               analyst, value=None, file_=None, add_indicator=False,
               get_objects=True, indicator_campaign=None,
               indicator_campaign_confidence=None, obj=None,
               is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param oid: The ObjectId of the top-level object.
    :type oid: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param name: The name of the ObjectType being added.
    :type name: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param analyst: The user adding this object.
    :type analyst: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param indicator_campaign: Campaign to assign to the new indicator.
    :type indicator_campaign: str
    :param indicator_campaign_confidence: Confidence for that campaign.
    :type indicator_campaign_confidence: str
    :param is_sort_relationships: Return sorted relationships when completed.
    :type is_sort_relationships: bool
    :param is_validate_only: Only validate, do not add.
    :type is_validate_only: bool
    :param is_validate_locally: Only validate, do not add.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :param obj: The CRITs top-level object we are adding objects to.
               This is an optional parameter used mainly for performance
               reasons (by not querying mongo if we already have the top
               level-object).
    :type obj: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """
    results = {}
    if oid == None:
        oid = ""
    if obj == None:
        obj = class_from_id(type_, oid)
    if not obj:
        if is_validate_locally == True:
            # TODO: Perform some form of validation
            results['success'] = True
            return results
        else:
            results['message'] = "Could not find item to add object to."
            results['success'] = False
            return results
    try:
        # add_object() silently de-dupes; detect a successful add by
        # comparing the object-list length before and after.
        cur_len = len(obj.obj)
        if file_:
            # File uploads are keyed by MD5; filename becomes the reference.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename
        obj.add_object(object_type, name, value, source, method, reference,
                       analyst)
        if is_validate_only == False:
            obj.save(username=analyst)
        new_len = len(obj.obj)
        if new_len > cur_len:
            results['message'] = "Object added successfully!"
            results['success'] = True
            if file_:
                # do we have a pcap?
                if data[:4] in ('\xa1\xb2\xc3\xd4',
                                '\xd4\xc3\xb2\xa1',
                                '\x0a\x0d\x0d\x0a'):
                    handle_pcap_file(filename, data, source, user=analyst,
                                     related_id=oid, related_type=type_)
                else:
                    #XXX: MongoEngine provides no direct GridFS access so we
                    # need to use pymongo directly.
                    col = settings.COL_OBJECTS
                    grid = mongo_connector("%s.files" % col)
                    if grid.find({'md5': md5sum}).count() == 0:
                        put_file(filename, data, collection=col)
            if add_indicator and is_validate_only == False:
                from crits.indicators.handlers import handle_indicator_ind
                if object_type != name:
                    object_type = "%s - %s" % (object_type, name)
                ind_res = handle_indicator_ind(
                    value,
                    source,
                    reference,
                    object_type,
                    analyst,
                    method=method,
                    add_domain=True,
                    campaign=indicator_campaign,
                    campaign_confidence=indicator_campaign_confidence,
                    cache=cache)
                if ind_res['success']:
                    ind = ind_res['object']
                    # Inherit campaigns from top level item when creating
                    # an indicator from an object if no campaigns were specified
                    if indicator_campaign == None and ind != None:
                        for campaign in obj.campaign:
                            ec = EmbeddedCampaign(
                                name=campaign.name,
                                confidence=campaign.confidence,
                                description="",
                                analyst=analyst,
                                date=datetime.datetime.now())
                            ind.add_campaign(ec)
                        ind.save(username=analyst)
                    forge_relationship(left_class=obj,
                                       right_class=ind,
                                       rel_type="Related_To",
                                       analyst=analyst,
                                       get_rels=is_sort_relationships)
            if is_sort_relationships == True:
                if file_ or add_indicator:
                    # does this line need to be here?
                    # obj.reload()
                    results['relationships'] = obj.sort_relationships(
                        analyst, meta=True)
                else:
                    results['relationships'] = obj.sort_relationships(
                        analyst, meta=True)
        else:
            results['message'] = "Object already exists! [Type: " + object_type + "][Value: " + value + "] "
            results['success'] = False
        if (get_objects):
            results['objects'] = obj.sort_objects()
        return results
    except ValidationError, e:
        return {'success': False, 'message': e}
def finish_task(self, task):
    """
    Finish a task: persist the task's result artifacts (samples,
    certificates, PCAPs) against the analyzed top-level object.
    """
    logger.debug("Finishing task %s" % task)
    self.update_task(task)

    klass = class_from_type(task.context.crits_type)
    query = self.get_db_query(task.context)
    sample = klass.objects(__raw__=query).first()

    if not task.files:
        logger.debug("No samples to add.")
    else:
        logger.debug("Adding samples")
        from crits.samples.handlers import handle_file
        for artifact in task.files:
            logger.debug("Adding %s" % artifact['filename'])
            #TODO: add in backdoor?, user
            handle_file(artifact['filename'], artifact['data'], sample.source,
                        related_md5=task.context.identifier,
                        campaign=sample.campaign,
                        method=task.service.name,
                        relationship=artifact['relationship'],
                        user=task.context.username,
                        )

    if not task.certificates:
        logger.debug("No certificates to add.")
    else:
        logger.debug("Adding certificates")
        from crits.certificates.handlers import handle_cert_file
        for artifact in task.certificates:
            logger.debug("Adding %s" % artifact['filename'])
            # XXX: Add campaign from source?
            handle_cert_file(artifact['filename'], artifact['data'],
                             sample.source,
                             related_md5=task.context.identifier,
                             related_type=task.context.crits_type,
                             method=task.service.name,
                             relationship=artifact['relationship'],
                             user=task.context.username,
                             )

    if not task.pcaps:
        logger.debug("No PCAPs to add.")
    else:
        logger.debug("Adding PCAPs")
        from crits.pcaps.handlers import handle_pcap_file
        for artifact in task.pcaps:
            logger.debug("Adding %s" % artifact['filename'])
            # XXX: Add campaign from source?
            handle_pcap_file(artifact['filename'], artifact['data'],
                             sample.source,
                             related_md5=task.context.identifier,
                             related_type=task.context.crits_type,
                             method=task.service.name,
                             relationship=artifact['relationship'],
                             user=task.context.username,
                             )
def add_object(type_, oid, object_type, name, source, method, reference,
               analyst, value=None, file_=None, add_indicator=False,
               get_objects=True, indicator_campaign=None,
               indicator_campaign_confidence=None, obj=None,
               is_sort_relationships=False, is_validate_only=False,
               is_validate_locally=False, cache={}):
    """
    Add an object to the database.

    :param type_: The top-level object type.
    :type type_: str
    :param oid: The ObjectId of the top-level object.
    :type oid: str
    :param object_type: The type of the ObjectType being added.
    :type object_type: str
    :param name: The name of the ObjectType being added.
    :type name: str
    :param source: The name of the source adding this object.
    :type source: str
    :param method: The method for this object.
    :type method: str
    :param reference: The reference for this object.
    :type reference: str
    :param analyst: The user adding this object.
    :type analyst: str
    :param value: The value of the object.
    :type value: str
    :param file_: The file if the object is a file upload.
    :type file_: file handle.
    :param add_indicator: Also add an indicator for this object.
    :type add_indicator: bool
    :param get_objects: Return the formatted list of objects when completed.
    :type get_objects: bool
    :param indicator_campaign: Campaign to assign to the new indicator.
    :type indicator_campaign: str
    :param indicator_campaign_confidence: Confidence for that campaign.
    :type indicator_campaign_confidence: str
    :param is_sort_relationships: Return sorted relationships when completed.
    :type is_sort_relationships: bool
    :param is_validate_only: Only validate, do not add.
    :type is_validate_only: bool
    :param is_validate_locally: Only validate, do not add.
    :type is_validate_locally: bool
    :param cache: Cached data, typically for performance enhancements
                  during bulk operations.
    :type cache: dict
    :param obj: The CRITs top-level object we are adding objects to.
               This is an optional parameter used mainly for performance
               reasons (by not querying mongo if we already have the top
               level-object).
    :type obj: :class:`crits.core.crits_mongoengine.CritsBaseAttributes`
    :returns: dict with keys:
              "success" (boolean),
              "message" (str),
              "objects" (list),
              "relationships" (list)
    """
    results = {}
    if oid == None:
        oid = ""
    if obj == None:
        obj = class_from_id(type_, oid)
    if not obj:
        if is_validate_locally == True:
            # TODO: Perform some form of validation
            results['success'] = True
            return results
        else:
            results['message'] = "Could not find item to add object to."
            results['success'] = False
            return results
    try:
        # add_object() silently de-dupes; detect a successful add by
        # comparing the object-list length before and after.
        cur_len = len(obj.obj)
        if file_:
            # File uploads are keyed by MD5; filename becomes the reference.
            data = file_.read()
            filename = file_.name
            md5sum = md5(data).hexdigest()
            value = md5sum
            reference = filename
        obj.add_object(object_type, name, value, source, method, reference,
                       analyst)
        if is_validate_only == False:
            obj.save(username=analyst)
        new_len = len(obj.obj)
        if new_len > cur_len:
            results['message'] = "Object added successfully!"
            results['success'] = True
            if file_:
                # do we have a pcap?
                if data[:4] in ('\xa1\xb2\xc3\xd4',
                                '\xd4\xc3\xb2\xa1',
                                '\x0a\x0d\x0d\x0a'):
                    # NOTE: this variant links the PCAP via parent_id/
                    # parent_type (the sibling variants use related_id).
                    handle_pcap_file(filename, data, source, user=analyst,
                                     parent_id=oid, parent_type=type_)
                else:
                    #XXX: MongoEngine provides no direct GridFS access so we
                    # need to use pymongo directly.
                    col = settings.COL_OBJECTS
                    grid = mongo_connector("%s.files" % col)
                    if grid.find({'md5': md5sum}).count() == 0:
                        put_file(filename, data, collection=col)
            if add_indicator and is_validate_only == False:
                from crits.indicators.handlers import handle_indicator_ind
                if object_type != name:
                    object_type = "%s - %s" % (object_type, name)
                ind_res = handle_indicator_ind(value, source, reference,
                                               object_type,
                                               analyst,
                                               method=method,
                                               add_domain=True,
                                               campaign=indicator_campaign,
                                               campaign_confidence=indicator_campaign_confidence,
                                               cache=cache)
                if ind_res['success']:
                    ind = ind_res['object']
                    # Inherit campaigns from top level item when creating
                    # an indicator from an object if no campaigns were specified
                    if indicator_campaign == None and ind != None:
                        for campaign in obj.campaign:
                            ec = EmbeddedCampaign(name=campaign.name,
                                                  confidence=campaign.confidence,
                                                  description="",
                                                  analyst=analyst,
                                                  date=datetime.datetime.now())
                            ind.add_campaign(ec)
                        ind.save(username=analyst)
                    forge_relationship(left_class=obj,
                                       right_class=ind,
                                       rel_type="Related_To",
                                       analyst=analyst,
                                       get_rels=is_sort_relationships)
            if is_sort_relationships == True:
                if file_ or add_indicator:
                    # does this line need to be here?
                    # obj.reload()
                    results['relationships'] = obj.sort_relationships(analyst, meta=True)
                else:
                    results['relationships'] = obj.sort_relationships(analyst, meta=True)
        else:
            results['message'] = "Object already exists! [Type: " + object_type + "][Value: " + value + "] "
            results['success'] = False
        if (get_objects):
            results['objects'] = obj.sort_objects()
        return results
    except ValidationError, e:
        return {'success': False, 'message': e}
def pcap_tcpdump(pcap_md5, form, analyst):
    """
    Run tcpdump against a PCAP stored in CRITs and return the result text.

    The PCAP is read out of GridFS, written to a temporary file, and fed to
    the tcpdump binary configured for the MetaCap service with flags built
    from the submitted form. When 'carve' is selected (which requires a BPF
    filter), tcpdump writes a filtered PCAP which is added back to CRITs via
    handle_pcap_file, and an HTML link to it is returned instead of output.

    :param pcap_md5: MD5 of the PCAP to operate on.
    :param form: Form whose ``cleaned_data`` holds the tcpdump options.
    :param analyst: The user running the command.
    :returns: str -- tcpdump output, an HTML link to the carved PCAP,
              or an error message.
    """
    flag_list = []
    cleaned_data = form.cleaned_data

    # Make sure we can find tcpdump
    sc = get_config("MetaCap")
    tcpdump_bin = str(sc["tcpdump"])
    if not os.path.exists(tcpdump_bin):
        tcpdump_output = "Could not find tcpdump!"
        return tcpdump_output

    # Make sure we have a PCAP to work with
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return "No PCAP found"
    pcap_data = pcap.filedata.read()
    if not pcap_data:
        return "Could not get PCAP from GridFS: %s" % pcap_md5

    # Use the filename if it's there, otherwise the md5.
    # This is used for the description of the carved sample.
    if pcap.filename:
        pcap_filename = pcap.filename
    else:
        pcap_filename = pcap_md5

    # Setup tcpdump arguments
    if cleaned_data["sequence"]:
        flag_list.append("-S")
    if cleaned_data["timestamp"]:
        flag_list.append("%s" % cleaned_data["timestamp"])
    if cleaned_data["verbose"]:
        flag_list.append("%s" % cleaned_data["verbose"])
    if cleaned_data["data"]:
        flag_list.append("%s" % cleaned_data["data"])
    # force -nN
    flag_list.append("-nN")
    # if we need to carve
    if cleaned_data["carve"]:
        if not cleaned_data["bpf"]:
            return "Must supply a BPF filter to carve."
        # tcpdump will write (-w) the carved packets into this file
        new_pcap = tempfile.NamedTemporaryFile(delete=False)
        flag_list.append("-w")
        flag_list.append(new_pcap.name)
    if cleaned_data["bpf"]:
        # strip double quotes so the filter goes through as one argv token
        flag_list.append("%s" % str(cleaned_data["bpf"].replace('"', "")))

    # write PCAP to disk
    # temp_out collects stdout and stderr
    # temp_pcap is the pcap to read
    # new_pcap is the pcap being written if carving
    temp_out = tempfile.NamedTemporaryFile(delete=False)
    temp_pcap = tempfile.NamedTemporaryFile(delete=False)
    pcap_name = temp_pcap.name
    temp_pcap.write(pcap_data)
    temp_pcap.close()
    args = [tcpdump_bin, "-r", temp_pcap.name] + flag_list
    tcpdump = Popen(args, stdout=temp_out, stderr=STDOUT)
    tcpdump.communicate()
    out_name = temp_out.name
    temp_out.seek(0)
    tcpdump_output = ""
    for line in iter(temp_out):
        tcpdump_output += "%s" % line
    temp_out.close()

    # delete temp files
    os.unlink(pcap_name)
    os.unlink(out_name)

    if cleaned_data["carve"]:
        new_pcap_data = new_pcap.read()
        # 24 bytes is presumably a header-only capture with no packets
        # -- TODO confirm; the original author noted the format caveat:
        if len(new_pcap_data) > 24:  # pcap-ng will change this.
            m = hashlib.md5()
            m.update(new_pcap_data)
            md5 = m.hexdigest()
            org = get_user_organization(analyst)
            result = handle_pcap_file(
                "%s.pcap" % md5,
                new_pcap_data,
                org,
                user=analyst,
                description="%s of %s" % (cleaned_data["bpf"], pcap_filename),
                parent_id=pcap.id,
                parent_type="PCAP",
                method="MetaCap Tcpdumper",
            )
            if result["success"]:
                tcpdump_output = '<a href="%s">View new pcap.</a>' % reverse(
                    "crits.pcaps.views.pcap_details", args=[result["md5"]]
                )
            else:
                tcpdump_output = result["message"]
        else:
            tcpdump_output = "No packets matched the filter."
        os.unlink(new_pcap.name)
    return tcpdump_output
def finish_task(self, task):
    """
    Finish a task: persist the task's result artifacts (samples,
    certificates, PCAPs) against the analyzed top-level object.

    NOTE: this variant links artifacts via parent_md5/parent_type
    (the sibling variant uses related_md5/related_type).
    """
    logger.debug("Finishing task %s" % task)
    self.update_task(task)
    # Look up the analyzed top-level document to pull source/campaign from.
    obj = class_from_type(task.context.crits_type)
    query = self.get_db_query(task.context)
    sample = obj.objects(__raw__=query).first()
    if task.files:
        logger.debug("Adding samples")
        for f in task.files:
            logger.debug("Adding %s" % f['filename'])
            #TODO: add in backdoor?, user
            from crits.samples.handlers import handle_file
            handle_file(f['filename'], f['data'], sample.source,
                        parent_md5=task.context.identifier,
                        campaign=sample.campaign,
                        method=task.service.name,
                        relationship=f['relationship'],
                        user=task.context.username,
                        )
    else:
        logger.debug("No samples to add.")
    if task.certificates:
        logger.debug("Adding certificates")
        for f in task.certificates:
            logger.debug("Adding %s" % f['filename'])
            from crits.certificates.handlers import handle_cert_file
            # XXX: Add campaign from source?
            handle_cert_file(f['filename'], f['data'], sample.source,
                             parent_md5=task.context.identifier,
                             parent_type=task.context.crits_type,
                             method=task.service.name,
                             relationship=f['relationship'],
                             user=task.context.username,
                             )
    else:
        logger.debug("No certificates to add.")
    if task.pcaps:
        logger.debug("Adding PCAPs")
        for f in task.pcaps:
            logger.debug("Adding %s" % f['filename'])
            from crits.pcaps.handlers import handle_pcap_file
            # XXX: Add campaign from source?
            handle_pcap_file(f['filename'], f['data'], sample.source,
                             parent_md5=task.context.identifier,
                             parent_type=task.context.crits_type,
                             method=task.service.name,
                             relationship=f['relationship'],
                             user=task.context.username,
                             )
    else:
        logger.debug("No PCAPs to add.")