def export_domain(domain):
    """Export an AIL domain as a MISP 'domain-ip' object, tagged with the
    domain's metadata tags."""
    domain_obj = Domain.Domain(domain)
    dict_metadata = domain_obj.get_domain_metadata(tags=True)
    dict_metadata['ports'] = ['80', '223', '443']

    # create domain-ip obj
    misp_object = MISPObject('domain-ip', standalone=True)
    misp_object.first_seen = dict_metadata['first_seen']
    misp_object.last_seen = dict_metadata['last_check']

    obj_attrs = [
        misp_object.add_attribute('first-seen', value=dict_metadata['first_seen']),
        misp_object.add_attribute('last-seen', value=dict_metadata['last_check']),
        misp_object.add_attribute('domain', value=domain),
    ]
    for port in dict_metadata['ports']:
        obj_attrs.append(misp_object.add_attribute('port', value=port))

    # add tags
    if dict_metadata['tags']:
        tag_misp_object_attributes(obj_attrs, dict_metadata['tags'])
    #print(misp_object.to_json())
    return misp_object
def handle_process(self, properties):
    """Convert CybOX process properties into MISP attribute triplets.

    Returns a tuple ``(object_name, attributes, references)`` where
    ``references`` is a dict of network-connection object uuids when the
    process has network connections, otherwise an empty string.
    """
    attributes = []
    if properties.creation_time:
        attributes.append(["datetime", properties.creation_time.value, "creation-time"])
    if properties.start_time:
        # Bug fix: this previously read properties.creation_time.value, so
        # the start-time attribute silently duplicated the creation time.
        attributes.append(["datetime", properties.start_time.value, "start-time"])
    attribute_type = "text"
    if properties.name:
        attributes.append([attribute_type, properties.name.value, "name"])
    if properties.pid:
        attributes.append([attribute_type, properties.pid.value, "pid"])
    if properties.parent_pid:
        attributes.append([attribute_type, properties.parent_pid.value, "parent-pid"])
    if properties.child_pid_list:
        for child in properties.child_pid_list:
            attributes.append([attribute_type, child.value, "child-pid"])
    # if properties.port_list:
    #     for port in properties.port_list:
    #         attributes.append(["src-port", port.port_value.value, "port"])
    if properties.network_connection_list:
        references = []
        for connection in properties.network_connection_list:
            object_name, object_attributes, _ = self.handle_network_connection(connection)
            object_uuid = str(uuid.uuid4())
            misp_object = MISPObject(object_name)
            misp_object.uuid = object_uuid
            for attribute in object_attributes:
                misp_object.add_attribute(**attribute)
            # NOTE(review): misp_object is built but never added to the event
            # here — presumably handled by the caller via the uuid; confirm.
            references.append(object_uuid)
        return "process", self.return_attributes(attributes), {"process_uuid": references}
    return "process", self.return_attributes(attributes), ""
def export_ail_item(item_id, tags=None):
    """Export an AIL item as a MISP 'ail-leak' object.

    :param item_id: AIL item identifier
    :param tags: optional extra tags forced onto the object.
        Bug fix: was a mutable default argument (``tags=[]``), which is
        shared across calls and can leak tags between exports.
    """
    if tags is None:
        tags = []
    dict_metadata = Item.get_item({
        'id': item_id,
        'date': True,
        'tags': True,
        'raw_content': True
    })[0]
    # force tags
    for tag in tags:
        if tag not in dict_metadata['tags']:
            dict_metadata['tags'].append(tag)
    #obj = MISPObject('ail-item', standalone=True)
    obj = MISPObject('ail-leak', standalone=True)
    obj.first_seen = dict_metadata['date']
    l_obj_attr = []
    l_obj_attr.append(obj.add_attribute('first-seen', value=dict_metadata['date']))
    l_obj_attr.append(obj.add_attribute('raw-data', value=item_id, data=dict_metadata['raw_content']))
    l_obj_attr.append(obj.add_attribute('sensor', value=Export.get_ail_uuid()))
    # add tags
    if dict_metadata['tags']:
        tag_misp_object_attributes(l_obj_attr, dict_metadata['tags'])
    return obj
def observable_pe(self, observable):
    """Parse a STIX2 'windows-pebinary-ext' file observable into a MISP 'pe'
    object plus one 'pe-section' object per section, add them to the event,
    and return the file attributes together with the pe object's uuid.

    NOTE(review): observable['1'] hard-codes which entry of the observable
    dict holds the PE extension — confirm against the caller's layout.
    """
    extension = observable['1']['extensions']['windows-pebinary-ext']
    sections = extension['sections']
    pe = MISPObject('pe')
    pe_uuid = str(uuid.uuid4())
    pe.uuid = pe_uuid
    # pe_mapping maps extension fields onto pe object relations
    self.fill_object_attributes_observable(pe, pe_mapping, extension)
    for section in sections:
        pe_section = MISPObject('pe-section')
        if 'hashes' in section:
            for h_type, h_value in section['hashes'].items():
                # normalize hash names, e.g. 'SHA-256' -> 'sha256'
                h_type = h_type.lower().replace('-', '')
                pe_section.add_attribute(
                    **{
                        'type': h_type,
                        'object_relation': h_type,
                        'value': h_value,
                        'to_ids': False
                    })
        self.fill_object_attributes_observable(pe_section, pe_section_mapping, section)
        section_uuid = str(uuid.uuid4())
        pe_section.uuid = section_uuid
        # each section is linked back to the parent pe object
        pe.add_reference(section_uuid, 'included-in')
        self.misp_event.add_object(**pe_section)
    self.misp_event.add_object(**pe)
    return observable_file(observable), pe_uuid
def load(self):
    """Load the CSV file and convert each row into a MISPObject of the
    configured template, validating field names against the template."""
    objects = []
    with open(self.csv_path, newline='') as csvfile:
        reader = csv.reader(csvfile)
        if self.has_fieldnames:
            # The file has fieldnames, we either ignore it, or validate its validity
            header = [field.strip().lower() for field in next(reader)]
            if not self.fieldnames:
                self.fieldnames = header
        if not self.fieldnames:
            raise Exception('No fieldnames, impossible to create objects.')
        # Check if the CSV header matches the object template's relations
        template_object = MISPObject(self.template_name)
        allowed_fieldnames = list(template_object._definition['attributes'].keys())
        for fieldname in self.fieldnames:
            if fieldname not in allowed_fieldnames:
                raise Exception(f'{fieldname} is not a valid object relation for {self.template_name}: {allowed_fieldnames}')
        for row in reader:
            row_object = MISPObject(self.template_name)
            for object_relation, value in zip(self.fieldnames, row):
                row_object.add_attribute(object_relation, value=value)
            objects.append(row_object)
    return objects
def parse_pe(self, properties):
    """Parse CybOX PE file properties into a MISP 'pe' object (plus a
    'pe-section' object carrying the header data and one per PE section),
    add them to the event, and return {'pe_uuid': <uuid>}.
    """
    misp_object = MISPObject('pe')
    filename = properties.file_name.value
    # the same filename is exported under both relations
    for attr in ('internal-filename', 'original-filename'):
        misp_object.add_attribute(**dict(zip(('type', 'value', 'object_relation'),('filename', filename, attr))))
    if properties.headers:
        headers = properties.headers
        # header information is modelled as an extra pe-section object
        header_object = MISPObject('pe-section')
        if headers.entropy:
            header_object.add_attribute(**{"type": "float", "object_relation": "entropy", "value": headers.entropy.value.value})
        file_header = headers.file_header
        misp_object.add_attribute(**{"type": "counter", "object_relation": "number-sections", "value": file_header.number_of_sections.value})
        for h in file_header.hashes:
            hash_type, hash_value, hash_relation = self.handle_hashes_attribute(h)
            header_object.add_attribute(**{"type": hash_type, "value": hash_value, "object_relation": hash_relation})
        if file_header.size_of_optional_header:
            header_object.add_attribute(**{"type": "size-in-bytes", "object_relation": "size-in-bytes", "value": file_header.size_of_optional_header.value})
        self.misp_event.add_object(**header_object)
        misp_object.add_reference(header_object.uuid, 'included-in')
    if properties.sections:
        for section in properties.sections:
            section_uuid = self.parse_pe_section(section)
            misp_object.add_reference(section_uuid, 'included-in')
    self.misp_event.add_object(**misp_object)
    return {"pe_uuid": misp_object.uuid}
def add_sample(self):
    """Add the sample/target of the analysis"""
    target = self.report.get("target", {})
    category = target.get("category", "")
    if not category:
        log.warning("Could not find info about the sample "
                    "in the report, skipping")
        return False

    if category == "file":
        log.debug("Sample is a file, uploading it")
        self.read_malware()
        file_o, bin_type_o, bin_section_li = make_binary_objects(
            pseudofile=io.BytesIO(self.malware_binary),
            filename=target["file"]["name"],
        )
        file_o.comment = "Submitted sample"
        # fix categories on every created object, then attach them
        for obj in (file_o, bin_type_o, *bin_section_li):
            if not obj:
                continue
            for attr in obj.attributes:
                if attr.type in PAYLOAD_DELIVERY:
                    attr.category = "Payload delivery"
            self.event.add_object(obj)
    elif category == "url":
        log.debug("Sample is a URL")
        url_object = MISPObject(name='url')
        url_object.add_attribute('url', target['url'])
        url_object.add_attribute('text', "Submitted URL")
        self.event.add_object(url_object)
def to_misp_object(self, tag: bool) -> MISPObject:
    """Represent this URL artifact as a MISP 'url' object, optionally
    tagging the url attribute."""
    obj = MISPObject(name="url")
    comment = None
    if self.operations:
        comment = "Operations: " + ", ".join(self.operations)
    url_attribute = obj.add_attribute(
        "url",
        value=self.url,
        comment=comment,
        category="External analysis",
        to_ids=False,
    )
    if tag:
        self.tag_artifact_attribute(url_attribute)
    if self.domain:
        obj.add_attribute("domain", self.domain, category="External analysis", to_ids=False)
    for address in self.ips:
        obj.add_attribute("ip", address, category="External analysis", to_ids=False)
    return obj
def to_misp(self) -> List[MISPObject]:
    """MISP JSON output"""
    exported = []
    exported.extend(rsa_key.to_misp() for rsa_key in self.rsa_keys)
    exported.extend(curve.to_misp() for curve in self.ecdsa_curves)
    if self.keys:
        crypto_obj = MISPObject("crypto-material", standalone=False)
        for entry in self.keys:
            crypto_obj.add_attribute("type", entry[0])
            crypto_obj.add_attribute("generic-symmetric-key", entry[1])
        exported.append(crypto_obj)
    if self.passwords:
        credential_obj = MISPObject("credential", standalone=False)
        for secret in self.passwords:
            credential_obj.add_attribute("password", secret)
        exported.append(credential_obj)
    if self.mutexes:
        mutex_obj = MISPObject("mutex", standalone=False)
        for mutex_name in self.mutexes:
            mutex_obj.add_attribute("name", mutex_name)
        exported.append(mutex_obj)
    exported.extend(netloc.to_misp() for netloc in self.network_locations)
    # TODO self.dropped_filenames
    for recipient in self.emails:
        email_obj = MISPObject("email", standalone=False)
        email_obj.add_attribute("to", recipient)
        exported.append(email_obj)
    return exported
def _parse_ssl_certificate(self, certificate):
    """Convert an SSL certificate record into a MISP 'x509' object,
    referenced back to the queried attribute."""
    x509_object = MISPObject('x509')
    sha1_relation = 'x509-fingerprint-sha1'
    x509_object.add_attribute(sha1_relation, type=sha1_relation, value=certificate['fingerprint'])
    # (feature, subfeature) of the details dict -> (attribute type, relation)
    field_mapping = {
        ('subject', 'name'): ('text', 'subject'),
        ('issuer', 'common_name'): ('text', 'issuer'),
        ('signature', 'serial'): ('text', 'serial-number'),
        ('validity', 'valid_from'): ('datetime', 'validity-not-before'),
        ('validity', 'valid_to'): ('datetime', 'validity-not-after'),
    }
    details = certificate['details']
    for (feature, subfeature), (attribute_type, relation) in field_mapping.items():
        x509_object.add_attribute(relation, type=attribute_type, value=details[feature][subfeature])
    x509_object.add_reference(self.attribute.uuid, 'seen-by')
    self.misp_event.add_object(**x509_object)
def parse(self):
    """Query the passive DNS service for the attribute's value and attach
    each record as a 'passive-dns' object referencing the attribute."""
    if '|' in self.attribute.type:
        value = self.attribute.value.split('|')[0]
    else:
        value = self.attribute.value
    try:
        results = self.pdns.query(value)
    except Exception:
        self.result = {
            'error': 'There is an authentication error, please make sure you supply correct credentials.'
        }
        return
    if not results:
        self.result = {'error': 'Not found'}
        return
    # record field -> MISP attribute type
    field_types = {
        'count': 'counter',
        'origin': 'text',
        'time_first': 'datetime',
        'rrtype': 'text',
        'rrname': 'text',
        'rdata': 'text',
        'time_last': 'datetime'
    }
    for record in results:
        pdns_object = MISPObject('passive-dns')
        for relation, attribute_type in field_types.items():
            pdns_object.add_attribute(relation, type=attribute_type, value=record[relation])
        pdns_object.add_reference(self.attribute.uuid, 'associated-to')
        self.misp_event.add_object(**pdns_object)
def parse_ip(self, ip, recurse=False):
    """Query VirusTotal for an IP address, add an 'asn' object when ASN
    data is present, then process resolutions and related URLs.

    Returns the HTTP status code on failure, otherwise the result of
    parse_related_urls.
    """
    response = requests.get(self.base_url.format('ip-address'),
                            params={'apikey': self.apikey, 'ip': ip},
                            proxies=self.proxies)
    if response.status_code != 200:
        return response.status_code
    report = response.json()
    if report.get('asn'):
        asn_object = MISPObject('asn')
        asn_object.add_attribute('asn', type='AS', value=report['asn'])
        # optional ASN details -> (attribute type, relation)
        for key, (attribute_type, relation) in {
                'network': ('ip-src', 'subnet-announced'),
                'country': ('text', 'country')}.items():
            if report.get(key):
                asn_object.add_attribute(relation, type=attribute_type, value=report[key])
        self.misp_event.add_object(**asn_object)
    uuid = self.parse_resolutions(report['resolutions']) if report.get('resolutions') else None
    return self.parse_related_urls(report, recurse, uuid)
def parse_fileinfo(self):
    """Build a 'file' object from the report's fileinfo section and hand it
    to the architecture-specific parser when one exists."""
    fileinfo = self.data['fileinfo']
    file_object = MISPObject('file')
    self.analysisinfo_uuid = file_object.uuid
    # fields whose relation and attribute type share the same name
    for field in file_object_fields:
        file_object.add_attribute(field, type=field, value=fileinfo[field])
    # fields that map onto a differently named relation/type
    for field, (attribute_type, object_relation) in file_object_mapping.items():
        file_object.add_attribute(object_relation, type=attribute_type, value=fileinfo[field])
    arch = self.data['generalinfo']['arch']
    if arch in arch_type_mapping:
        # the arch-specific handler is responsible for adding the object
        getattr(self, arch_type_mapping[arch])(fileinfo, file_object)
    else:
        self.misp_event.add_object(**file_object)
def parse_registryactivities(self, process_uuid, registryactivities):
    """Record registry activity for a process: key creations feed the
    'regkey' attribute pool; other activities become 'registry-key'
    objects referenced from the process object."""
    if registryactivities['keyCreated']:
        for call in registryactivities['keyCreated']['call']:
            self.attributes['regkey'][call['path']].add((process_uuid, 'creates'))
    for feature, relationship in registry_references_mapping.items():
        if not registryactivities[feature]:
            continue
        for call in registryactivities[feature]['call']:
            registry_key = MISPObject('registry-key')
            for field, (attribute_type, object_relation) in regkey_object_mapping.items():
                registry_key.add_attribute(object_relation, type=attribute_type, value=call[field])
            registry_key.add_attribute('data-type', type='text', value='REG_{}'.format(call['type'].upper()))
            self.misp_event.add_object(**registry_key)
            self.references[process_uuid].append(
                dict(referenced_uuid=registry_key.uuid, relationship_type=relationship))
def parse_system_behavior(self):
    """Build a MISP 'process' object for each process in the sandbox system
    behavior report, dispatch its file/registry activities to their
    dedicated parsers, and reference every process from the analysis info
    object."""
    system = self.data['behavior']['system']
    if system.get('processes'):
        # activity sections and the parser handling each of them
        process_activities = {
            'fileactivities': self.parse_fileactivities,
            'registryactivities': self.parse_registryactivities
        }
        for process in system['processes']['process']:
            general = process['general']
            process_object = MISPObject('process')
            for feature, relation in process_object_fields.items():
                process_object.add_attribute(
                    relation, **{
                        'type': 'text',
                        'value': general[feature]
                    })
            # combine the report's separate date and time fields
            start_time = datetime.strptime(
                '{} {}'.format(general['date'], general['time']),
                '%d/%m/%Y %H:%M:%S')
            process_object.add_attribute(
                'start-time', **{
                    'type': 'datetime',
                    'value': start_time
                })
            self.misp_event.add_object(**process_object)
            for field, to_call in process_activities.items():
                if process.get(field):
                    to_call(process_object.uuid, process[field])
            self.references[self.analysisinfo_uuid].append(
                dict(referenced_uuid=process_object.uuid, relationship_type='calls'))
            # remember the uuid so later report sections can reference this process
            self.process_references[(
                general['targetid'], general['path'])] = process_object.uuid
def create_complex_event(self):
    """Build a 'Complex Event' fixture mixing event/attribute/object
    distributions, tags, plain attributes and a file object."""
    event = MISPEvent()
    event.info = 'Complex Event'
    event.distribution = Distribution.all_communities
    event.add_tag('tlp:white')
    for attr_type, attr_value in (('ip-src', '8.8.8.8'),
                                  ('ip-dst', '8.8.8.9'),
                                  ('domain', 'google.com'),
                                  ('md5', '3c656da41f4645f77e3ec3281b63dd43')):
        event.add_attribute(attr_type, attr_value)
    # vary the distribution of the first three attributes
    for index, distribution in enumerate((Distribution.your_organisation_only,
                                          Distribution.this_community_only,
                                          Distribution.connected_communities)):
        event.attributes[index].distribution = distribution
    # and tag them with distinct TLP levels
    for index, tag in enumerate(('tlp:red', 'tlp:amber', 'tlp:green')):
        event.attributes[index].add_tag(tag)
    obj = MISPObject('file')
    obj.distribution = Distribution.connected_communities
    obj.add_attribute('filename', 'testfile')
    obj.add_attribute('md5', '3c656da41f4645f77e3ec3281b63dd44')
    obj.attributes[0].distribution = Distribution.your_organisation_only
    event.add_object(obj)
    return event
def _get_dns_info(self, rrecord):
    """Query the Akamai ETP aggregate-report API for DNS activity on
    *rrecord* over the last 30 days (per 'deviceId' and per 'site') and
    attach the summary text as a 'misc' MISP object.

    NOTE(review): a fresh authenticated session is created for every
    dimension queried — presumably acceptable here; confirm.
    """
    aka_cust_object = MISPObject('misc')
    tagInfo=["source:AkamaiETP"]
    _text = ""
    dimensions = ['deviceId','site']
    for dimension in dimensions:
        #_result = self._run_custom_request(self, rrecord, dimension)
        session = requests.Session()
        session.auth = EdgeGridAuth(
            client_token = self.ctoken,
            client_secret = self.csecret,
            access_token = self.atoken
        )
        confID = self.configID
        epoch_time = int(time.time())
        last_30_days = epoch_time - 3600 * 24 * 30  # last month by default for now
        # filters is the URL-encoded JSON {"domain":{"in":["<rrecord>"]}}
        url = f'/etp-report/v2/configs/{str(confID)}' + \
            f'/dns-activities/aggregate?cardinality=2500&dimension={dimension}&endTimeSec={epoch_time}&filters' + \
            f'=%7B%22domain%22:%7B%22in%22:%5B%22{rrecord}%22%5D%7D%7D&startTimeSec={last_30_days}'
        _result = session.get(urljoin(self.baseurl, url)).json()
        if _result['dimension']['total'] != 0:
            _text += dimension + ' involved\n\n'
        if 'aggregations' in _result:
            for el in _result['aggregations']:
                name = el['name']
                _text += f"{name} : {el['total']} connections \n"
    # single summary attribute covering all queried dimensions
    aka_cust_object.add_attribute('Customer Attribution', type='text', value=str(_text), Tag=tagInfo, disable_correlation=True)
    self.incident_flag = "true"
    self.misp_event.add_object(**aka_cust_object)
def parse_vt_object(self, query_result):
    """Create a 'virustotal-report' object from a VT API response, add it
    to the event, and return its uuid."""
    report_object = MISPObject('virustotal-report')
    report_object.add_attribute('permalink', type='link', value=query_result['permalink'])
    ratio = '{}/{}'.format(query_result['positives'], query_result['total'])
    report_object.add_attribute('detection-ratio', type='text', value=ratio)
    self.misp_event.add_object(**report_object)
    return report_object.uuid
def __parse_weakness(self, vulnerability_uuid):
    """Fetch the CWE list from the API, build a 'weakness' object for the
    vulnerability's CWE, and reference it from the vulnerability object.

    :param vulnerability_uuid: uuid of the vulnerability object the
        weakness relates to.
    """
    cwe_string, cwe_id = self.vulnerability['cwe'].split('-')
    cwes = requests.get(self.api_url.replace('/cve/', '/cwe'))
    if cwes.status_code == 200:
        for cwe in cwes.json():
            if cwe['id'] == cwe_id:
                weakness_object = MISPObject('weakness')
                # Bug fix: the mapping was previously passed positionally,
                # making the whole dict the attribute *value*; it must be
                # expanded into keyword arguments like the calls below.
                weakness_object.add_attribute(
                    'id', **{
                        'type': 'weakness',
                        'value': f'{cwe_string}-{cwe_id}'
                    })
                for feature, relation in self.weakness_mapping.items():
                    if cwe.get(feature):
                        weakness_object.add_attribute(
                            relation, **{
                                'type': 'text',
                                'value': cwe[feature]
                            })
                self.misp_event.add_object(weakness_object)
                self.references[vulnerability_uuid].append({
                    'referenced_uuid': weakness_object.uuid,
                    'relationship_type': 'weakened-by'
                })
                break
def parse_domain(self, domain, recurse=False):
    """Query VirusTotal for a domain: parse siblings and resolutions,
    expand related samples per relationship type, attach whois data, and
    finally process related URLs.

    Returns an HTTP status code on failure, otherwise the result of
    parse_related_urls.
    """
    req = requests.get(self.base_url.format('domain'), params={
        'apikey': self.apikey,
        'domain': domain
    }, proxies=self.proxies)
    if req.status_code != 200:
        return req.status_code
    req = req.json()
    hash_type = 'sha256'
    whois = 'whois'
    # sample-list prefix -> MISP relationship for the file reference
    feature_types = {
        'communicating': 'communicates-with',
        'downloaded': 'downloaded-from',
        'referrer': 'referring'
    }
    # NOTE: lazy generator whose loop variable shadows the 'domain'
    # parameter; it is only consumed inside parse_resolutions
    siblings = (self.parse_siblings(domain) for domain in req['domain_siblings'])
    uuid = self.parse_resolutions(
        req['resolutions'], req['subdomains'] if 'subdomains' in req else None, siblings)
    for feature_type, relationship in feature_types.items():
        for feature in ('undetected_{}_samples', 'detected_{}_samples'):
            # self.limit caps how many samples are expanded per category
            for sample in req.get(feature.format(feature_type), [])[:self.limit]:
                status_code = self.parse_hash(sample[hash_type], False, uuid, relationship)
                if status_code != 200:
                    return status_code
    if req.get(whois):
        whois_object = MISPObject(whois)
        whois_object.add_attribute('text', type='text', value=req[whois])
        self.misp_event.add_object(**whois_object)
    return self.parse_related_urls(req, recurse, uuid)
def load(self):
    """Build MISPObject instances from the rows of the configured CSV file,
    validating field names against the object template first."""
    loaded_objects = []
    with open(self.csv_path, newline='') as csvfile:
        reader = csv.reader(csvfile)
        if self.has_fieldnames:
            # The file has fieldnames, we either ignore it, or validate its validity
            header = [column.strip().lower() for column in next(reader)]
            if not self.fieldnames:
                self.fieldnames = header
        if not self.fieldnames:
            raise Exception('No fieldnames, impossible to create objects.')
        # Check that every fieldname is a valid relation of the template
        allowed_fieldnames = list(MISPObject(self.template_name)._definition['attributes'].keys())
        for fieldname in self.fieldnames:
            if fieldname not in allowed_fieldnames:
                raise Exception(
                    f'{fieldname} is not a valid object relation for {self.template_name}: {allowed_fieldnames}'
                )
        for row in reader:
            row_object = MISPObject(self.template_name)
            for object_relation, value in zip(self.fieldnames, row):
                row_object.add_attribute(object_relation, value=value)
            loaded_objects.append(row_object)
    return loaded_objects
def export_domain(domain):
    """Export a crawled AIL domain as a MISP 'domain-crawled' object with
    one url attribute per crawled URL (keeping first/last seen per URL)."""
    domain_obj = Domain.Domain(domain)
    dict_metadata = domain_obj.get_domain_metadata(tags=True)

    # create domain-ip obj
    misp_object = MISPObject('domain-crawled', standalone=True)
    misp_object.first_seen = dict_metadata['first_seen']
    misp_object.last_seen = dict_metadata['last_check']

    obj_attrs = [misp_object.add_attribute('domain', value=domain)]
    dict_all_url = Domain.get_domain_all_url(domain, domain_obj.get_domain_type())
    for crawled_url, seen_info in dict_all_url.items():
        url_attribute = misp_object.add_attribute('url', value=crawled_url)
        url_attribute.first_seen = str(seen_info['first_seen'])
        url_attribute.last_seen = str(seen_info['last_seen'])
        obj_attrs.append(url_attribute)

    # add tags
    if dict_metadata['tags']:
        tag_misp_object_attributes(obj_attrs, dict_metadata['tags'])
    #print(misp_object.to_json())
    return misp_object
def __get_object_cve(self, item, cve):
    """Create a 'vulnerability' object for *cve* and, when item['ip'] is a
    single string, reference every event object holding that IP value.

    NOTE(review): when item['ip'] is a list, the matching attributes are
    collected into `attributes` but never used afterwards (no references
    are added) — looks incomplete; confirm the intended behavior.
    """
    attributes = []
    object_cve = MISPObject('vulnerability')
    object_cve.add_attribute('id', cve)
    object_cve.add_attribute('state', 'Published')
    if type(item['ip']) is list:
        for ip in item['ip']:
            attributes.extend(
                list(
                    filter(lambda x: x['value'] == ip,
                           self.misp_event['Attribute'])))
            for obj in self.misp_event['Object']:
                attributes.extend(
                    list(
                        filter(lambda x: x['value'] == ip,
                               obj['Attribute'])))
    if type(item['ip']) is str:
        for obj in self.misp_event['Object']:
            for att in obj['Attribute']:
                if att['value'] == item['ip']:
                    object_cve.add_reference(obj['uuid'], 'cve')
    self.misp_event.add_object(object_cve)
def parse_hash(self, sample, recurse=False, uuid=None, relationship=None):
    """Query VirusTotal for a file hash; on success build a
    'virustotal-report' object and, when hashes are returned, a linked
    'file' object. Returns the HTTP status code of the query."""
    req = requests.get(self.base_url.format('file'),
                       params={'apikey': self.apikey, 'resource': sample},
                       proxies=self.proxies)
    status_code = req.status_code
    if status_code == 200:
        report = req.json()
        vt_uuid = self.parse_vt_object(report)
        file_attributes = [
            {'type': hash_type, 'object_relation': hash_type, 'value': report[hash_type]}
            for hash_type in ('md5', 'sha1', 'sha256')
            if report.get(hash_type)
        ]
        if file_attributes:
            file_object = MISPObject('file')
            for attribute in file_attributes:
                file_object.add_attribute(**attribute)
            file_object.add_reference(vt_uuid, 'analyzed-with')
            if uuid and relationship:
                file_object.add_reference(uuid, relationship)
            self.misp_event.add_object(**file_object)
    return status_code
def add_network(self, proto=None):
    """
    Add UDP/TCP traffic

    proto must be one of "tcp", "udp"
    """
    # Bug fix: the default must be a dict — .get(proto) is called on it
    # below, and the previous default of [] would raise AttributeError
    # whenever the report has no "network" section.
    network = self.report.get("network", {})
    li_conn = network.get(proto, [])
    if not li_conn:
        log.info(f"No {proto} connection found in the report, skipping")
        return False
    seen_flows = set()
    # sort by time to get the "first packet seen" right
    li_conn.sort(key=lambda x: x["time"])
    for conn in li_conn:
        src = conn['src']
        dst = conn['dst']
        sport = conn['sport']
        dport = conn['dport']
        flow = (src, sport, dst, dport)
        if flow in seen_flows:
            # keep only the earliest packet of each flow
            continue
        seen_flows.add(flow)
        o = MISPObject(name='network-connection')
        o.add_attribute('ip-src', src)
        o.add_attribute('ip-dst', dst)
        o.add_attribute('src-port', sport)
        o.add_attribute('dst-port', dport)
        o.add_attribute('layer3-protocol', "IP")
        o.add_attribute('layer4-protocol', proto.upper())
        o.add_attribute('first-packet-seen', conn['time'])
        self.event.add_object(o)
def parse_and_insert_dnsdbflex(data: str):
    """Parse and validate the simpler dnsdbflex output data.

    Parameters
    ----------
    data : str
        ndjson input, one dnsdbflex entry per line.

    Returns
    -------
    A dict with either the error message or the passive-dns objects which
    may be sent back to the caller of handler().

    Raises
    ------
    none
    """
    objects = []
    try:
        entries = ndjson.loads(data)
        for entry in entries:  # iterate over all ndjson lines
            # validate here (simple validation or full JSON Schema validation)
            if not validate_dnsdbflex(entry):
                return {
                    "error": "Could not validate the dnsdbflex input '%s'" % entry
                }
            # Next, extract some fields
            rrtype = entry['rrtype'].upper()
            rrname = entry['rrname'].rstrip('.')
            # create a new MISP object, based on the passive-dns object for each nd-JSON line
            try:
                o = MISPObject(name='passive-dns', standalone=False, distribution=0, comment='DNSDBFLEX import by cof2misp')
                o.add_attribute('rrtype', value=rrtype, distribution=0, comment='DNSDBFLEX import by cof2misp')
                o.add_attribute('rrname', value=rrname, distribution=0, comment='DNSDBFLEX import by cof2misp')
                # Bug fix: append inside the try block — previously a failed
                # object creation still appended, re-serializing the previous
                # line's object (or raising NameError on the very first line).
                objects.append(o.to_json())
            except Exception as ex:
                print("could not create object. Reason: %s" % str(ex))
        r = {'results': {'Object': [json.loads(o) for o in objects]}}
    except Exception as ex:
        misperrors[
            "error"] = "An error occured during parsing of input: '%s'" % (
                str(ex), )
        return misperrors
    return r
def parse_url_analysis(self):
    """Create a 'url' object for the analyzed target URL and register its
    uuid as the analysis info object."""
    target_url = self.data["generalinfo"]["target"]["url"]
    url_object = MISPObject("url")
    self.analysisinfo_uuid = url_object.uuid
    url_object.add_attribute("url", target_url)
    self.misp_event.add_object(**url_object)
def test_to_dict_json_format(self):
    """to_dict(json_format=True) must equal the parsed to_json() output."""
    event = MISPEvent()
    signature_object = MISPObject("av-signature")
    signature_object.add_attribute("signature", "EICAR")
    signature_object.add_attribute("software", "ClamAv")
    event.add_object(signature_object)
    self.assertEqual(json.loads(event.to_json()),
                     event.to_dict(json_format=True))
def _create_vt_object(virustotal):
    """Map a VirusTotal report dict onto a 'virustotal-report' MISP object
    using the module-level vt_keys/vt_types/vt_relations tables."""
    vt_object = MISPObject('virustotal-report')
    for key, vt_type, relation in zip(vt_keys, vt_types, vt_relations):
        vt_object.add_attribute(relation, type=vt_type, value=virustotal[key])
    return vt_object
def __get_object_domain_ip(self, obs, relation):
    """Create a 'domain-ip' object holding *obs* under *relation*, pair it
    with the queried attribute's value when possible, and reference the
    source attribute."""
    domain_ip_object = MISPObject('domain-ip')
    domain_ip_object.add_attribute(relation, obs)
    relation_attr = self.__get_relation_attribute()
    if relation_attr:
        domain_ip_object.add_attribute(relation_attr, self.attribute['value'])
    domain_ip_object.add_reference(self.attribute['uuid'], 'related-to')
    self.misp_event.add_object(domain_ip_object)
def to_misp(self) -> MISPObject:
    """Export this RSA key as a MISP 'crypto-material' object."""
    material = MISPObject("crypto-material", standalone=False)
    material.add_attribute("type", "RSA")
    material.add_attribute("origin", "malware-extraction")
    # modulus exported as hex without the leading '0x'
    material.add_attribute("modulus", hex(self.n)[2:])
    material.add_attribute("e", self.e)
    if self.d is not None:
        # private exponent is only present for private keys
        material.add_attribute("d", self.d)
    return material
def add_hashes(self):
    """Add file hashes to the current MISP event.

    With no filename/hash CLI arguments, build a 'file' object from the
    file attached to the current session; otherwise add plain
    (filename|)hash attributes from the provided arguments. Always
    refreshes the event at the end via _change_event().
    """
    if self.args.filename is None and self.args.md5 is None and self.args.sha1 is None and self.args.sha256 is None:
        # no CLI arguments: fall back to the session's attached file
        if not __sessions__.is_attached_file(True):
            self.log('error', "Not attached to a file, please set the hashes manually.")
            return False
        file_object = MISPObject('file')
        file_object.add_attribute('filename', value=__sessions__.current.file.name, comment=__sessions__.current.file.tags)
        file_object.add_attribute('md5', value=__sessions__.current.file.md5, comment=__sessions__.current.file.tags)
        file_object.add_attribute('sha1', value=__sessions__.current.file.sha1, comment=__sessions__.current.file.tags)
        file_object.add_attribute('sha256', value=__sessions__.current.file.sha256, comment=__sessions__.current.file.tags)
        __sessions__.current.misp_event.event.add_object(file_object)
    else:
        if self.args.filename:
            # pair each provided hash with the filename
            if self.args.md5:
                __sessions__.current.misp_event.event.add_attribute('filename|md5', '{}|{}'.format(
                    self.args.filename, self.args.md5))
            if self.args.sha1:
                __sessions__.current.misp_event.event.add_attribute('filename|sha1', '{}|{}'.format(
                    self.args.filename, self.args.sha1))
            if self.args.sha256:
                __sessions__.current.misp_event.event.add_attribute('filename|sha256', '{}|{}'.format(
                    self.args.filename, self.args.sha256))
        else:
            # hashes only, no filename given
            if self.args.md5:
                __sessions__.current.misp_event.event.add_attribute('md5', self.args.md5)
            if self.args.sha1:
                __sessions__.current.misp_event.event.add_attribute('sha1', self.args.sha1)
            if self.args.sha256:
                __sessions__.current.misp_event.event.add_attribute('sha256', self.args.sha256)
    self._change_event()
def check_hashes(self):
    """Expand the file hashes of the current MISP event via VirusTotal.

    Walks the event's file objects and attributes, expands malware samples
    locally where possible, queries VT for the remaining hashes (respecting
    the 4 queries/min public-API quota), and optionally populates the event
    with the new file/VT objects when --populate is set.
    """
    if self.offline_mode:
        self.log('error', 'Offline mode, unable to query VirusTotal')
        return
    event_id = self._get_eventid()
    if event_id is None:
        return
    event = self.misp.get(event_id)
    if self._has_error_message(event):
        return
    misp_event = MISPEvent()
    misp_event.load(event)
    hashes_to_expand = {}
    hashes_expanded = []  # Thoses hashes are known and already processed
    local_samples_hashes = []
    partial_objects = {}
    for o in misp_event.Object:
        if o.name != 'file':
            continue
        if o.has_attributes_by_relation(['md5', 'sha1', 'sha256']):
            # This object has all the hashes we care about
            tmphashes = []
            tmphashes += [h.value for h in o.get_attributes_by_relation('md5')]
            tmphashes += [h.value for h in o.get_attributes_by_relation('sha1')]
            tmphashes += [h.value for h in o.get_attributes_by_relation('sha256')]
            # Make sure to query VT for the sha256, even if expanded locally
            hashes_to_expand[o.get_attributes_by_relation('sha256')[0].value] = o.get_attributes_by_relation('sha256')[0]
            if o.has_attributes_by_relation(['malware-sample']):
                # ... and it has a malware sample
                local_samples_hashes += tmphashes
            hashes_expanded += tmphashes
        elif o.has_attributes_by_relation(['malware-sample']):
            # This object has a malware sample, but is missing hashes. We can expand locally.
            # get the MD5 from the malware-sample attribute
            malware_sample = o.get_attributes_by_relation('malware-sample')[0]  # at most one sample/file object
            local_samples_hashes.append(malware_sample.value.split('|')[1])
            local_samples_hashes += [h.value for h in o.get_attributes_by_relation('md5')]
            local_samples_hashes += [h.value for h in o.get_attributes_by_relation('sha1')]
            local_samples_hashes += [h.value for h in o.get_attributes_by_relation('sha256')]
            if self.args.populate:
                # The object is missing hashes, keeping track of it for expansion if it isn't already done.
                partial_objects[o.uuid] = malware_sample
        else:
            # no sample: remember the strongest available hash for VT lookup
            sha256 = {attribute.value: attribute for attribute in o.get_attributes_by_relation('sha256')}
            sha1 = {attribute.value: attribute for attribute in o.get_attributes_by_relation('sha1')}
            md5 = {attribute.value: attribute for attribute in o.get_attributes_by_relation('md5')}
            if sha256:
                hashes_to_expand.update(sha256)
            elif sha1:
                hashes_to_expand.update(sha1)
            elif md5:
                hashes_to_expand.update(md5)
    for ref_uuid, sample in partial_objects.items():
        if sample.value.split('|')[1] in hashes_expanded:
            # Already expanded in an other object
            continue
        new_obj, hashes = self._expand_local_sample(pseudofile=sample.malware_binary,
                                                    filename=sample.value.split('|')[0],
                                                    refobj=ref_uuid,
                                                    default_attributes_paramaters=sample)
        misp_event.Object += new_obj
        local_samples_hashes += hashes
        # Make sure to query VT for the sha256, even if expanded locally
        hashes_to_expand[hashes[0]] = sample
    hashes_expanded += local_samples_hashes
    for a in misp_event.attributes:
        if a.type == 'malware-sample' and a.value.split('|')[1] not in hashes_expanded:
            # loose malware-sample attribute, expand it locally too
            new_obj, hashes = self._expand_local_sample(pseudofile=a.malware_binary,
                                                        filename=a.value.split('|')[0],
                                                        default_attributes_paramaters=a)
            misp_event.Object += new_obj
            local_samples_hashes += hashes
            # Make sure to query VT for the sha256, even if expanded locally
            hashes_to_expand[hashes[0]] = a
        elif a.type in ('filename|md5', 'filename|sha1', 'filename|sha256'):
            # We don't care if the hashes are in hashes_expanded or hashes_to_expand: they are firtered out later anyway
            fname, hashval = a.value.split('|')
            hashes_to_expand[hashval] = a
        elif a.type in ('md5', 'sha1', 'sha256'):
            # We don't care if the hashes are in hashes_expanded or hashes_to_expand: they are firtered out later anyway
            hashes_to_expand[a.value] = a
    unk_vt_hashes = []
    if cfg.virustotal.virustotal_has_private_key is False:
        # public API: throttle to 4 queries per minute
        quota = 4
        timeout = datetime.datetime.now() + datetime.timedelta(minutes=1)
    hashes_expanded += local_samples_hashes
    processed_on_vt = []
    # Make sure to start getting reports for the longest possible hashes (reduce risks of collisions)
    for to_expand in sorted(list(set(hashes_to_expand)), key=len):
        if to_expand in processed_on_vt:
            # Always run VT, once per sample
            continue
        original_attribute = hashes_to_expand[to_expand]
        if original_attribute.get('object_id'):
            original_object_id = original_attribute.get('object_id')
        vt_object = self._make_VT_object(to_expand, original_attribute)
        if not vt_object:
            unk_vt_hashes.append(to_expand)
            continue
        result = vt_object.get_report()
        md5 = result['md5']
        sha1 = result['sha1']
        sha256 = result['sha256']
        processed_on_vt += [sha256, sha1, md5]
        if all(h in local_samples_hashes for h in [md5, sha1, sha256]):
            self.log('success', 'Sample available in MISP:')
        else:
            self.log('success', 'Sample available in VT:')
        self.log('item', '{}\n\t{}\n\t{}\n\t{}'.format(result["permalink"], md5, sha1, sha256))
        if self.args.populate:
            if not all(h in hashes_expanded for h in [md5, sha1, sha256]):
                # If all the "new" expanded hashes are in the hashes_expanded list, skip
                file_object = MISPObject('file', default_attributes_paramaters=original_attribute)
                file_object.add_attribute('md5', value=md5)
                file_object.add_attribute('sha1', value=sha1)
                file_object.add_attribute('sha256', value=sha256)
                file_object.add_reference(vt_object.uuid, 'analysed-with')
                misp_event.Object.append(file_object)
                hashes_expanded += [md5, sha1, sha256]
            else:
                if not original_object_id or original_object_id == '0':
                    # Not an object, but the hashes are in an other object, skipping
                    continue
                else:
                    # We already have a MISP object, adding the link to the new VT object
                    file_object = misp_event.get_object_by_id(original_object_id)
                    file_object.add_reference(vt_object.uuid, 'analysed-with')
            misp_event.Object.append(vt_object)
        if cfg.virustotal.virustotal_has_private_key is False:
            # spend the quota, then wait until the minute window resets
            if quota > 0:
                quota -= 1
            else:
                waiting_time = (timeout - datetime.datetime.now()).seconds
                if waiting_time > 0:
                    self.log('warning', 'No private API key, 4 queries/min is the limit. Waiting for {} seconds.'.format(waiting_time))
                    time.sleep(waiting_time)
                quota = 4
                timeout = datetime.datetime.now() + datetime.timedelta(minutes=1)
    if self.args.populate:
        self._populate(misp_event)
    if len(unk_vt_hashes) > 0:
        self.log('error', 'Unknown on VT:')
        for h in unk_vt_hashes:
            self.log('item', '{}'.format(h))