def parse_pe(self, properties):
    """Parse CybOX WinExecutableFile properties into a MISP 'pe' object.

    Creates the main 'pe' object, an extra 'pe-section' object for the
    header data (entropy, header hashes, optional-header size) and one
    reference per parsed section, then returns {'pe_uuid': ...} so the
    caller can link a file object to this pe object.
    """
    misp_object = MISPObject('pe')
    filename = properties.file_name.value
    # The same file name is exported under both pe-template relations.
    for attr in ('internal-filename', 'original-filename'):
        misp_object.add_attribute(**dict(zip(('type', 'value', 'object_relation'), ('filename', filename, attr))))
    if properties.headers:
        headers = properties.headers
        # NOTE(review): header data is stored in a 'pe-section' object —
        # presumably reusing that template for header hashes/entropy;
        # confirm against the object template definition.
        header_object = MISPObject('pe-section')
        if headers.entropy:
            header_object.add_attribute(**{"type": "float", "object_relation": "entropy", "value": headers.entropy.value.value})
        file_header = headers.file_header
        misp_object.add_attribute(**{"type": "counter", "object_relation": "number-sections", "value": file_header.number_of_sections.value})
        for h in file_header.hashes:
            hash_type, hash_value, hash_relation = self.handle_hashes_attribute(h)
            header_object.add_attribute(**{"type": hash_type, "value": hash_value, "object_relation": hash_relation})
        if file_header.size_of_optional_header:
            header_object.add_attribute(**{"type": "size-in-bytes", "object_relation": "size-in-bytes", "value": file_header.size_of_optional_header.value})
        self.misp_event.add_object(**header_object)
        misp_object.add_reference(header_object.uuid, 'included-in')
    if properties.sections:
        # Each section is parsed into its own object; only the reference is
        # kept here.
        for section in properties.sections:
            section_uuid = self.parse_pe_section(section)
            misp_object.add_reference(section_uuid, 'included-in')
    self.misp_event.add_object(**misp_object)
    return {"pe_uuid": misp_object.uuid}
def observable_pe(self, observable):
    """Convert a STIX 2 windows-pebinary-ext extension into MISP objects.

    Builds a 'pe' object plus one 'pe-section' object per section and
    returns the converted file observable together with the pe uuid.
    """
    extension = observable['1']['extensions']['windows-pebinary-ext']
    sections = extension['sections']
    pe_object = MISPObject('pe')
    pe_uuid = str(uuid.uuid4())
    pe_object.uuid = pe_uuid
    self.fill_object_attributes_observable(pe_object, pe_mapping, extension)
    for section in sections:
        section_object = MISPObject('pe-section')
        # Hash names such as 'SHA-256' become lowercase relations ('sha256').
        for hash_name, hash_value in section.get('hashes', {}).items():
            relation = hash_name.lower().replace('-', '')
            section_object.add_attribute(**{'type': relation, 'object_relation': relation, 'value': hash_value, 'to_ids': False})
        self.fill_object_attributes_observable(section_object, pe_section_mapping, section)
        section_object.uuid = str(uuid.uuid4())
        pe_object.add_reference(section_object.uuid, 'included-in')
        self.misp_event.add_object(**section_object)
    self.misp_event.add_object(**pe_object)
    return observable_file(observable), pe_uuid
def parse_passivedns_results(self, query_response):
    """Convert passive DNS query results into MISP 'passive-dns' objects.

    Each result yields one object carrying the mandatory count / rrname /
    rrtype fields, any optional fields present, and one 'rdata' attribute
    per record value; every object references the queried attribute.
    """
    # Fix: the third mandatory field is 'rrtype' — it was duplicated as
    # 'rrname', so the record type was exported twice as the record name
    # and never as its own attribute.
    default_fields = ('count', 'rrname', 'rrtype')
    optional_fields = ('bailiwick', 'time_first', 'time_last', 'zone_time_first', 'zone_time_last')
    for query_type, results in query_response.items():
        comment = self.comment % (query_type, self.type_to_feature[self.attribute['type']], self.attribute['value'])
        for result in results:
            passivedns_object = MISPObject('passive-dns')
            for feature in default_fields:
                passivedns_object.add_attribute(**self._parse_attribute(comment, feature, result[feature]))
            for feature in optional_fields:
                if result.get(feature):
                    passivedns_object.add_attribute(**self._parse_attribute(comment, feature, result[feature]))
            # 'rdata' may be a single value or a list of record values.
            if isinstance(result['rdata'], list):
                for rdata in result['rdata']:
                    passivedns_object.add_attribute(**self._parse_attribute(comment, 'rdata', rdata))
            else:
                passivedns_object.add_attribute(**self._parse_attribute(comment, 'rdata', result['rdata']))
            passivedns_object.add_reference(self.attribute['uuid'], 'related-to')
            self.misp_event.add_object(passivedns_object)
def parse_vulnerabilities(self, vulnerabilities):
    """Map vulnerability dictionaries onto MISP 'vulnerability' objects,
    each referencing the queried attribute."""
    simple_features = ('id', 'summary', 'Modified', 'Published', 'cvss')
    list_features = ('references', 'vulnerable_configuration', 'vulnerable_configuration_cpe_2_2')
    for vulnerability in vulnerabilities:
        misp_object = MISPObject('vulnerability')
        for feature in simple_features:
            if vulnerability.get(feature):
                misp_object.add_attribute(**{'value': vulnerability[feature], **self.vulnerability_mapping[feature]})
        # A 'Published' date also marks the vulnerability state as published.
        if vulnerability.get('Published'):
            misp_object.add_attribute(**{'type': 'text', 'object_relation': 'state', 'value': 'Published'})
        for feature in list_features:
            if vulnerability.get(feature):
                for value in vulnerability[feature]:
                    # Reference entries may be dicts carrying a 'title'.
                    if isinstance(value, dict):
                        value = value['title']
                    misp_object.add_attribute(**{'value': value, **self.vulnerability_mapping[feature]})
        misp_object.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(misp_object)
def monthly_flask_o18(self, event, location, csv_file, update):
    """Import monthly flask O18 measurements from a Scripps CO2 CSV file
    into 'scrippsco2-o18-monthly' objects referencing *location*."""
    data = self.split_data_comment(csv_file, update, event)
    known_dates = []
    if update:
        # Collect sample datetimes already in the event so re-imports skip them.
        for existing in event.get_objects_by_name('scrippsco2-o18-monthly'):
            known_dates.append(existing.get_attributes_by_relation('sample-datetime')[0].value)
    for row in csv.reader(data):
        # Header/garbage rows do not start with a numeric year field.
        if not row[0].isdigit():
            continue
        # Monthly samples are pinned to the 16th of the month.
        sample_date = parse(f'{row[0]}-{row[1]}-16T00:00:00')
        if sample_date in known_dates:
            continue
        obj = MISPObject('scrippsco2-o18-monthly', standalone=False)
        obj.add_attribute('sample-datetime', sample_date)
        obj.add_attribute('sample-date-excel', float(row[2]))
        obj.add_attribute('sample-date-fractional', float(row[3]))
        obj.add_attribute('monthly-o18', float(row[4]))
        obj.add_attribute('monthly-o18-seasonal-adjustment', float(row[5]))
        obj.add_attribute('monthly-o18-smoothed', float(row[6]))
        obj.add_attribute('monthly-o18-smoothed-seasonal-adjustment', float(row[7]))
        obj.add_reference(location, 'sampling-location')
        event.add_object(obj)
def parse_hash(self, sample, recurse=False, uuid=None, relationship=None):
    """Query VirusTotal for a file hash and attach the report as MISP
    objects; returns the HTTP status code of the request."""
    response = requests.get(self.base_url.format('file'),
                            params={'apikey': self.apikey, 'resource': sample},
                            proxies=self.proxies)
    status_code = response.status_code
    if status_code == 200:
        report = response.json()
        vt_uuid = self.parse_vt_object(report)
        hash_attributes = [
            {'type': hash_type, 'object_relation': hash_type, 'value': report[hash_type]}
            for hash_type in ('md5', 'sha1', 'sha256')
            if report.get(hash_type)
        ]
        if hash_attributes:
            file_object = MISPObject('file')
            for attribute in hash_attributes:
                file_object.add_attribute(**attribute)
            file_object.add_reference(vt_uuid, 'analyzed-with')
            # Optionally link back to the object that triggered this lookup.
            if uuid and relationship:
                file_object.add_reference(uuid, relationship)
            self.misp_event.add_object(**file_object)
    return status_code
def __get_object_cve(self, item, cve):
    """Create a 'vulnerability' object for *cve* and reference the event
    object whose attribute value matches the result's IP.

    NOTE(review): when item['ip'] is a list, the matching attributes are
    collected into `attributes` but never used afterwards and no reference
    is added — this looks like dead or unfinished code; confirm intent.
    """
    attributes = []
    object_cve = MISPObject('vulnerability')
    object_cve.add_attribute('id', cve)
    object_cve.add_attribute('state', 'Published')
    if type(item['ip']) is list:
        for ip in item['ip']:
            # Gather event-level and object-level attributes matching the IP.
            attributes.extend(
                list(
                    filter(lambda x: x['value'] == ip, self.misp_event['Attribute'])))
            for obj in self.misp_event['Object']:
                attributes.extend(
                    list(
                        filter(lambda x: x['value'] == ip, obj['Attribute'])))
    if type(item['ip']) is str:
        for obj in self.misp_event['Object']:
            for att in obj['Attribute']:
                if att['value'] == item['ip']:
                    # Link the CVE object to the object holding the IP.
                    object_cve.add_reference(obj['uuid'], 'cve')
    self.misp_event.add_object(object_cve)
def daily_flask_o18(self, event, location, csv_file, update):
    """Import daily flask O18 measurements from a Scripps CO2 CSV file
    into 'scrippsco2-o18-daily' objects referencing *location*."""
    data = self.split_data_comment(csv_file, update, event)
    known_dates = []
    if update:
        # Collect sample datetimes already in the event so re-imports skip them.
        for existing in event.get_objects_by_name('scrippsco2-o18-daily'):
            known_dates.append(existing.get_attributes_by_relation('sample-datetime')[0].value)
    for row in csv.reader(data):
        sample_date = parse(f'{row[0]}-{row[1]}')
        if sample_date in known_dates:
            continue
        obj = MISPObject('scrippsco2-o18-daily', standalone=False)
        obj.add_attribute('sample-datetime', sample_date)
        obj.add_attribute('sample-date-excel', float(row[2]))
        obj.add_attribute('sample-date-fractional', float(row[3]))
        obj.add_attribute('number-flask', int(row[4]))
        obj.add_attribute('flag', int(row[5]))
        o18_attribute = obj.add_attribute('o18-value', float(row[6]))
        # Tag the measurement with its quality flag.
        o18_attribute.add_tag(f'scrippsco2-fgi:{int(row[5])}')
        obj.add_reference(location, 'sampling-location')
        event.add_object(obj)
def parse_resolutions(self, resolutions, subdomains=None, uuids=None):
    """Build a 'domain-ip' object from resolution records and return its
    uuid so callers can reference it."""
    domain_ip_object = MISPObject('domain-ip')
    # Seed the object with the queried value; the resolutions then supply
    # the complementary side (IPs for a domain, domains for an IP).
    if self.attribute.type in ('domain', 'hostname'):
        domain_ip_object.add_attribute('domain', type='domain', value=self.attribute.value)
        attribute_type, relation, key = 'ip-dst', 'ip', 'ip_address'
    else:
        domain_ip_object.add_attribute('ip', type='ip-dst', value=self.attribute.value)
        attribute_type, relation, key = 'domain', 'domain', 'hostname'
    for resolution in resolutions:
        domain_ip_object.add_attribute(relation, type=attribute_type, value=resolution[key])
    for subdomain in subdomains or []:
        # Subdomains become standalone event attributes, referenced here.
        subdomain_attribute = MISPAttribute()
        subdomain_attribute.from_dict(type='domain', value=subdomain)
        self.misp_event.add_attribute(**subdomain_attribute)
        domain_ip_object.add_reference(subdomain_attribute.uuid, 'subdomain')
    for sibling_uuid in uuids or []:
        domain_ip_object.add_reference(sibling_uuid, 'sibling-of')
    self.misp_event.add_object(**domain_ip_object)
    return domain_ip_object.uuid
def _parse_ssl_certificate(self, certificate):
    """Convert an SSL certificate record into a MISP 'x509' object that
    references the attribute it was seen with."""
    x509 = MISPObject('x509')
    fingerprint = 'x509-fingerprint-sha1'
    x509.add_attribute(fingerprint, type=fingerprint, value=certificate['fingerprint'])
    # (section, field) -> (attribute type, object relation)
    detail_mapping = {
        ('subject', 'name'): ('text', 'subject'),
        ('issuer', 'common_name'): ('text', 'issuer'),
        ('signature', 'serial'): ('text', 'serial-number'),
        ('validity', 'valid_from'): ('datetime', 'validity-not-before'),
        ('validity', 'valid_to'): ('datetime', 'validity-not-after'),
    }
    details = certificate['details']
    for (feature, subfeature), (attribute_type, relation) in detail_mapping.items():
        x509.add_attribute(relation, type=attribute_type, value=details[feature][subfeature])
    x509.add_reference(self.attribute.uuid, 'seen-by')
    self.misp_event.add_object(**x509)
def parse(self):
    """Query the passive DNS service for the attribute value and store the
    results as 'passive-dns' objects; on failure, self.result carries an
    error message instead."""
    # Composite types ('x|y') only use the part before the pipe.
    if '|' in self.attribute.type:
        value = self.attribute.value.split('|')[0]
    else:
        value = self.attribute.value
    try:
        results = self.pdns.query(value)
    except Exception:
        self.result = {
            'error': 'There is an authentication error, please make sure you supply correct credentials.'
        }
        return
    if not results:
        self.result = {'error': 'Not found'}
        return
    # object relation -> MISP attribute type
    mapping = {
        'count': 'counter',
        'origin': 'text',
        'time_first': 'datetime',
        'rrtype': 'text',
        'rrname': 'text',
        'rdata': 'text',
        'time_last': 'datetime'
    }
    for result in results:
        pdns_object = MISPObject('passive-dns')
        for relation, attribute_type in mapping.items():
            pdns_object.add_attribute(relation, type=attribute_type, value=result[relation])
        pdns_object.add_reference(self.attribute.uuid, 'associated-to')
        self.misp_event.add_object(**pdns_object)
def __get_object_domain_ip(self, obs, relation):
    """Create a 'domain-ip' object linking *obs* (under *relation*) with the
    queried attribute, and add it to the event."""
    domain_ip_object = MISPObject('domain-ip')
    domain_ip_object.add_attribute(relation, obs)
    complementary_relation = self.__get_relation_attribute()
    # Only pair the queried value in when its relation could be determined.
    if complementary_relation:
        domain_ip_object.add_attribute(complementary_relation, self.attribute['value'])
    domain_ip_object.add_reference(self.attribute['uuid'], 'related-to')
    self.misp_event.add_object(domain_ip_object)
def handle_object_case(self, attribute_type, attribute_value, compl_data):
    """Create a MISP object of type *attribute_type* from prepared attribute
    dictionaries, optionally referencing a previously parsed pe object."""
    new_object = MISPObject(attribute_type)
    for attribute in attribute_value:
        new_object.add_attribute(**attribute)
    # Complementary data carrying a pe uuid means this object must reference
    # the pe object parsed from the same observable.
    if type(compl_data) is dict and "pe_uuid" in compl_data:
        new_object.add_reference(compl_data['pe_uuid'], 'pe')
    self.misp_event.add_object(**new_object)
def _parse_dns(self, value):
    """Resolve *value* through the API and store a 'domain-ip' object built
    from the passive DNS records, when any are returned."""
    dns_result = self._api_call(f'{self.base_url}/resolve/{value}')
    if not (dns_result and dns_result['Passive'].get('records')):
        return
    itype, ftype, value = self._fetch_types(dns_result['Passive']['query'])
    domain_ip = MISPObject('domain-ip')
    domain_ip.add_attribute(itype, value)
    for record in dns_result['Passive']['records']:
        domain_ip.add_attribute(ftype, record['value'])
    domain_ip.add_reference(self.attribute.uuid, 'related-to')
    self.misp_event.add_object(**domain_ip)
def _handle_certificate(self, certificate, ip_uuid):
    """Fetch the details of *certificate* and store them as an 'x509'
    object referencing the IP attribute that exposed it."""
    x509 = MISPObject('x509')
    x509.add_attribute(self.cert_hash, type=self.cert_hash, value=certificate)
    info = self.pssl.fetch_cert(certificate)['info']
    for feature, (attribute_type, object_relation) in self.mapping.items():
        x509.add_attribute(object_relation, type=attribute_type, value=info[feature])
    x509.add_attribute(self.cert_type, type='text', value=self.cert_type)
    x509.add_reference(ip_uuid, 'seen-by')
    self.misp_event.add_object(**x509)
def handle_object_case(self, attribute_type, attribute_value, compl_data, object_uuid=None):
    """Create a MISP object of type *attribute_type* from prepared attribute
    dictionaries, optionally forcing its uuid and referencing a pe object."""
    new_object = MISPObject(attribute_type)
    if object_uuid:
        # Keep the uuid from the source document so references stay valid.
        new_object.uuid = object_uuid
    for attribute in attribute_value:
        new_object.add_attribute(**attribute)
    # Complementary data carrying a pe uuid means this object includes the
    # pe object parsed from the same observable.
    if type(compl_data) is dict and "pe_uuid" in compl_data:
        new_object.add_reference(compl_data['pe_uuid'], 'included-in')
    self.misp_event.add_object(**new_object)
def test_obj_references_export(self):
    """Check where object references are serialized: toggling an object's
    `standalone` flag (directly or via add_object) moves the
    'ObjectReference' key in or out of its own JSON output."""
    self.init_event()
    file_object = MISPObject(name="file")
    url_object = MISPObject(name="url", standalone=False)
    file_object.add_reference(url_object, "downloads")
    url_object.add_reference(file_object, "downloaded-by")
    self.assertFalse("ObjectReference" in file_object.jsonable())
    self.assertTrue("ObjectReference" in url_object.jsonable())
    # Attaching to an event and flipping standalone swaps the behaviour.
    self.mispevent.add_object(file_object)
    url_object.standalone = True
    self.assertTrue("ObjectReference" in file_object.jsonable())
    self.assertFalse("ObjectReference" in url_object.jsonable())
def parse_passivedns_results(self, query_response):
    """Convert passive DNS results into 'passive-dns' objects, one per
    record, each referencing the queried attribute."""
    for query_type, results in query_response.items():
        comment = self.comment % (query_type, TYPE_TO_FEATURE[self.attribute['type']], self.attribute['value'])
        for result in results:
            pdns_object = MISPObject('passive-dns')
            rdata = result.get('rdata')
            if rdata and isinstance(rdata, list):
                # Expand list-valued rdata into one attribute per entry and
                # drop it from the record so the generic loop skips it.
                result.pop('rdata')
                for entry in rdata:
                    pdns_object.add_attribute(**self._parse_attribute(comment, 'rdata', entry))
            for feature, value in result.items():
                pdns_object.add_attribute(**self._parse_attribute(comment, feature, value))
            pdns_object.add_reference(self.attribute['uuid'], 'related-to')
            self.misp_event.add_object(pdns_object)
def __get_object_domain_ip(self, obj_to_add):
    """Pair a Yeti IP/domain observable with the queried attribute in a
    'domain-ip' object when their types are complementary; otherwise
    return None implicitly."""
    ip_for_domain = obj_to_add['type'] == 'Ip' and self.attribute['type'] in ['hostname', 'domain']
    domain_for_ip = obj_to_add['type'] in ('Hostname', 'Domain') and self.attribute['type'] in ('ip-src', 'ip-dst')
    if ip_for_domain or domain_for_ip:
        domain_ip_object = MISPObject('domain-ip')
        domain_ip_object.add_attribute(self.__get_relation(obj_to_add), obj_to_add['value'])
        domain_ip_object.add_attribute(
            self.__get_relation(self.attribute, is_yeti_object=False),
            self.attribute['value'])
        domain_ip_object.add_reference(self.attribute['uuid'], 'related_to')
        return domain_ip_object
def __misp_add_vt_to_URLObject(self, obj: MISPObject) -> Optional[MISPObject]:
    """Look up the object's URL on VirusTotal and attach a
    'virustotal-report' object, or return None when no report exists."""
    url_value = obj.get_attributes_by_relation('url')[0].value
    self.vt.url_lookup(url_value)
    report = self.vt.get_url_lookup(url_value)
    if not report:
        return None
    report_attributes = report['attributes']
    vt_object = MISPObject('virustotal-report', standalone=False)
    # Timestamps are epoch seconds; correlation is pointless for them.
    vt_object.add_attribute('first-submission',
                            value=datetime.fromtimestamp(report_attributes['first_submission_date']),
                            disable_correlation=True)
    vt_object.add_attribute('last-submission',
                            value=datetime.fromtimestamp(report_attributes['last_submission_date']),
                            disable_correlation=True)
    vt_object.add_attribute('permalink',
                            value=f"https://www.virustotal.com/gui/url/{report['id']}/detection",
                            disable_correlation=True)
    obj.add_reference(vt_object, 'analysed-with')
    return vt_object
def _handle_dns_record(self, item, record_type, relationship):
    """Store one DNS record as a 'dns-record' object referencing the
    queried attribute with *relationship*."""
    record = MISPObject('dns-record')
    record.add_attribute('queried-domain', type='domain', value=item['host'])
    # A records resolve to an IP; everything else points at another domain.
    if record_type == 'A':
        attribute_type, feature = 'ip-dst', 'ip'
    else:
        attribute_type, feature = 'domain', 'target'
    record.add_attribute(f'{record_type.lower()}-record', type=attribute_type, value=item[feature])
    record.add_reference(self.attribute.uuid, relationship)
    self.misp_event.add_object(**record)
def _parse_vulnerability(self, value):
    """Search the API for vulnerabilities matching *value* and store each
    one as a MISP 'vulnerability' object."""
    search_result = self._api_call(f'{self.base_url}/vulnerabilities/search/{value}')
    if not search_result:
        return
    field_to_relation = (('title', 'summary'), ('description', 'description'), ('temporal_score', 'cvss-score'))
    for vulnerability in search_result:
        vulnerability_object = MISPObject('vulnerability')
        for code in vulnerability['stdcode']:
            vulnerability_object.add_attribute('id', code)
        for feature, relation in field_to_relation:
            vulnerability_object.add_attribute(relation, vulnerability[feature])
        for reference in vulnerability['references']:
            vulnerability_object.add_attribute('references', reference['link_target'])
        vulnerability_object.add_reference(self.attribute.uuid, 'related-to')
        self.misp_event.add_object(**vulnerability_object)
def create_response(original_attribute: dict, software: str, signature: Optional[str] = None) -> dict:
    """Build the module response.

    When *signature* was found, the response carries the original attribute
    plus an 'av-signature' object referencing it; otherwise the results are
    empty.
    """
    misp_event = MISPEvent()
    if signature:
        misp_event.add_attribute(**original_attribute)
        av_object = MISPObject("av-signature")
        av_object.add_attribute("signature", signature)
        av_object.add_attribute("software", software)
        av_object.add_reference(original_attribute["uuid"], "belongs-to")
        misp_event.add_object(av_object)
    serialized = json.loads(misp_event.to_json())
    # Only keep the non-empty parts of the serialized event.
    results = {key: serialized[key] for key in ('Attribute', 'Object') if serialized.get(key)}
    return {"results": results}
def get_certificate_object(cert, attribute):
    """Translate a Censys-style certificate record into a MISP 'x509'
    object referencing *attribute*, and return it."""
    parsed = cert['parsed']
    cert_object = MISPObject('x509')
    for relation, value in (
            ('x509-fingerprint-sha256', parsed['fingerprint_sha256']),
            ('x509-fingerprint-sha1', parsed['fingerprint_sha1']),
            ('x509-fingerprint-md5', parsed['fingerprint_md5']),
            ('serial-number', parsed['serial_number']),
            ('version', parsed['version']),
            ('subject', parsed['subject_dn']),
            ('issuer', parsed['issuer_dn']),
            ('validity-not-before', isoparse(parsed['validity']['start'])),
            ('validity-not-after', isoparse(parsed['validity']['end'])),
            ('self_signed', parsed['signature']['self_signed']),
            ('signature_algorithm', parsed['signature']['signature_algorithm']['name']),
            ('pubkey-info-algorithm', parsed['subject_key_info']['key_algorithm']['name'])):
        cert_object.add_attribute(relation, value=value)
    key_info = parsed['subject_key_info']
    if 'rsa_public_key' in key_info:
        rsa_key = key_info['rsa_public_key']
        cert_object.add_attribute('pubkey-info-size', value=rsa_key['length'])
        cert_object.add_attribute('pubkey-info-exponent', value=rsa_key['exponent'])
        # The modulus arrives base64-encoded; export it as hex.
        hex_modulus = codecs.encode(base64.b64decode(rsa_key['modulus']), 'hex').decode()
        cert_object.add_attribute('pubkey-info-modulus', value=hex_modulus)
    san = parsed.get("extensions", {}).get("subject_alt_name")
    if san:
        for dns_name in san.get('dns_names', []):
            cert_object.add_attribute('dns_names', value=dns_name)
        for ip_address in san.get('ip_addresses', []):
            cert_object.add_attribute('ip', value=ip_address)
    if "raw" in cert:
        cert_object.add_attribute('raw-base64', value=cert['raw'])
    cert_object.add_reference(attribute.uuid, 'associated-to')
    return cert_object
def parse_url(self, url, recurse=False, uuid=None):
    """Query VirusTotal for *url*; unless recursing, also store a 'url'
    object referencing the VT report. Returns the HTTP status code."""
    response = requests.get(self.base_url.format('url'),
                            params={'apikey': self.apikey, 'resource': url})
    status_code = response.status_code
    if status_code == 200:
        report = response.json()
        vt_uuid = self.parse_vt_object(report)
        if not recurse:
            feature = 'url'
            url_object = MISPObject(feature)
            url_object.add_attribute(feature, type=feature, value=url)
            url_object.add_reference(vt_uuid, 'analyzed-with')
            # Link the URL to the object it was found hosted in, if any.
            if uuid:
                url_object.add_reference(uuid, 'hosted-in')
            self.misp_event.add_object(**url_object)
    return status_code
def __get_object_url(self, obj_to_add):
    """Pair a Yeti URL observable with the queried attribute in a 'url'
    object when their types are complementary; otherwise return None
    implicitly."""
    url_for_host = obj_to_add['type'] == 'Url' and self.attribute['type'] in ['hostname', 'domain', 'ip-src', 'ip-dst']
    host_for_url = obj_to_add['type'] in ('Hostname', 'Domain', 'Ip') and self.attribute['type'] == 'url'
    if url_for_host or host_for_url:
        url_object = MISPObject('url')
        # Only add each side when its object relation could be determined.
        relation = self.__get_relation(obj_to_add)
        if relation:
            url_object.add_attribute(relation, obj_to_add['value'])
        relation = self.__get_relation(self.attribute, is_yeti_object=False)
        if relation:
            url_object.add_attribute(relation, self.attribute['value'])
        url_object.add_reference(self.attribute['uuid'], 'related_to')
        return url_object
def __get_object_ns_record(self, obj_to_add, link):
    """Build a 'dns-record' object pairing a queried domain with its NS
    record; *link* selects which side each value comes from. Returns None
    implicitly for unknown directions or empty values."""
    if link == 'dst':
        queried_domain, ns_domain = self.attribute['value'], obj_to_add['value']
    elif link == 'src':
        queried_domain, ns_domain = obj_to_add['value'], self.attribute['value']
    else:
        queried_domain = ns_domain = None
    if queried_domain and ns_domain:
        dns_record_object = MISPObject('dns-record')
        dns_record_object.add_attribute('queried-domain', queried_domain)
        dns_record_object.add_attribute('ns-record', ns_domain)
        dns_record_object.add_reference(self.attribute['uuid'], 'related_to')
        return dns_record_object
def parse_result(attribute, values):
    """Build a MISP event from the ranking response: the original attribute
    plus one 'asn' object per last-seen entry, each referencing it.

    Returns the 'Attribute' and 'Object' parts of the serialized event.
    """
    event = MISPEvent()
    initial_attribute = MISPAttribute()
    initial_attribute.from_dict(**attribute)
    event.add_attribute(**initial_attribute)
    # response field -> (MISP attribute type, object relation)
    mapping = {'asn': ('AS', 'asn'), 'prefix': ('ip-src', 'subnet-announced')}
    # Fix: removed a leftover debug `print(values)` that polluted stdout.
    for last_seen, response in values['response'].items():
        asn = MISPObject('asn')
        asn.add_attribute('last-seen', **{'type': 'datetime', 'value': last_seen})
        for feature, (attribute_type, object_relation) in mapping.items():
            asn.add_attribute(object_relation, **{'type': attribute_type, 'value': response[feature]})
        asn.add_reference(initial_attribute.uuid, 'related-to')
        event.add_object(**asn)
    event = json.loads(event.to_json())
    return {key: event[key] for key in ('Attribute', 'Object')}
def pattern_pe(self, pattern):
    """Parse STIX 2 pattern predicates describing a PE file.

    Predicates are split into plain file attributes (returned to the
    caller), a 'pe' object and one 'pe-section' object per section (both
    added to the event). Returns (file_attributes, pe_object_uuid).
    """
    attributes = []
    sections = defaultdict(dict)
    pe = MISPObject('pe')
    pe_uuid = str(uuid.uuid4())
    pe.uuid = pe_uuid
    for p in pattern:
        p_type, p_value = p.split(' = ')
        p_value = p_value[1:-1]  # strip the surrounding quotes
        if ':extensions.' in p_type:
            if '.sections[' in p_type:
                # Section predicate: group values by section index.
                p_type_list = p_type.split('.')
                stix_type = "hashes.{}".format(p_type_list[4][1:-1]) if '.hashes.' in p_type else p_type_list[3]
                sections[p_type_list[2]][stix_type] = p_value
            else:
                stix_type = p_type.split('.')[-1]
                mapping = pe_mapping[stix_type]
                pe.add_attribute(**{'type': mapping['type'], 'object_relation': mapping['relation'],
                                    'value': p_value, 'to_ids': True})
        elif 'file:hashes.' in p_type:
            _, h = p_type.split('.')
            h = h[1:-1]
            attributes.append({'type': h, 'object_relation': h, 'value': p_value, 'to_ids': True})
        else:
            mapping = file_mapping[p_type]
            attributes.append({'type': mapping['type'], 'object_relation': mapping['relation'],
                               'value': p_value, 'to_ids': True})
    for section in sections.values():
        pe_section = MISPObject('pe-section')
        for stix_type, value in section.items():
            if 'hashes.' in stix_type:
                h_type = stix_type.split('.')[1]
                pe_section.add_attribute(**{'type': h_type, 'object_relation': h_type,
                                            'value': value, 'to_ids': True})
            else:
                mapping = pe_section_mapping[stix_type]
                pe_section.add_attribute(**{'type': mapping['type'], 'object_relation': mapping['relation'],
                                            'value': value, 'to_ids': True})
        section_uuid = str(uuid.uuid4())
        # Fix: the section object must carry its own uuid. It was wrongly set
        # to the pe object's uuid, so every section shared the pe uuid and the
        # 'included-in' reference below pointed at a non-existent object.
        pe_section.uuid = section_uuid
        pe.add_reference(section_uuid, 'included-in')
        self.misp_event.add_object(**pe_section)
    self.misp_event.add_object(**pe)
    return attributes, pe_uuid
def parse_coa(self, courses_of_action):
    """Parse STIX 1 CourseOfAction items into MISP 'course-of-action' objects.

    Each optional CoA field maps to a text attribute; parameter observables
    become standalone event attributes referenced from the object.
    """
    for coa in courses_of_action:
        misp_object = MISPObject('course-of-action')
        if coa.title:
            attribute = {'type': 'text', 'object_relation': 'name', 'value': coa.title}
            misp_object.add_attribute(**attribute)
        if coa.type_:
            attribute = {'type': 'text', 'object_relation': 'type', 'value': coa.type_.value}
            misp_object.add_attribute(**attribute)
        if coa.stage:
            attribute = {'type': 'text', 'object_relation': 'stage', 'value': coa.stage.value}
            misp_object.add_attribute(**attribute)
        if coa.description:
            attribute = {'type': 'text', 'object_relation': 'description', 'value': coa.description.value}
            # POSSIBLE ISSUE HERE, need example to test (description value
            # access may differ between STIX producers)
            misp_object.add_attribute(**attribute)
        if coa.objective:
            attribute = {'type': 'text', 'object_relation': 'objective', 'value': coa.objective.description.value}
            misp_object.add_attribute(**attribute)
        if coa.cost:
            attribute = {'type': 'text', 'object_relation': 'cost', 'value': coa.cost.value.value}
            misp_object.add_attribute(**attribute)
        if coa.efficacy:
            attribute = {'type': 'text', 'object_relation': 'efficacy', 'value': coa.efficacy.value.value}
            misp_object.add_attribute(**attribute)
        if coa.impact:
            attribute = {'type': 'text', 'object_relation': 'impact', 'value': coa.impact.value.value}
            misp_object.add_attribute(**attribute)
        if coa.parameter_observables:
            # Each parameter observable becomes its own event-level attribute,
            # referenced from the course-of-action object with a forced uuid.
            for observable in coa.parameter_observables.observables:
                properties = observable.object_.properties
                attribute = MISPAttribute()
                attribute.type, attribute.value, _ = self.handle_attribute_type(properties)
                referenced_uuid = str(uuid.uuid4())
                attribute.uuid = referenced_uuid
                self.misp_event.add_attribute(**attribute)
                # NOTE(review): the attribute dict is splatted into the
                # reference call — presumably to carry extra reference
                # metadata; confirm against the add_reference signature.
                misp_object.add_reference(referenced_uuid, 'observable', None, **attribute)
        self.misp_event.add_object(**misp_object)
def check_hashes(self):
    """Expand the file hashes of the current MISP event via VirusTotal.

    Walks the event's file objects and hash attributes, expands malware
    samples locally where possible, queries VT for the remaining hashes
    and (with --populate) adds the resulting file / VT-report objects back
    to the event. Respects the 4 queries/minute public-API quota.
    """
    if self.offline_mode:
        self.log('error', 'Offline mode, unable to query VirusTotal')
        return
    event_id = self._get_eventid()
    if event_id is None:
        return
    event = self.misp.get(event_id)
    if self._has_error_message(event):
        return
    misp_event = MISPEvent()
    misp_event.load(event)
    hashes_to_expand = {}   # hash value -> attribute that carried it
    hashes_expanded = []    # Those hashes are known and already processed
    local_samples_hashes = []
    partial_objects = {}    # object uuid -> malware-sample attribute to expand
    for o in misp_event.Object:
        if o.name != 'file':
            continue
        if o.has_attributes_by_relation(['md5', 'sha1', 'sha256']):
            # This object has all the hashes we care about
            tmphashes = []
            tmphashes += [h.value for h in o.get_attributes_by_relation('md5')]
            tmphashes += [h.value for h in o.get_attributes_by_relation('sha1')]
            tmphashes += [h.value for h in o.get_attributes_by_relation('sha256')]
            # Make sure to query VT for the sha256, even if expanded locally
            hashes_to_expand[o.get_attributes_by_relation('sha256')[0].value] = o.get_attributes_by_relation('sha256')[0]
            if o.has_attributes_by_relation(['malware-sample']):
                # ... and it has a malware sample
                local_samples_hashes += tmphashes
            hashes_expanded += tmphashes
        elif o.has_attributes_by_relation(['malware-sample']):
            # This object has a malware sample, but is missing hashes. We can expand locally.
            # get the MD5 from the malware-sample attribute ('filename|md5')
            malware_sample = o.get_attributes_by_relation('malware-sample')[0]  # at most one sample/file object
            local_samples_hashes.append(malware_sample.value.split('|')[1])
            local_samples_hashes += [h.value for h in o.get_attributes_by_relation('md5')]
            local_samples_hashes += [h.value for h in o.get_attributes_by_relation('sha1')]
            local_samples_hashes += [h.value for h in o.get_attributes_by_relation('sha256')]
            if self.args.populate:
                # The object is missing hashes, keeping track of it for
                # expansion if it isn't already done.
                partial_objects[o.uuid] = malware_sample
        else:
            # Partial hashes only: queue the strongest available hash for VT.
            sha256 = {attribute.value: attribute for attribute in o.get_attributes_by_relation('sha256')}
            sha1 = {attribute.value: attribute for attribute in o.get_attributes_by_relation('sha1')}
            md5 = {attribute.value: attribute for attribute in o.get_attributes_by_relation('md5')}
            if sha256:
                hashes_to_expand.update(sha256)
            elif sha1:
                hashes_to_expand.update(sha1)
            elif md5:
                hashes_to_expand.update(md5)
    for ref_uuid, sample in partial_objects.items():
        if sample.value.split('|')[1] in hashes_expanded:
            # Already expanded in an other object
            continue
        new_obj, hashes = self._expand_local_sample(pseudofile=sample.malware_binary,
                                                    filename=sample.value.split('|')[0],
                                                    refobj=ref_uuid,
                                                    default_attributes_paramaters=sample)
        misp_event.Object += new_obj
        local_samples_hashes += hashes
        # Make sure to query VT for the sha256, even if expanded locally
        hashes_to_expand[hashes[0]] = sample
    hashes_expanded += local_samples_hashes
    for a in misp_event.attributes:
        if a.type == 'malware-sample' and a.value.split('|')[1] not in hashes_expanded:
            # Event-level sample not yet expanded: expand it locally too.
            new_obj, hashes = self._expand_local_sample(pseudofile=a.malware_binary,
                                                        filename=a.value.split('|')[0],
                                                        default_attributes_paramaters=a)
            misp_event.Object += new_obj
            local_samples_hashes += hashes
            # Make sure to query VT for the sha256, even if expanded locally
            hashes_to_expand[hashes[0]] = a
        elif a.type in ('filename|md5', 'filename|sha1', 'filename|sha256'):
            # We don't care if the hashes are in hashes_expanded or
            # hashes_to_expand: they are filtered out later anyway
            fname, hashval = a.value.split('|')
            hashes_to_expand[hashval] = a
        elif a.type in ('md5', 'sha1', 'sha256'):
            # We don't care if the hashes are in hashes_expanded or
            # hashes_to_expand: they are filtered out later anyway
            hashes_to_expand[a.value] = a
    unk_vt_hashes = []
    if cfg.virustotal.virustotal_has_private_key is False:
        # Public API: 4 queries per rolling minute.
        quota = 4
        timeout = datetime.datetime.now() + datetime.timedelta(minutes=1)
    hashes_expanded += local_samples_hashes
    processed_on_vt = []
    # Make sure to start getting reports for the longest possible hashes
    # (reduce risks of collisions)
    for to_expand in sorted(list(set(hashes_to_expand)), key=len):
        if to_expand in processed_on_vt:
            # Always run VT, once per sample
            continue
        original_attribute = hashes_to_expand[to_expand]
        # NOTE(review): original_object_id is only assigned when 'object_id'
        # is present; the check further down can hit a NameError on the first
        # attribute without one — confirm and initialize unconditionally.
        if original_attribute.get('object_id'):
            original_object_id = original_attribute.get('object_id')
        vt_object = self._make_VT_object(to_expand, original_attribute)
        if not vt_object:
            unk_vt_hashes.append(to_expand)
            continue
        result = vt_object.get_report()
        md5 = result['md5']
        sha1 = result['sha1']
        sha256 = result['sha256']
        processed_on_vt += [sha256, sha1, md5]
        if all(h in local_samples_hashes for h in [md5, sha1, sha256]):
            self.log('success', 'Sample available in MISP:')
        else:
            self.log('success', 'Sample available in VT:')
        self.log('item', '{}\n\t{}\n\t{}\n\t{}'.format(result["permalink"], md5, sha1, sha256))
        if self.args.populate:
            if not all(h in hashes_expanded for h in [md5, sha1, sha256]):
                # If all the "new" expanded hashes are in the hashes_expanded
                # list, skip
                file_object = MISPObject('file', default_attributes_paramaters=original_attribute)
                file_object.add_attribute('md5', value=md5)
                file_object.add_attribute('sha1', value=sha1)
                file_object.add_attribute('sha256', value=sha256)
                file_object.add_reference(vt_object.uuid, 'analysed-with')
                misp_event.Object.append(file_object)
                hashes_expanded += [md5, sha1, sha256]
            else:
                if not original_object_id or original_object_id == '0':
                    # Not an object, but the hashes are in an other object, skipping
                    continue
                else:
                    # We already have a MISP object, adding the link to the new VT object
                    file_object = misp_event.get_object_by_id(original_object_id)
                    file_object.add_reference(vt_object.uuid, 'analysed-with')
            misp_event.Object.append(vt_object)
        if cfg.virustotal.virustotal_has_private_key is False:
            # Throttle to stay within the public-API quota.
            if quota > 0:
                quota -= 1
            else:
                waiting_time = (timeout - datetime.datetime.now()).seconds
                if waiting_time > 0:
                    self.log('warning', 'No private API key, 4 queries/min is the limit. Waiting for {} seconds.'.format(waiting_time))
                    time.sleep(waiting_time)
                quota = 4
                timeout = datetime.datetime.now() + datetime.timedelta(minutes=1)
    if self.args.populate:
        self._populate(misp_event)
    if len(unk_vt_hashes) > 0:
        self.log('error', 'Unknown on VT:')
        for h in unk_vt_hashes:
            self.log('item', '{}'.format(h))