def parse_and_insert_dnsdbflex(data: str):
    """Parse and validate the simpler dnsdbflex output data.

    Parameters
    ----------
    data : str
        ndjson input, one dnsdbflex record per line.

    Returns
    -------
    dict
        Either an error message or the parsed data which may be sent
        back to the caller of handler().

    Raises
    ------
    none. All exceptions are handled here; on error a misperror dict
    is returned.
    """
    objects = []
    try:
        entries = ndjson.loads(data)
        for entry in entries:  # iterate over all ndjson lines
            # validate here (simple validation or full JSON Schema validation)
            if not validate_dnsdbflex(entry):
                return {
                    "error": "Could not validate the dnsdbflex input '%s'" % entry
                }

            # Next, extract some fields
            rrtype = entry['rrtype'].upper()
            rrname = entry['rrname'].rstrip('.')

            # create a new MISP object, based on the passive-dns object
            # for each nd-JSON line
            try:
                o = MISPObject(name='passive-dns', standalone=False,
                               distribution=0,
                               comment='DNSDBFLEX import by cof2misp')
                o.add_attribute('rrtype', value=rrtype, distribution=0,
                                comment='DNSDBFLEX import by cof2misp')
                o.add_attribute('rrname', value=rrname, distribution=0,
                                comment='DNSDBFLEX import by cof2misp')
            except Exception as ex:
                # BUG FIX: previously this only printed the error and fell
                # through, appending an undefined (first iteration) or stale
                # (later iterations) object. Skip the broken entry instead.
                print("could not create object. Reason: %s" % str(ex))
                continue

            #
            # add dnsdbflex entry to MISP object
            #
            objects.append(o.to_json())

        r = {'results': {'Object': [json.loads(o) for o in objects]}}
    except Exception as ex:
        misperrors["error"] = "An error occured during parsing of input: '%s'" % (str(ex), )
        return misperrors
    return r
def parse_response(response):
    """Convert a list of file records into MISP 'file' objects.

    Parameters
    ----------
    response : iterable of dict
        Each dict may carry file metadata keys (file_name, file_size,
        hash fields, ...).

    Returns
    -------
    dict
        {'results': {'Object': [...]}} with one serialized MISP object
        per input record.
    """
    # Maps an input field name to the MISP attribute type / object relation
    # it should be imported as.
    mapping = {
        'file_name': {'type': 'filename', 'object_relation': 'filename'},
        'file_size': {'type': 'size-in-bytes', 'object_relation': 'size-in-bytes'},
        'file_type_mime': {'type': 'mime-type', 'object_relation': 'mimetype'},
        'md5_hash': {'type': 'md5', 'object_relation': 'md5'},
        'sha1_hash': {'type': 'sha1', 'object_relation': 'sha1'},
        'sha256_hash': {'type': 'sha256', 'object_relation': 'sha256'},
        'ssdeep': {'type': 'ssdeep', 'object_relation': 'ssdeep'}
    }
    misp_event = MISPEvent()
    for data in response:
        misp_object = MISPObject('file')
        for feature, attribute in mapping.items():
            if feature in data:
                misp_attribute = {'value': data[feature]}
                misp_attribute.update(attribute)
                misp_object.add_attribute(**misp_attribute)
        # BUG FIX: pass the MISPObject itself instead of unpacking it as
        # keyword arguments (`**misp_object`), which relied on the object's
        # mapping protocol and is not the documented pymisp API.
        misp_event.add_object(misp_object)
    return {
        'results': {
            'Object': [
                json.loads(misp_object.to_json())
                for misp_object in misp_event.objects
            ]
        }
    }
def parse_and_insert_cof(data: str) -> dict:
    """Parse and validate COF (Common Output Format) passive-DNS data.

    Parameters
    ----------
    data : str
        ndjson input, one COF record per line.

    Returns
    -------
    dict
        Either an error message or the parsed data which may be sent
        back to the caller of handler().

    Raises
    ------
    none. All Exceptions will be handled here. On error, a misperror
    is returned.
    """
    objects = []
    try:
        for entry in ndjson.loads(data):
            # simple validation (or full JSON Schema validation) per line
            if not validate_cof(entry):
                return {"error": "Could not validate the COF input '%s'" % entry}

            # pull out the fields we need, normalising trailing dots
            rrtype = entry['rrtype'].upper()
            rrname = entry['rrname'].rstrip('.')
            rdata = [value.rstrip('.') for value in entry['rdata']]

            # one passive-dns MISP object per nd-JSON line
            o = MISPObject(name='passive-dns', standalone=False,
                           comment='created by cof2misp')
            # o.add_tag('tlp:amber')  # FIXME: we'll want to add a tlp: tag to the object

            if 'bailiwick' in entry:
                o.add_attribute('bailiwick',
                                value=entry['bailiwick'].rstrip('.'),
                                distribution=0)

            # handle the combinations of rrtype (domain, ip) on both the
            # left and right side, when specific attributes are requested
            if create_specific_attributes:
                if rrtype in ('A', 'AAAA', 'A6'):
                    # address type: domain on the left, IP(s) on the right
                    o.add_attribute('rrname_domain', value=rrname, distribution=0)
                    for value in rdata:
                        o.add_attribute('rdata_ip', value=value, distribution=0)
                elif rrtype in ('CNAME', 'DNAME', 'NS'):
                    # both sides are domains
                    o.add_attribute('rrname_domain', value=rrname, distribution=0)
                    for value in rdata:
                        o.add_attribute('rdata_domain', value=value, distribution=0)
                elif rrtype == 'SOA':
                    # left side is a domain, right side is text
                    o.add_attribute('rrname_domain', value=rrname, distribution=0)

            # now do the regular filling up of rrname, rrtype, time_first, etc.
            o.add_attribute('rrname', value=rrname, distribution=0)
            o.add_attribute('rrtype', value=rrtype, distribution=0)
            for value in rdata:
                o.add_attribute('rdata', value=value, distribution=0)
            o.add_attribute('raw_rdata', value=json.dumps(rdata),
                            distribution=0)  # FIXME: do we need to hex encode it?
            o.add_attribute('time_first', value=entry['time_first'], distribution=0)
            o.add_attribute('time_last', value=entry['time_last'], distribution=0)
            o.first_seen = entry['time_first']  # is this redundant?
            o.last_seen = entry['time_last']

            # Now add the other optional values.
            # FIXME: how about a map() other function. DNRY
            for key in ('count', 'sensor_id', 'origin', 'text',
                        'time_first_ms', 'time_last_ms',
                        'zone_time_first', 'zone_time_last'):
                if key in entry and entry[key]:
                    o.add_attribute(key, value=entry[key], distribution=0)

            # add the finished COF entry to the result set
            objects.append(o.to_json())

        r = {'results': {'Object': [json.loads(obj) for obj in objects]}}
    except Exception as ex:
        misperrors["error"] = "An error occured during parsing of input: '%s'" % (str(ex), )
        return misperrors
    return r
def main(argv):
    """Look up a vehicle registration on regcheck.org.uk and build a MISP
    'vehicle' object.

    Parses -c/--country, -r/--registration and -u/--username from *argv*,
    POSTs to the country-specific endpoint, prints the resulting MISP
    object and writes it to '<country>_<reg>.json'.

    NOTE(review): two regions of the original source were corrupted by
    credential masking (the getopt handling and the request payload);
    they are reconstructed here from the surrounding code — verify
    against the regcheck API before relying on them.
    """
    usage = 'check.py -c <country> -r <registration> -u <username>'
    username = ''
    reg = ''
    country = ''
    if len(sys.argv) <= 1:
        print(usage)
        sys.exit(2)
    try:
        opts, args = getopt.getopt(argv, "hc:r:u:",
                                   ["country=", "registration=", "username="])
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print(usage)
            sys.exit()
        elif opt in ("-c", "--country"):
            country = arg.lower()
        # BUG FIX: the long option was checked as the misspelled
        # "--registation" and could never match the declared "registration=".
        elif opt in ("-r", "--registration"):
            reg = arg.upper()
        elif opt in ("-u", "--username"):
            username = arg
    # ------------------------------------------------------------------------
    # One endpoint per supported country; unknown countries leave url empty
    # (same outcome as the original if-chain).
    endpoints = {
        "fr": "http://www.regcheck.org.uk/api/reg.asmx/CheckFrance",
        "es": "http://www.regcheck.org.uk/api/reg.asmx/CheckSpain",
        "uk": "http://www.regcheck.org.uk/api/reg.asmx/Check",
    }
    url = endpoints.get(country, '')
    # NOTE(review): payload was masked in the original; presumably the
    # registration number plus the API username — confirm with the API docs.
    payload = "RegistrationNumber=" + reg + "&username=" + username
    headers = {
        'content-type': "application/x-www-form-urlencoded",
        'cache-control': "no-cache",
    }
    response = requests.request("POST", url, data=payload, headers=headers)
    print(response.text)
    # The API wraps each JSON record in <vehicleJson>...</vehicleJson> tags.
    for item in response.text.split("</vehicleJson>"):
        if "<vehicleJson>" not in item:
            continue
        responseJson = item[item.find("<vehicleJson>") + len("<vehicleJson>"):]
        vehicleJson = json.loads(responseJson)
        mispObject = MISPObject('vehicle')
        carDescription = vehicleJson["Description"]
        carMake = vehicleJson["CarMake"]["CurrentTextValue"]
        carModel = vehicleJson["CarModel"]["CurrentTextValue"]
        ImageUrl = vehicleJson["ImageUrl"]
        # ROBUSTNESS: default these so an unsupported country no longer
        # raises NameError when the attributes are added below.
        IndicativeValue = ""
        VIN = ""
        firstRegistration = ""
        if country == "fr":
            IndicativeValue = vehicleJson["IndicativeValue"]["CurrentTextValue"]
            BodyStyle = vehicleJson["BodyStyle"]["CurrentTextValue"]  # kept: read but unused in the original
            RegistrationDate = vehicleJson["RegistrationDate"]  # kept: read but unused in the original
            VIN = vehicleJson["ExtendedData"]["numSerieMoteur"]
            gearbox = vehicleJson["ExtendedData"]["boiteDeVitesse"]
            dynoHP = vehicleJson["ExtendedData"]["puissanceDyn"]
            firstRegistration = vehicleJson["ExtendedData"]["datePremiereMiseCirculation"]
            mispObject.add_attribute('dyno-power', type='text', value=dynoHP)
            mispObject.add_attribute('gearbox', type='text', value=gearbox)
        if country == "es":
            IndicativeValue = vehicleJson["IndicativePrice"]
        if country in ("es", "uk"):
            firstRegistration = vehicleJson["RegistrationDate"]
            VIN = vehicleJson["VehicleIdentificationNumber"]
        mispObject.add_attribute('description', type='text', value=carDescription)
        mispObject.add_attribute('make', type='text', value=carMake)
        mispObject.add_attribute('model', type='text', value=carModel)
        mispObject.add_attribute('vin', type='text', value=VIN)
        mispObject.add_attribute('license-plate-number', type='text', value=reg)
        # NOTE(review): original attribute name 'lindicative-value' kept
        # verbatim (likely a typo for 'indicative-value', but renaming it
        # would change the emitted object).
        mispObject.add_attribute('lindicative-value', type='text', value=IndicativeValue)
        mispObject.add_attribute('date-first-registration', type='text', value=firstRegistration)
        mispObject.add_attribute('image-url', type='text', value=ImageUrl)
        print(mispObject.to_json())
        with open(country + '_' + reg + '.json', 'w') as outfile:
            outfile.write(mispObject.to_json())
        print("Description: " + carDescription)
        print("Make: " + carMake)
        print("Model: " + carModel)
}) twitter_object.add_attribute('geo', **{ 'type': 'text', 'value': tweet.geo }) for url in tweet.urls: twitter_object.add_attribute('embedded-link', **{ 'type': 'text', 'value': url }) twitter_object.add_attribute('post-id', **{ 'type': 'text', 'value': tweet.id }) setattr(twitter_object, 'first_seen', tweet.datestamp) t = json.loads(twitter_object.to_json()) output = {} output['Object'] = [] output['Object'].append(t) print(json.dumps(output)) if not args.disable_push: ail_publish(data=json.dumps( output_tweet, indent=4, sort_keys=True, default=jsonclean)) for url in urls: output = {} output['source'] = ailurlextract output['source-uuid'] = uuid output['default-encoding'] = 'UTF-8' output['meta'] = {} output['meta']['parent:twitter:tweet_id'] = tweet.id