def taxii_import(server_url, collection_url):
    """Import STIX2 objects from a TAXII collection into the local store.

    Args:
        server_url: optional TAXII server URL; when given, the collection
            URL is discovered from it via _get_collection_url.
        collection_url: optional TAXII collection URL.

    Exits with -1 when neither argument is supplied.
    """
    if not (server_url or collection_url):
        print('Please specify one of --server_url or --collection_url')
        exit(-1)
    if server_url:
        collection_url = _get_collection_url(server_url)
    if collection_url:
        print('Importing data from collection at: {0:s}'.format(collection_url))
    taxii_source = TAXIICollectionSource(Collection(collection_url))

    # Maps STIX id -> stored object, reused later to resolve relationships.
    all_objects = {}
    for type_name, yeti_class in OBJECT_CLASSES.items():
        print('Fetching', type_name)
        stats = {'updated': 0, 'new': 0, 'skipped': 0}
        try:
            for stix_obj in taxii_source.query(Filter('type', '=', type_name)):
                obj_json = json.loads(stix_obj.serialize())
                existing = yeti_class.get(stix_obj.id)
                if not existing:
                    # First sighting of this object: persist it.
                    existing = yeti_class(**stix_obj).save()
                    stats['new'] += 1
                elif (existing.modified >= stix_obj.modified or existing.revoked
                      or existing.equals(obj_json)):
                    stats['skipped'] += 1
                elif existing.modified < stix_obj.modified:
                    existing.update(obj_json)
                    stats['updated'] += 1
                all_objects[stix_obj['id']] = existing
        except requests.exceptions.HTTPError as error:
            print(f'HTTPError: {error}')
        except datastore.DataSourceError as error:
            print(f'DataSourceError: {error}')
        print(f"[{type_name}] New: {stats['new']}, Updated: {stats['updated']}, "
              f"Skipped: {stats['skipped']}")

    print('Getting relationships')
    relationship_count = 0
    for relationship in taxii_source.query(Filter('type', '=', 'relationship')):
        relationship_count += 1
        source = _lazy_get_object(all_objects, relationship.source_ref)
        target = _lazy_get_object(all_objects, relationship.target_ref)
        source.link_to(target, stix_rel=json.loads(relationship.serialize()))
    print('Added {0:d} relationships'.format(relationship_count))
def retrieve_attack_as_list():
    """Fetch ATT&CK Enterprise techniques over TAXII and return them flattened.

    Returns:
        A list of flattened technique dicts, one entry per row produced by
        flatten_technique.
    """
    server = Server("https://cti-taxii.mitre.org/taxii/")
    api_root = server.api_roots[0]
    for collection in api_root.collections:
        logging.info(collection.title + ":" + collection.id)

    # Enterprise ATT&CK collection id.
    collection = Collection(
        "https://cti-taxii.mitre.org/stix/collections/95ecc380-afe9-11e4-9b6c-751b66dd541e/"
    )
    tc_source = TAXIICollectionSource(collection)
    techniques = tc_source.query(Filter("type", "=", "attack-pattern"))
    all_keys = gather_keys(techniques)
    parsed_techniques = []
    for technique in techniques:
        # flatten_technique returns a list; extend() replaces the original
        # quadratic `list + list` re-concatenation on every iteration.
        parsed_techniques.extend(flatten_technique(technique, all_keys))
    return parsed_techniques
def ma_get_definitions():
    """Load MITRE ATT&CK Enterprise attack-pattern objects and parse them."""
    taxii_source = TAXIICollectionSource(Collection(MA_ENTERPRISE_TAXII_URL))
    attack_patterns = taxii_source.query(Filter("type", "=", "attack-pattern"))
    return ma_parse(attack_patterns)
def polling(poll_url):
    """Poll a TAXII collection for indicator objects, printing and saving each.

    Args:
        poll_url: URL of the TAXII collection to poll.

    NOTE(review): credentials are hard-coded here — move them to
    configuration or environment variables.
    """
    collection = Collection(poll_url, 'user1', 'Password1')
    tc_source = TAXIICollectionSource(collection)
    indicator_filter = Filter("type", "=", "indicator")
    # enumerate() replaces the manual `n = 0; n += 1` counter.
    for n, indicator in enumerate(tc_source.query([indicator_filter])):
        print(indicator)
        filewrite(indicator, n)
def mitre_tactics_import():
    """Import MITRE ATT&CK tactics as phases of the 'mitre-attack' kill-chain."""
    taxii_source = TAXIICollectionSource(Collection(MITRE_TACTICS_URL))
    kill_chain = KillChains.get_or_create(
        name='mitre-attack', human_name='MITRE ATT&CK')
    kill_chain.description = (
        'The MITRE ATT&CK tactics are represented as kill-chains in STIX2')
    kill_chain.save()
    for tactic in taxii_source.query(Filter('type', '=', 'x-mitre-tactic')):
        print(f'Adding {tactic["x_mitre_shortname"]}')
        kill_chain.add_phase_to_killchain({
            'name': tactic['x_mitre_shortname'],
            'description': tactic['description'],
        })
def main():
    """Query an ATT&CK matrix over TAXII and dump technique/data-source reports.

    Output files, driven by CLI flags: data_sources.txt,
    matching_techniques.txt, technique_metadata.csv, all_techniques.csv.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--matrix",
                        type=str,
                        action="store",
                        default="enterprise",
                        help="Matrix to query (enterprise, mobile, pre)")
    # Techniques
    parser.add_argument(
        "--dump-all-techniques",
        action="store_true",
        help="Dump a CSV file with technique,subtechnique,name")
    # Data Sources
    parser.add_argument(
        "--dump-data-sources",
        action="store_true",
        help="Dump a list of unique data sources to data_sources.txt")
    parser.add_argument(
        "--dump-metadata",
        action="store_true",
        help=
        "Dump a CSV file technique-metadata.csv, containing unique technique-metadata pairings."
    )
    parser.add_argument(
        "--dump-matching-techniques",
        action="store_true",
        help=
        "Dump techniques that map to match-data-sources to matching-techniques.txt"
    )
    parser.add_argument(
        "--match-data-sources",
        type=str,
        action="store",
        help=
        "A file containing a list of data sources that to match against techniques."
    )
    args = parser.parse_args()

    match_data_sources = None
    if args.match_data_sources:
        match_data_sources = parse_data_source_list(args.match_data_sources)

    # Matrix name -> TAXII collection id.
    matrix_ids = {
        'pre': "062767bd-02d2-4b72-84ba-56caef0f8658",
        'mobile': "2f669986-b40b-4423-b720-4396ca6a462b",
        'enterprise': "95ecc380-afe9-11e4-9b6c-751b66dd541e",
    }
    args.matrix = args.matrix.lower()
    if args.matrix not in matrix_ids:
        # Fix: an unrecognized matrix previously left `matrix` unbound and
        # crashed later with a NameError; fail fast with a usage error.
        parser.error('Unknown matrix: {0}'.format(args.matrix))
    matrix = matrix_ids[args.matrix]

    # Initialize dictionary to hold ATT&CK content.
    attack = {}
    # Establish TAXII2 Collection instance for the selected matrix.
    collection = Collection(
        "https://cti-taxii.mitre.org/stix/collections/{0}/".format(matrix))
    # Supply the collection to TAXIICollection.
    tc_source = TAXIICollectionSource(collection)
    # Create filters to retrieve content from the matrix.
    filter_objs = {"techniques": Filter("type", "=", "attack-pattern")}
    # Retrieve all matrix content.
    for key in filter_objs:
        attack[key] = tc_source.query(filter_objs[key])
    all_techniques = remove_revoked_deprecated(attack["techniques"])

    technique_count = 0
    techniques_without_data_source = 0
    techniques_observable = 0
    techniques_with_data_sources = []
    data_sources = set()
    matching_techniques = set()
    for technique in all_techniques:
        technique_count += 1
        technique_id = technique['external_references'][0]['external_id']
        if 'x_mitre_data_sources' in technique.keys():
            if match_data_sources is not None:
                # `== True` kept deliberately: data_source_match's return
                # type is not visible here — TODO confirm it returns a bool.
                if data_source_match(technique['x_mitre_data_sources'],
                                     match_list=match_data_sources) == True:
                    techniques_observable += 1
                    if args.dump_matching_techniques:
                        matching_techniques.add(technique_id)
            if args.dump_data_sources:
                # Plain loops replace the original side-effect-only list
                # comprehensions.
                for data_source in technique['x_mitre_data_sources']:
                    data_sources.add(data_source)
            if args.dump_metadata:
                for data_source in technique['x_mitre_data_sources']:
                    techniques_with_data_sources.append(
                        (technique_id, data_source))
        else:
            techniques_without_data_source += 1

    # Output files based on input arguments.
    if match_data_sources is not None:
        print('Techniques: {0}'.format(technique_count))
        print('Techniques Observable: {0} ({1}%)'.format(
            techniques_observable,
            round((techniques_observable / technique_count) * 100)))
    if args.dump_data_sources:
        with open('data_sources.txt', 'w') as fh_data_sources:
            for data_source in sorted(data_sources):
                fh_data_sources.write('{0}\n'.format(data_source))
    if args.dump_matching_techniques:
        with open('matching_techniques.txt', 'w') as fh_matching_techniques:
            for technique_id in sorted(matching_techniques):
                fh_matching_techniques.write('{0}\n'.format(technique_id))
    if args.dump_metadata:
        with open('technique_metadata.csv', 'w') as fh_techniques:
            csvwriter = csv.writer(fh_techniques, quoting=csv.QUOTE_ALL)
            csvwriter.writerow(['id', 'data_source'])
            for technique in techniques_with_data_sources:
                csvwriter.writerow(technique)
    if args.dump_all_techniques:
        with open('all_techniques.csv', 'w') as fh_all_techniques:
            csvwriter = csv.writer(fh_all_techniques, quoting=csv.QUOTE_ALL)
            csvwriter.writerow([
                'technique_id', 'technique_name', 'technique_url',
                'technique_description'
            ])
            for technique in all_techniques:
                # Handle techniques that do not have a description (these
                # probably should not exist and should be contributed).
                try:
                    description = technique.description
                except AttributeError:
                    description = 'NONE'
                csvwriter.writerow([
                    technique.external_references[0].external_id,
                    technique.name, technique.external_references[0].url,
                    description
                ])
def taxii_import(server_url, collection_url):
    """Import STIX2 objects and relationships from a TAXII collection.

    Args:
        server_url: optional TAXII server URL; when given, the collection
            URL is discovered from it.
        collection_url: optional TAXII collection URL to import from.

    Exits with -1 when neither argument is supplied.
    """
    if not (server_url or collection_url):
        print('Please specify one of --server_url or --collection_url')
        exit(-1)
    if server_url:
        collection_url = _get_collection_url(server_url)
    if collection_url:
        print(
            'Importing data from collection at: {0:s}'.format(collection_url))
    collection = Collection(collection_url)
    tc_source = TAXIICollectionSource(collection)
    # STIX type name -> Yeti model class.
    object_classes = {
        'attack-pattern': attack_pattern.AttackPattern,
        'campaign': campaign.Campaign,
        'course-of-action': course_of_action.CourseOfAction,
        'identity': identity.Identity,
        'intrusion-set': intrusion_set.IntrusionSet,
        'malware': malware.Malware,
        'threat-actor': threat_actor.ThreatActor,
        'tool': tool.Tool,
        'vulnerability': vulnerability.Vulnerability,
    }
    # Maps STIX id -> stored object, used below to resolve relationships.
    all_objects = {}
    for name, yeti_class in object_classes.items():
        print('Fetching', name)
        stats = {
            'updated': 0,
            'new': 0,
            'skipped': 0,
        }
        try:
            for item in tc_source.query(Filter('type', '=', name)):
                item_json = json.loads(item.serialize())
                obj = yeti_class.get(item.id)
                if not obj:
                    obj = yeti_class(**item).save()
                    stats['new'] += 1
                # Fix: the skip/update checks previously also ran for freshly
                # created objects (plain `if` after creation), double-counting
                # every new object as 'skipped'. They now apply only to
                # pre-existing objects.
                elif obj.modified >= item.modified or obj.revoked or obj.equals(
                        item_json):
                    stats['skipped'] += 1
                elif obj.modified < item.modified:
                    obj.update(item_json)
                    stats['updated'] += 1
                all_objects[item['id']] = obj
        except requests.exceptions.HTTPError as error:
            print(f'HTTPError: {error}')
        except datastore.DataSourceError as error:
            print(f'DataSourceError: {error}')
        print(
            f"[{name}] New: {stats['new']}, Updated: {stats['updated']}, "
            f"Skipped: {stats['skipped']}")
    print('Getting relationships')
    stats = 0
    taxii_filter = Filter('type', '=', 'relationship')
    for relationship in tc_source.query(taxii_filter):
        source = all_objects.get(relationship.source_ref)
        target = all_objects.get(relationship.target_ref)
        if source is None or target is None:
            # Robustness fix: direct indexing raised KeyError when an
            # endpoint was not among the imported objects; skip instead.
            print(f'Skipping relationship with missing endpoint: '
                  f'{relationship.source_ref} -> {relationship.target_ref}')
            continue
        stats += 1
        source.link_to(target, stix_rel=json.loads(relationship.serialize()))
    print('Added {0:d} relationships'.format(stats))
# NOTE: a block of commented-out experiments (alternate filter dicts and a
# print of attack_dict["techniques"][1]) was removed here as dead code.
# ===============================================================================
technique_filter = Filter("type", "=", "attack-pattern")
response = tc_source.query(technique_filter)
print('Type: ' + str(type(response)))
print('--')
first_technique = response[0]
kill_chain_phases = first_technique['kill_chain_phases']
print(first_technique)
print(kill_chain_phases)
print('Type: ' + str(type(kill_chain_phases)))
print('Len: ' + str(len(kill_chain_phases)))
print('List:' + str(kill_chain_phases[0]))
print(kill_chain_phases[0]['phase_name'])