def create_infrastructure_object_sdo(self, infrastructure_object, enriched_ioc, indicator_id):
    """Build a STIX 2.1 infrastructure SDO for an enriched IOC and link it to
    the given indicator with relationship object(s).

    :param infrastructure_object: dict with 'infrastructure_types' and an optional 'description'
    :param enriched_ioc: the IOC value this infrastructure relates to (used in the name)
    :param indicator_id: STIX id of the indicator the SDO should be related to
    :return: list containing the infrastructure SDO followed by its relationship object(s)
    :raises Exception: when optional schema validation fails or a property is malformed
    """
    try:
        timestamp = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
        # Fall back to a comma-joined type list when no description was supplied.
        if infrastructure_object.get('description') is not None:
            description = infrastructure_object['description']
        else:
            description = ','.join(infrastructure_object.get('infrastructure_types'))
        infrastructure_sdo = {
            'type': 'infrastructure',
            'spec_version': '2.1',
            'id': 'infrastructure--' + str(uuid.uuid4()),
            'created': timestamp,
            'modified': timestamp,
            'name': 'Infrastructure related to ' + enriched_ioc,
            'infrastructure_types': self.normalized_infra_type(infrastructure_object['infrastructure_types']),
            'description': description
        }
        # Optionally verify the SDO against the STIX 2.1 schema.
        if self.stix_validator:
            results = validate_instance(infrastructure_sdo, ValidationOptions(version="2.1"))
            if results.is_valid is False:
                print_results(results)
                raise Exception(f'Invalid parameter set in infrastructure SDO. Please follow STIX 2.1 spec for properties')
        sdo_list = [infrastructure_sdo]
        sdo_list += self.createRelationship(sdo_list, indicator_id)
        return sdo_list
    except Exception as err:
        raise Exception(f'Exception occurred in create_infrastructure_object_sdo : {err}')
def create_identity_sdo(self, data_source, namespace):
    """Build a STIX 2.1 identity SDO with a deterministic id derived from the
    data source name and the given UUID namespace.

    :param data_source: dict describing the data source; 'name' is required,
        'description', 'roles', 'identity_class', 'sectors' and
        'contact_information' are optional
    :param namespace: UUID string used as the uuid5 namespace
    :return: single-element list containing the identity SDO
    :raises Exception: when optional schema validation fails or a property is malformed
    """
    try:
        # uuid5 keeps the identity id stable across runs for the same source name.
        DETERMINISTIC_IDENTITY_ID = uuid.uuid5(uuid.UUID(namespace), data_source['name'])
        DEFAULT_SPEC_VERSION = '2.1'
        stix_type = 'identity'
        now = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
        stix_identity_sdo = {
            'type': stix_type,
            'name': data_source['name'],
            'spec_version': DEFAULT_SPEC_VERSION,
            'id': stix_type + '--' + str(DETERMINISTIC_IDENTITY_ID),
            'created': now,
            'modified': now,
        }
        # Copy optional properties when present. BUG FIX: the original if-chain
        # contained the 'sectors' check twice; the duplicate is removed here.
        for prop in ('description', 'roles', 'identity_class', 'sectors', 'contact_information'):
            if data_source.get(prop):
                stix_identity_sdo[prop] = data_source[prop]
        # Optionally verify the SDO against the STIX 2.1 schema.
        if self.stix_validator:
            options = ValidationOptions(version="2.1")
            results = validate_instance(stix_identity_sdo, options)
            if results.is_valid is False:
                print_results(results)
                raise Exception(f'Invalid parameter set in identity SDO. Please follow STIX 2.1 spec for properties')
        return [stix_identity_sdo]
    except Exception as err:
        raise Exception(f'Exception occurred in create_identity_sdo in BaseNormalization : {err}')
def main():
    """Command-line entry point: validate STIX input and exit with a status code."""
    cli_options = parse_args(sys.argv[1:], is_script=True)

    # Prompt only when running interactively on a terminal with no piped input.
    if cli_options.files == sys.stdin and os.isatty(0):
        print('Input STIX content, then press Ctrl+D: ')

    try:
        # Validate the input documents and report the findings.
        validation_results = run_validation(cli_options)
        print_results(validation_results)
        # Determine exit status code and exit.
        sys.exit(codes.get_code(validation_results))
    except (ValidationError, IOError) as ex:
        output.error("Validation error occurred: '%s'" % str(ex),
                     codes.EXIT_VALIDATION_ERROR)
    except Exception:
        logger.exception("Fatal error occurred")
        sys.exit(codes.EXIT_FAILURE)
def create_sighting_sdo(self, sighting_object, indicator_id):
    """Build a STIX 2.1 sighting SDO referencing the given indicator.

    :param sighting_object: dict carrying the sighting data; must contain 'count'
    :param indicator_id: STIX id of the indicator that was sighted
    :return: single-element list containing the sighting SDO
    :raises Exception: when optional schema validation fails or a property is malformed
    """
    try:
        timestamp = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
        sighting_sdo = {
            'type': 'sighting',
            'spec_version': '2.1',
            'id': 'sighting--' + str(uuid.uuid4()),
            'sighting_of_ref': indicator_id,
            'count': sighting_object['count'],
            'created': timestamp,
            'modified': timestamp
        }
        # Optionally verify the SDO against the STIX 2.1 schema.
        if self.stix_validator:
            results = validate_instance(sighting_sdo, ValidationOptions(version="2.1"))
            if results.is_valid is False:
                print_results(results)
                raise Exception(f'Invalid parameter set in sighting SDO. Please follow STIX 2.1 spec for properties')
        return [sighting_sdo]
    except Exception as err:
        raise Exception(f'Exception occurred in create_sighting_sdo in BaseNormalization : {err}')
def main():
    """Parse CLI arguments, validate the input documents, and exit accordingly."""
    arg_parser = _get_arg_parser()
    parsed = arg_parser.parse_args()
    validation_options = ValidationOptions(parsed)

    try:
        # Honour the requested verbosity (e.g., quiet vs. verbose) up front.
        output.set_level(validation_options.verbose)
        # Validate the input documents and report the findings.
        validation_results = run_validation(validation_options)
        print_results(validation_results)
        # Determine exit status code and exit.
        sys.exit(codes.get_code(validation_results))
    except (ValidationError, IOError) as ex:
        output.error("Validation error occurred: '%s'" % str(ex),
                     codes.EXIT_VALIDATION_ERROR)
    except Exception:
        logging.exception("Fatal error occurred")
        sys.exit(codes.EXIT_FAILURE)
def validatefile(self, filename):
    """Validate a STIX 2 file and print the validation results.

    :param filename: path of the STIX 2 file to validate
    """
    print_results(validate_file(filename))
def transform(self, obj):
    """Convert a datasource object into a STIX observed-data object using the
    ds-to-stix mapping and transform functions.

    :param obj: the datasource object that is being converted to stix
    :return: the input object converted to stix valid json
    """
    object_map = {}
    observation = {
        'id': 'observed-data--' + str(uuid.uuid4()),
        'type': 'observed-data',
        'created_by_ref': self.identity_id,
        'objects': {}
    }

    # create normal type objects
    if not isinstance(obj, dict):
        print("Not a dict: {}".format(obj))
    else:
        for ds_key in obj.keys():
            self._transform(object_map, observation, self.ds_to_stix_map, ds_key, obj)

    # Validate each STIX object
    if self.stix_validator:
        print_results(validate_instance(observation))

    return observation
def transform(self, obj):
    """Transforms the given object in to a STIX observation based on the
    mapping file and transform functions

    :param obj: the datasource object that is being converted to stix
    :return: the input object converted to stix valid json
    """
    NUMBER_OBSERVED_KEY = 'number_observed'
    object_map = {}
    stix_type = 'observed-data'
    ds_map = self.ds_to_stix_map
    # FIX: compute the timestamp once so 'created'/'modified' (and the observed
    # window fallback below) are guaranteed identical; the original called
    # utcnow() separately for each field, which could yield differing values.
    now = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
    observation = {
        'id': stix_type + '--' + str(uuid.uuid4()),
        'type': stix_type,
        'created_by_ref': self.identity_id,
        'created': now,
        'modified': now,
        'objects': {}
    }
    # create normal type objects
    if isinstance(obj, dict):
        for ds_key in obj.keys():
            self._transform(object_map, observation, ds_map, ds_key, obj)
    else:
        self.logger.debug("Not a dict: {}".format(obj))

    # Add required properties to the observation if they weren't added via the mapping
    if self.options.get('unmapped_fallback'):
        if "first_observed" not in observation and "last_observed" not in observation:
            observation['first_observed'] = now
            observation['last_observed'] = now

    # Add required property to the observation if it wasn't added via the mapping
    if NUMBER_OBSERVED_KEY not in observation:
        observation[NUMBER_OBSERVED_KEY] = 1

    # Validate each STIX object
    if self.stix_validator:
        validated_result = validate_instance(observation)
        print_results(validated_result)
    return observation
def create_malware_sdo(self, malware_object, indicator_id, enriched_ioc):
    """Build deduplicated STIX 2.1 malware SDOs for an enriched IOC and link
    them to the given indicator via relationship objects.

    :param malware_object: list of dicts describing malware ('name',
        'malware_types', 'is_family', 'description', plus extra attributes)
    :param indicator_id: STIX id of the indicator the malware relates to
    :param enriched_ioc: the IOC value, used for a fallback name
    :return: list of malware SDOs followed by relationship object(s)
    :raises Exception: when optional schema validation fails or a property is malformed
    """
    try:
        malware_array = []
        if isinstance(malware_object, list):
            for data in malware_object:
                now = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
                malware = {
                    'type': 'malware',
                    'name': data.get('name') if data.get('name') is not None else 'Malware related to ' + enriched_ioc,
                    'spec_version': '2.1',
                    'id': 'malware--' + str(uuid.uuid4()),
                    'created': now,
                    'modified': now,
                    'malware_types': data.get('malware_types') if data.get('malware_types') is not None else ['unknown'],
                    'is_family': data.get('is_family') if data.get('is_family') is not None else False
                }
                # Copy any additional attributes supplied by the feed.
                # BUG FIX: the original condition was `if key is not malware`
                # — an identity comparison between a string key and the dict
                # itself, which is always true; the intent is to avoid
                # clobbering keys already set above.
                # NOTE: null/empty values are not filtered here; the caller
                # must not send such data.
                for key, value in data.items():
                    if key not in malware:
                        malware[key] = value
                # Set the description to the malware types returned by the
                # threat feed when no description property is provided.
                if data.get('description'):
                    malware['description'] = data.get('description')
                elif data.get('malware_types') and 'unknown' not in data.get('malware_types'):
                    malware['description'] = ','.join(data.get('malware_types')) if isinstance(data.get('malware_types'), list) else data.get('malware_types')
                malware['malware_types'] = self.normalized_malware_type(malware['malware_types'])
                # malware SDO properties validation
                if self.stix_validator:
                    options = ValidationOptions(version="2.1")
                    results = validate_instance(malware, options)
                    if results.is_valid is False:
                        print_results(results)
                        raise Exception(f'Invalid parameter set in malware SDO. Please follow STIX 2.1 spec for properties')
                # Skip duplicates: an entry with the same malware_types and name.
                if not any(i['malware_types'] == malware['malware_types'] and i['name'] == malware['name'] for i in malware_array):
                    malware_array.append(malware)
        relationship = self.createRelationship(malware_array, indicator_id)
        malware_array += relationship
        return malware_array
    except Exception as err:
        raise Exception(f'Exception occurred in create_malware_sdo in BaseNormalization : {err}')
def create_indicator_sdo(self, indicator_object: dict, identity_id: str, extension_id: str = None, nested_properties: list = None, top_properties: list = None):
    """Build a STIX 2.1 indicator SDO from the supplied properties.

    :param indicator_object: dict of indicator properties; 'pattern' is required
    :param identity_id: STIX id recorded as created_by_ref
    :param extension_id: optional extension-definition id to attach via add_extension
    :param nested_properties: optional properties for a nested property extension
    :param top_properties: optional top-level extension properties
    :return: single-element list containing the indicator SDO
    :raises ValueError: when the required 'pattern' property is missing
    """
    try:
        # Exception handle required property
        if 'pattern' not in indicator_object:
            raise ValueError(f'Missing required indicator property: pattern')
        timestamp = "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
        indicator = {
            'type': 'indicator',
            'spec_version': '2.1',
            'id': 'indicator--' + str(uuid.uuid4()),
            'pattern': indicator_object['pattern'],
            'pattern_type': 'stix',
            'created_by_ref': identity_id,
            'created': timestamp,
            'modified': timestamp,
            'valid_from': timestamp,
        }
        # Copy over whichever optional properties the caller supplied.
        for prop in ('name', 'description', 'pattern_version', 'valid_until',
                     'kill_chain_phases', 'indicator_types', 'external_references'):
            if indicator_object.get(prop):
                indicator[prop] = indicator_object[prop]
        if extension_id:
            indicator = self.add_extension(indicator, extension_id, nested_properties, top_properties)
        # indicator SDO properties validation
        if self.stix_validator:
            results = validate_instance(indicator, ValidationOptions(version="2.1"))
            if results.is_valid is False:
                print_results(results)
                raise Exception(f'Invalid parameter set in indicator SDO. Please follow STIX 2.1 spec for properties')
        return [indicator]
    except ValueError as err:
        raise ValueError(err)
def assertFalseWithOptions(self, instance, **kwargs):
    """Test that the given instance is NOT valid when using the validation
    options provided by kwargs.

    Args:
        instance: The JSON string to be validated.
        kwargs: Any number of keyword arguments to be passed to the
            ValidationOptions constructor.
    """
    # Default to strict validation unless the caller set 'strict' explicitly.
    opts = (ValidationOptions(version="2.0", **kwargs)
            if 'strict' in kwargs
            else ValidationOptions(strict=True, version="2.0", **kwargs))
    outcome = validate_parsed_json(instance, opts)
    print_results(outcome)
    self.assertEqual(outcome.is_valid, False)
def __main__():
    """Validate the STIX bundle file given as the first CLI argument and print
    a human-readable verdict.

    FIX: corrected the user-facing typo "ingnored" -> "ignored".
    """
    bundle_file = sys.argv[1]
    try:
        with open(bundle_file) as f:
            bundle = json.load(f)
        results = validate_instance(bundle)
        if results.is_valid is not True:
            # Show the detailed per-object findings, then fall through to the
            # failure banner via the generic Exception handler below.
            print_results(results)
            raise Exception()
        print('*** STIX Bundle validated!!\n')
    except ValueError as ex:
        # json.load raises ValueError (JSONDecodeError) on malformed input.
        print("*** Malformed JSON in the STIX Bundle: " + str(ex))
    except Exception as ex:
        print(
            "\n *** Invalid STIX Objects found in the bundle. Please fix the error marked as Red[X]. Warnings marked as yellow [!] can be ignored but recommended to fix ***\n"
        )
def create_extension_sdo(self, identity_object, namespace, nested_properties=None, toplevel_properties=None, schema='https://www.ibm.com/cp4s'):
    """Create an extension-definition object to be used in conjunction with a
    STIX Indicator object.

    :param identity_object: identity SDO of the creator; its 'id' is required
    :param namespace: UUID string used to derive a deterministic extension id
    :param nested_properties: optional property names for a property-extension
    :param toplevel_properties: optional property names for a toplevel-property-extension
    :param schema: URL of the schema describing the extension
    :return: single-element list containing the extension-definition SDO
    :raises Exception: when optional schema validation fails or a property is malformed
    """
    try:
        # BUG FIX: the defaults were mutable lists ([]), which are shared
        # across calls in Python; use None sentinels instead.
        nested_properties = [] if nested_properties is None else nested_properties
        toplevel_properties = [] if toplevel_properties is None else toplevel_properties
        stix_type = 'extension-definition'
        DEFAULT_SPEC_VERSION = "2.1"
        EXTENSION_VERSION = '1.2.1'
        extension_object = {
            # uuid5 keeps the extension id stable for a given namespace.
            'id': stix_type + '--' + str(uuid.uuid5(uuid.UUID(namespace), 'extension-definition')),
            'type': stix_type,
            'spec_version': DEFAULT_SPEC_VERSION,
            'name': (identity_object.get('name') + ' extension') if identity_object.get('name') is not None else "extension definition object",
            'created': "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]),
            'modified': "{}Z".format(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]),
            'created_by_ref': identity_object['id'],
            'schema': schema,
            'version': EXTENSION_VERSION,
        }
        if identity_object.get('description'):
            extension_object['description'] = 'Extension object for ' + identity_object.get('description')
        if (len(nested_properties) > 0 or len(toplevel_properties) > 0):
            extension_object['extension_types'] = []
            extension_object['extension_properties'] = []
        if (len(toplevel_properties) > 0):
            extension_object['extension_types'].append('toplevel-property-extension')
            for prop in toplevel_properties:
                extension_object['extension_properties'].append(prop)
        if (len(nested_properties) > 0):
            extension_object['extension_types'].append('property-extension')
            # property-extension alone carries no top-level property list.
            if (not len(extension_object['extension_properties']) > 0):
                del extension_object['extension_properties']
        if self.stix_validator:
            options = ValidationOptions(version="2.1")
            results = validate_instance(extension_object, options)
            if results.is_valid is False:
                print_results(results)
                raise Exception(f'Invalid parameter set in extension_object SDO. Please follow STIX 2.1 spec for properties')
        stix_extension_sdo = [extension_object]
        return stix_extension_sdo
    except Exception as err:
        raise Exception(f'Exception occurred in create_extension_sdo in BaseNormalization : {err}')
def __main__():
    """Validate the STIX bundle file named on the command line and print the
    findings together with a human-readable summary banner."""
    bundle_file = sys.argv[1]
    try:
        with open(bundle_file) as f:
            bundle = json.load(f)
        results = validate_instance(bundle)
        # The detailed findings are printed in both the valid and invalid case.
        print_results(results)
        if results.is_valid:
            print(
                "\n *** STIX bundle is valid but may contain warnings. Warnings marked as yellow [!] can be ignored but recommended to fix ***\n"
            )
        else:
            print(
                "\n *** Invalid STIX Objects found in the bundle. Please fix the error marked as Red[X]. Warnings marked as yellow [!] can be ignored but recommended to fix ***\n"
            )
    except ValueError as ex:
        print("*** Malformed JSON in the STIX Bundle: " + str(ex))
def main():
    """Entry point: validate the documents named on the command line and exit
    with the appropriate status code."""
    cli_options = parse_args(sys.argv[1:], is_script=True)
    try:
        # Validate the input documents and report the findings.
        validation_results = run_validation(cli_options)
        print_results(validation_results)
        # Determine exit status code and exit.
        sys.exit(codes.get_code(validation_results))
    except (ValidationError, IOError) as ex:
        output.error(
            "Validation error occurred: '%s'" % str(ex),
            codes.EXIT_VALIDATION_ERROR
        )
    except Exception:
        logger.exception("Fatal error occurred")
        sys.exit(codes.EXIT_FAILURE)
def main():
    """Build GRIZZLY STEPPE STIX content from test.csv, optionally enrich it
    via VirusTotal, write four bundles of increasing context to ./out,
    validate them, and push the trusted set via put_elk.

    FIXES: csv input is now opened in text mode (Python 3 requires it),
    output bundles are written in text mode ('w', not 'wb'), and the
    `is not ''` identity comparisons are replaced with `!=`.
    """
    # Campaign object the whole report hangs off.
    gs_cam = Campaign(
        name='GRIZZLY STEPPE',
        created_by_ref=default_creator.id,
        object_marking_refs=[default_tlp.id],
        first_seen=datetime.datetime(2015, 6, 1, 0, 0, 0).isoformat('T') + 'Z',
        description=
        'Cyber-enabled operations alleged by US Government to originate from Russian activity against US Political targets'
    )
    # Attribution Objects and linkages (pivoting off Campaign)
    ris_ta = ThreatActor(
        name='RIS',
        description='Russian civilian and military intelligence Services (RIS)',
        created_by_ref=default_creator.id,
        object_marking_refs=[default_tlp.id],
        labels=['nation-state'])
    apt28_is = IntrusionSet(name='APT28',
                            created_by_ref=default_creator.id,
                            object_marking_refs=[default_tlp.id])
    apt29_is = IntrusionSet(name='APT29',
                            created_by_ref=default_creator.id,
                            object_marking_refs=[default_tlp.id])
    apt28_ris_rel = Relationship(relationship_type='attributed-to',
                                 source_ref=apt28_is.id,
                                 target_ref=ris_ta.id)
    apt29_ris_rel = Relationship(relationship_type='attributed-to',
                                 source_ref=apt29_is.id,
                                 target_ref=ris_ta.id)
    gs_apt28_rel = Relationship(relationship_type='attributed-to',
                                source_ref=gs_cam.id,
                                target_ref=apt28_is.id)
    gs_apt29_rel = Relationship(relationship_type='attributed-to',
                                source_ref=gs_cam.id,
                                target_ref=apt29_is.id)
    attribution = [
        gs_cam, ris_ta, apt28_is, apt29_is, apt28_ris_rel, apt29_ris_rel,
        gs_apt28_rel, gs_apt29_rel
    ]
    data = []
    # BUG FIX: csv.reader needs a text-mode file in Python 3 (newline='' per
    # the csv docs); 'rb' raised a TypeError. Also `row[0] is not ''` only
    # worked by accident of string interning — use != for value comparison.
    with open('test.csv', 'r', newline='') as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            if row[0] != '' and row[1] != '':
                data.append([row[0], row[1]])
    list_length = len(data)
    uri_inds = [[] for j in range(list_length)]
    ip_inds = [[] for j in range(list_length)]
    # (removed unused mal_inds list from the original)
    for i in range(list_length):
        is_file = False  # NOTE(review): set for MD5 rows but currently unused
        pattern_type = None
        pattern_value = data[i][0]
        desc = 'Suspected GRIZZLY STEPPE comms'
        # Map the CSV indicator type onto a STIX pattern object path.
        if data[i][1] == 'IPV4ADDR':
            pattern_type = "ipv4-addr:value"
        elif data[i][1] == 'FQDN':
            pattern_type = "domain-name:value"
        elif data[i][1] == 'URL':
            pattern_type = "url:value"
        elif data[i][1] == 'MD5':
            is_file = True
            pattern_type = "file:hashes.MD5"
        if ENRICH:
            # Pull related network infrastructure observed by VirusTotal.
            vt_results = get_behavior_VT(pattern_value)
            if vt_results:
                for netloc in vt_results['netlocs']:
                    uri_ind = Indicator(
                        name='GRIZZLY STEPPE URI',
                        created_by_ref=vt_ident.id,
                        object_marking_refs=[default_tlp.id],
                        labels=['malicious-activity'],
                        pattern="[domain-name:value = '%s']" % str(netloc))
                    uri_inds[i].append(uri_ind)
                for ip in vt_results['ips']:
                    ip_ind = Indicator(
                        name='GRIZZLY STEPPE IP',
                        created_by_ref=vt_ident.id,
                        object_marking_refs=[default_tlp.id],
                        labels=['malicious-activity'],
                        pattern="[ipv4-addr:value = '%s']" % str(ip))
                    ip_inds[i].append(ip_ind)
        if pattern_type is not None:
            ind = Indicator(name='GRIZZLY STEPPE',
                            created_by_ref=default_creator.id,
                            object_marking_refs=[default_tlp.id],
                            description=desc,
                            labels=['malicious-activity'],
                            pattern="[%s = '%s']" % (pattern_type, pattern_value))
            data[i].append(ind)
            if uri_inds[i] or ip_inds[i]:
                # Enrichment hit: tie the activity to OnionDuke C2 malware.
                gs_mal = Malware(
                    name='OnionDuke',
                    created_by_ref=default_creator.id,
                    object_marking_refs=[default_tlp.id],
                    description='Malware identified as OnionDuke C2 software',
                    labels=['remote-access-trojan'])
                data[i].append(gs_mal)
                cam_mal_rel = Relationship(relationship_type='uses',
                                           source_ref=gs_cam.id,
                                           target_ref=gs_mal.id)
                data[i].append(cam_mal_rel)
                ind_mal_rel = Relationship(relationship_type='indicates',
                                           source_ref=ind.id,
                                           target_ref=gs_mal.id)
                data[i].append(ind_mal_rel)
                if uri_inds[i]:
                    for uri in uri_inds[i]:
                        uri_malind_rel = Relationship(
                            relationship_type='indicates',
                            source_ref=uri.id,
                            target_ref=gs_mal.id)
                        data[i].append(uri_malind_rel)
                if ip_inds[i]:
                    for ip in ip_inds[i]:
                        ip_malind_rel = Relationship(
                            relationship_type='indicates',
                            source_ref=ip.id,
                            target_ref=gs_mal.id)
                        data[i].append(ip_malind_rel)
            else:
                ind_cam_rel = Relationship(relationship_type='indicates',
                                           source_ref=ind.id,
                                           target_ref=gs_cam.id)
                data[i].append(ind_cam_rel)
    all_sdo = get_all_SDO()
    orig_report = []
    enrich_inds = []
    ind_cam_rels = []
    relationships = []
    malwarez = []
    for sdo in all_sdo:
        if sdo.type == 'indicator':
            if sdo.name == 'GRIZZLY STEPPE':
                # Just the report
                orig_report.append(sdo)
            else:
                # Extra stuff from enrichment (if any)
                enrich_inds.append(sdo)
        # All of the other stuff I added!
        if sdo.type == 'relationship':
            relationships.append(sdo)
            if sdo.target_ref == gs_cam.id:
                ind_cam_rels.append(sdo)
        if sdo.type == 'malware':
            malwarez.append(sdo)
    bun_ind = Bundle(objects=orig_report)
    attr_context = orig_report + attribution + ind_cam_rels
    bun_attr = Bundle(objects=attr_context)
    full_context = attr_context + malwarez + enrich_inds + relationships
    bun_full = Bundle(objects=full_context)
    trusted = attribution + malwarez + enrich_inds + relationships
    bun_trust = Bundle(objects=trusted)
    # BUG FIX: the bundles are serialized via str(), so the files must be
    # opened in text mode ('w'), not binary ('wb').
    with open('./out/1_orig_report.json', 'w') as f:
        f.write(str(bun_ind))
    with open('./out/2_with_attribution.json', 'w') as f:
        f.write(str(bun_attr))
    with open('./out/3_all_enriched.json', 'w') as f:
        f.write(str(bun_full))
    with open('./out/4_trusted.json', 'w') as f:
        f.write(str(bun_trust))
    results = validate_file('./out/1_orig_report.json')
    print_results(results)
    results = validate_file('./out/2_with_attribution.json')
    print_results(results)
    results = validate_file('./out/3_all_enriched.json')
    print_results(results)
    results = validate_file('./out/4_trusted.json')
    print_results(results)
    results = put_elk(*trusted)
def transform(self, obj):
    """
    Transforms the given object in to a STIX observation based on the mapping file and transform functions

    :param obj: the datasource object that is being converted to stix
    :return: the input object converted to stix valid json
    """
    object_map = {}
    stix_type = 'observed-data'
    ds_map = self.ds_to_stix_map
    # One timestamp reused for created/modified and the observed-window
    # fallbacks below, so they are guaranteed to match.
    now = "{}Z".format(
        datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
    object_id_map = {}
    observation = {
        'id': stix_type + '--' + str(uuid.uuid4()),
        'type': stix_type,
        'created_by_ref': self.identity_id,
        'created': now,
        'modified': now,
        'objects': {}
    }
    # create normal type objects
    if isinstance(obj, dict):
        for ds_key in obj.keys():
            self._transform(object_map, observation, ds_map, ds_key, obj)
    else:
        self.logger.debug("Not a dict: {}".format(obj))
    # special case:
    # remove object if:
    # a reference attribute object does not contain at least one property other than 'type'
    self._cleanup_references(object_map, observation, ds_map)
    # Add required properties to the observation if it wasn't added from the mapping
    if FIRST_OBSERVED_KEY not in observation:
        observation[FIRST_OBSERVED_KEY] = now
    if LAST_OBSERVED_KEY not in observation:
        observation[LAST_OBSERVED_KEY] = now
    if NUMBER_OBSERVED_KEY not in observation:
        observation[NUMBER_OBSERVED_KEY] = 1
    # STIX 2.1 output: replace the numeric-keyed cybox objects with
    # deterministic ids and reference them from observed-data.
    if self.spec_version == "2.1":
        cybox_objects = observation["objects"]
        self._generate_and_apply_deterministic_id(object_id_map, cybox_objects)
        self._replace_references(object_id_map, cybox_objects)
        object_refs = []
        # add cybox references to observed-data object
        for key, value in object_id_map.items():
            object_refs.append(value)
        observation["object_refs"] = object_refs
        observation["spec_version"] = "2.1"
        # presumably accumulates objects for bundle-level deduplication —
        # TODO confirm against _collect_unique_cybox_objects
        self._collect_unique_cybox_objects(cybox_objects)
    # Validate each STIX object
    if self.stix_validator:
        validated_result = validate_instance(observation)
        print_results(validated_result)
    return observation
def transform(self, obj):
    """
    Transforms the given object in to a STIX observation based on the mapping file and transform functions

    :param obj: the datasource object that is being converted to stix
    :return: the input object converted to stix valid json
    """
    object_map = {}
    stix_type = 'observed-data'
    ds_map = self.ds_to_stix_map
    transformers = self.transformers
    observation = {
        'id': stix_type + '--' + str(uuid.uuid4()),
        'type': stix_type,
        'created_by_ref': self.identity_id,
        'objects': {}
    }
    # create normal type objects
    for ds_key in obj:
        # Skip datasource keys with no mapping entry.
        if ds_key not in ds_map:
            logging.debug(
                '{} is not found in map, skipping'.format(ds_key))
            continue
        # get the stix keys that are mapped
        ds_key_def_obj = self.ds_to_stix_map[ds_key]
        # A mapping entry may be a single definition or a list of them.
        ds_key_def_list = ds_key_def_obj if isinstance(
            ds_key_def_obj, list) else [ds_key_def_obj]
        for ds_key_def in ds_key_def_list:
            if ds_key_def is None or 'key' not in ds_key_def:
                logging.debug(
                    '{} is not valid (None, or missing key)'.format(
                        ds_key_def))
                continue
            key_to_add = ds_key_def['key']
            # Optional per-key transform function applied to the raw value.
            transformer = transformers[ds_key_def[
                'transformer']] if 'transformer' in ds_key_def else None
            # Cybox path: value lands inside observation['objects'];
            # otherwise it is added as a plain observation property.
            if ds_key_def.get('cybox', self.cybox_default):
                object_name = ds_key_def.get('object')
                if 'references' in ds_key_def:
                    # Reuse a value already placed in the object map.
                    stix_value = object_map[ds_key_def['references']]
                else:
                    stix_value = DataSourceObjToStixObj._get_value(
                        obj, ds_key, transformer)
                    if not DataSourceObjToStixObj._valid_stix_value(
                            self.properties, key_to_add, stix_value):
                        continue
                DataSourceObjToStixObj._handle_cybox_key_def(
                    key_to_add, observation, stix_value, object_map,
                    object_name)
            else:
                stix_value = DataSourceObjToStixObj._get_value(
                    obj, ds_key, transformer)
                if not DataSourceObjToStixObj._valid_stix_value(
                        self.properties, key_to_add, stix_value):
                    continue
                DataSourceObjToStixObj._add_property(
                    observation, key_to_add, stix_value)
    # Validate each STIX object
    if self.stix_validator:
        validated_result = validate_instance(observation)
        print_results(validated_result)
    return observation
def validate_stix(self, pathlist):
    """Validate each STIX file in *pathlist*, reporting every file as its own
    subtest so one failure does not mask the others."""
    for candidate in pathlist:
        outcome = validate_file(str(candidate), self.validation_options)
        with self.subTest(str(candidate)):
            print_results([outcome])
            self.assertTrue(outcome.is_valid)
def transform(self, obj):
    """
    Transforms the given object in to a STIX observation based on the mapping file and transform functions

    :param obj: the datasource object that is being converted to stix
    :return: the input object converted to stix valid json
    """
    index = 0
    ref_objs = {}
    linked_objs = {}
    stix_type = 'observed-data'
    uniq_id = str(uuid.uuid4())
    ds_map = self.dsToStixMap
    xformers = self.transformers
    observation = {
        # Custom property identifying which datasource produced this observation.
        'x_com_ibm_uds_datasource': {
            'id': self.datasource['id'],
            'name': self.datasource['name']
        },
        'id': stix_type + '--' + uniq_id,
        'type': stix_type,
        'objects': {},
    }
    # create normal type objects
    for ds_key in ds_map:
        # get the stix keys that are mapped
        ds_key_def_obj = self.dsToStixMap[ds_key]
        # A mapping entry may be a single definition or a list of them.
        ds_key_def_list = ds_key_def_obj if isinstance(
            ds_key_def_obj, list) else [ds_key_def_obj]
        for ds_key_def in ds_key_def_list:
            if ds_key_def is None or 'key' not in ds_key_def or 'type' not in ds_key_def:
                logging.debug(
                    '{} is not valid (None, or missing key and type)'.
                    format(ds_key_def))
                continue
            # Only simple 'value' definitions are handled here; cybox/complex
            # definitions are processed later by _create_complex_objects.
            if ds_key_def['type'] != 'value' or 'cybox' in ds_key_def:
                continue
            key_to_add = ds_key_def['key']
            # Optional per-key transform function applied to the raw value.
            transformer = xformers[ds_key_def[
                'transformer']] if 'transformer' in ds_key_def else None
            linked = ds_key_def[
                'linked'] if 'linked' in ds_key_def else None
            stix_value = DataSourceObjToStixObj._get_value(
                obj, ds_key, transformer)
            if stix_value is None:
                continue
            # prop_obj is (property-definition, placement) — placement decides
            # whether the value goes on the outer observation or into objects.
            prop_obj = DataSourceObjToStixObj._determine_prop_attr(
                key_to_add, self.outer_props, self.simple_props)
            # Drop values that fail the property's validation regex, if any.
            if prop_obj[0] is not None and 'valid_regex' in prop_obj[0]:
                pattern = re.compile(prop_obj[0]['valid_regex'])
                if not pattern.match(str(stix_value)):
                    continue
            # handle when object is linked
            if linked is not None:
                observation = DataSourceObjToStixObj._handle_linked(
                    key_to_add, observation, stix_value)
            elif prop_obj[1] == 'OUTER':
                observation.update({key_to_add: stix_value})
            else:
                index = (self._add_to_objects(key_to_add, stix_value,
                                              observation, index, ds_key,
                                              ref_objs, linked, linked_objs,
                                              True, None))
    # create complex type objects
    DataSourceObjToStixObj._create_complex_objects(ds_map, xformers, index,
                                                   observation, ref_objs,
                                                   linked_objs, obj)
    # Validate each STIX object
    if self.stix_validator:
        validated_result = validate_instance(observation)
        print_results(validated_result)
    return observation
def check_stix_file(path):
    """Check the integrity of a STIX file and print the validation results.

    :param path: path of the STIX file to validate
    """
    print_results(validate_file(path))
# Script tail: either reuse previously generated test files (--input) or
# export fresh ones from MISP (--output), then compare the JSON and STIX2
# exports against each other.
args = parser.parse_args()
if args.input:
    # Re-run the import against existing test files named after --input.
    filenames = (f'test_json_{args.input}.json', f'test_stix2_{args.input}.json.stix2')
    query_import(f'test_stix2_{args.input}.json', args.externalise)
else:
    if not args.output:
        sys.exit('Please provide an output name for the test files.')
    output = args.output
    filenames = []
    # Export the same MISP query twice: once as native JSON, once as STIX2.
    for return_type in ('json', 'stix2'):
        args.output = f"test_{return_type}_{output}.json"
        args.returnFormat = return_type
        query_misp(args)
        filenames.append(args.output)
    # Track generated files so they can be removed when --delete is set.
    to_delete = [filename for filename in filenames]
    # Validate the STIX2 export before importing it back.
    stix_analyse = validate_file(filenames[1])
    print_results(stix_analyse)
    query_import(filenames[1], args.externalise)
    # presumably query_import writes its result next to the input with a
    # '.stix2' suffix — TODO confirm against query_import
    filenames[1] = f'{filenames[1]}.stix2'
    to_delete.append(filenames[1])
# Compare the two exports section by section.
comparer = Comparer(*filenames)
comparer.compare_attributes()
comparer.compare_objects()
comparer.compare_tags()
comparer.compare_galaxies()
comparer.compare_references()
# Clean up only files generated during this run (to_delete is not defined
# on the --input path).
if args.delete and not args.input:
    for filename in to_delete:
        os.remove(filename)