def convert(filename, output='output.json'):
    """Convert an NVD CVE JSON feed into a STIX2 bundle saved at *output*.

    Reads the ``CVE_Items`` array from *filename*, turns every entry into a
    ``Vulnerability`` attributed to a single MITRE author identity, and
    persists the resulting bundle through a ``MemoryStore``.
    """
    # Every vulnerability is attributed to this one author identity.
    mitre = Identity(name='The MITRE Corporation',
                     identity_class='organization')
    stix_objects = [mitre]
    processed = 0
    with open(filename) as feed:
        nvd_data = json.load(feed)
        print("Loaded the file")
        for entry in nvd_data['CVE_Items']:
            processed += 1
            cve_id = entry['cve']['CVE_data_meta']['ID']
            # NVD detail page first, then every reference from the feed.
            refs = [ExternalReference(
                source_name='NIST NVD',
                url='https://nvd.nist.gov/vuln/detail/' + cve_id)]
            refs.extend(
                ExternalReference(source_name=r['refsource'], url=r['url'])
                for r in entry['cve']['references']['reference_data'])
            stix_objects.append(Vulnerability(
                name=cve_id,
                created=entry['publishedDate'],
                modified=entry['lastModifiedDate'],
                description=entry['cve']['description']['description_data'][0]['value'],
                created_by_ref=mitre,
                external_references=refs))
        # Persist the whole bundle through a MemoryStore.
        MemoryStore(Bundle(stix_objects)).save_to_file(output)
        print("Successfully converted " + str(processed) + " vulnerabilities")
def create_external_reference(
    source_name: str, url: str, external_id: Optional[str] = None
) -> ExternalReference:
    """Build a STIX2 external reference from a source name, URL and
    optional external identifier."""
    fields = {
        "source_name": source_name,
        "url": url,
        "external_id": external_id,
    }
    return ExternalReference(**fields)
def amitt_identity(self):
    """Convert rows of the identities worksheet into STIX2 Identity objects.

    Skips the ``ID00000`` placeholder row; for each row of type
    ``identity``, builds an Identity carrying the row's references as
    external references, appends it to ``self.stix_objects`` and records
    the sheet-id -> STIX-id mapping in ``self.stix_identity_uuid``.
    """
    threat_actors = self.identities.itertuples()
    for i in threat_actors:
        # ID00000 is the worksheet's placeholder row — skip it.
        if i.id == "ID00000":
            continue
        external_references = []
        if i.type == "identity":
            refs = self.parse_xlsx_reference_tuples(i.references)
            for ref in refs:
                try:
                    reference = ExternalReference(
                        source_name=ref[1],
                        url=ref[2],
                        external_id=ref[0]
                    )
                    external_references.append(reference)
                except IndexError:
                    # Reference tuple has fewer than 3 fields — skip it.
                    pass
            try:
                created_date = datetime.strptime(i.whenAdded, "%Y-%m-%d")
            except (TypeError, ValueError):
                # BUG FIX: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit. strptime only raises
                # ValueError (bad format) or TypeError (non-string input).
                created_date = datetime.now()
            identity = Identity(
                name=i.name,
                description=i.summary,
                identity_class=i.identityClass,
                sectors=i.sectors,
                contact_information=i.contactInformation,
                created=created_date,
                custom_properties={
                    # "x_published": i.whenAdded,
                    # "x_source": i.sourceCountry,
                    # "x_target": i.targetCountry,
                    "x_identified_via": i.foundVia
                },
                external_references=external_references
            )
            self.stix_objects.append(identity)
            self.stix_identity_uuid[i.id] = identity.id
def amitt_actor(self):
    """Convert rows of the actors worksheet into STIX2 ThreatActor objects.

    Skips the ``I00000`` placeholder row; for each row of type
    ``threat-actor``, builds a ThreatActor carrying the row's references
    as external references, appends it to ``self.stix_objects`` and
    records the sheet-id -> STIX-id mapping in
    ``self.stix_threat_actor_uuid``.
    """
    threat_actors = self.actors.itertuples()
    for i in threat_actors:
        # I00000 is the worksheet's placeholder row — skip it.
        if i.id == "I00000":
            continue
        external_references = []
        if i.type == "threat-actor":
            refs = self.parse_xlsx_reference_tuples(i.references)
            for ref in refs:
                try:
                    reference = ExternalReference(
                        source_name=ref[1],
                        url=ref[2],
                        external_id=ref[0]
                    )
                    external_references.append(reference)
                except IndexError:
                    # Reference tuple has fewer than 3 fields — skip it.
                    pass
            try:
                created_date = datetime.strptime(i.whenAdded, "%Y-%m-%d")
            except (TypeError, ValueError):
                # BUG FIX: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit. strptime only raises
                # ValueError (bad format) or TypeError (non-string input).
                created_date = datetime.now()
            threat_actor = ThreatActor(
                name=i.name,
                description=i.summary,
                labels=i.labels.split(","),
                created=created_date,
                custom_properties={
                    # "x_published": i.whenAdded,
                    # "x_first_seen": datetime.strptime(str(int(i.firstSeen)), "%Y"),
                    # "x_source": i.sourceCountry,
                    # "x_target": i.targetCountry,
                    "x_identified_via": i.foundVia
                },
                external_references=external_references
            )
            self.stix_objects.append(threat_actor)
            self.stix_threat_actor_uuid[i.id] = threat_actor.id
def amitt_campaign(self):
    """Convert rows of the campaigns worksheet into STIX2 Campaign objects.

    Skips the ``I00000`` placeholder row; for each row of type
    ``campaign``, builds a Campaign carrying the row's references as
    external references, appends it to ``self.stix_objects`` and records
    the sheet-id -> STIX-id mapping in ``self.stix_campaign_uuid``.
    """
    campaigns = self.campaigns.itertuples()
    for i in campaigns:
        # I00000 is the worksheet's placeholder row — skip it.
        if i.id == "I00000":
            continue
        external_references = []
        if i.type == "campaign":
            refs = self.parse_xlsx_reference_tuples(i.references)
            for ref in refs:
                try:
                    reference = ExternalReference(
                        source_name=ref[1],
                        url=ref[2],
                        external_id=ref[0]
                    )
                    external_references.append(reference)
                except IndexError:
                    # Reference tuple has fewer than 3 fields — skip it.
                    pass
            try:
                created_date = datetime.strptime(i.whenAdded, "%Y-%m-%d")
            except (TypeError, ValueError):
                # BUG FIX: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit. strptime only raises
                # ValueError (bad format) or TypeError (non-string input).
                created_date = datetime.now()
            campaign = Campaign(
                name=i.name,
                description=i.summary,
                # firstSeen is stored as a bare year in the sheet.
                first_seen=datetime.strptime(str(int(i.firstSeen)), "%Y"),
                created=created_date,
                custom_properties={
                    # "x_published": i.whenAdded,
                    # "x_source": i.sourceCountry,
                    # "x_target": i.targetCountry,
                    "x_identified_via": i.foundVia
                },
                external_references=external_references
            )
            self.stix_objects.append(campaign)
            self.stix_campaign_uuid[i.id] = campaign.id
def convert(parse_data, output='output.json'):
    """Convert a parsed Microsoft Security Bulletin RSS feed into a STIX2
    bundle of Vulnerability objects and save it to *output*.

    :param parse_data: dict with the parsed RSS feed
                       (``parse_data["rss"]["channel"]["item"]``)
    :param output: path of the bundle file to write
    """
    # Single author identity shared by every vulnerability in the bundle.
    # (A leftover debug print(author) was removed here.)
    author = Identity(name='The MS Bulletin Corporation',
                      identity_class='organization')
    count = 0
    vulnerabilities_bundle = [author]
    # The feed carries only one modification date, on the channel itself.
    mdate = parse_data["rss"]["channel"]["lastBuildDate"]
    for msb in parse_data["rss"]["channel"]["item"]:
        count += 1
        name = msb["title"]
        cdate = msb["pubDate"]
        description = msb["description"]
        # BUG FIX: external_references must be a *list* of ExternalReference
        # objects; the original passed a bare ExternalReference.
        external_references = [
            ExternalReference(
                source_name="Microsoft Security Bulletin",
                url=msb["link"]
            )
        ]
        vuln = Vulnerability(
            name=name,
            created=cdate,
            modified=mdate,
            description=description,
            created_by_ref=author,
            external_references=external_references
        )
        vulnerabilities_bundle.append(vuln)
    # Serialize everything through a MemoryStore.
    bundle = Bundle(vulnerabilities_bundle)
    memorystore = MemoryStore(bundle)
    memorystore.save_to_file(output)
    print("Successfully converted " + str(count) + " vulnerabilities")
def process_events(self, events):
    """Convert MISP events into STIX2 bundles and send them to OpenCTI.

    For each event: build the author identity, markings, tags and external
    references; turn every attribute (and object attribute) into an
    indicator; optionally wrap everything in a Report; then serialize and
    ship the bundle through the connector helper.
    """
    for event in events:
        self.helper.log_info("Processing event " + event["Event"]["uuid"])
        ### Default variables
        # De-duplication bookkeeping: marking ids, entity names/ids, and
        # report object-ref names/ids already added to the bundle.
        added_markings = []
        added_entities = []
        added_object_refs = []
        ### Pre-process
        # Author: the MISP organisation that created the event.
        author = Identity(
            name=event["Event"]["Orgc"]["name"], identity_class="organization"
        )
        # Elements (intrusion sets / malwares / tools / attack patterns)
        # resolved from the event's galaxies.
        event_elements = self.prepare_elements(event["Event"]["Galaxy"], author)
        # Markings — default to TLP:WHITE when the event has no tags.
        if "Tag" in event["Event"]:
            event_markings = self.resolve_markings(event["Event"]["Tag"])
        else:
            event_markings = [TLP_WHITE]
        # Tags
        event_tags = []
        if "Tag" in event["Event"]:
            event_tags = self.resolve_tags(event["Event"]["Tag"])
        # ExternalReference back to the MISP event page.
        event_external_reference = ExternalReference(
            source_name=self.helper.connect_name,
            external_id=event["Event"]["uuid"],
            url=self.misp_url + "/events/view/" + event["Event"]["uuid"],
        )
        ### Get indicators
        event_external_references = [event_external_reference]
        indicators = []
        # Get attributes
        for attribute in event["Event"]["Attribute"]:
            indicator = self.process_attribute(
                author, event_elements, event_markings, [], attribute
            )
            # "link" attributes become extra references on the event itself.
            if attribute["type"] == "link":
                event_external_references.append(
                    ExternalReference(
                        source_name=attribute["category"],
                        external_id=attribute["uuid"],
                        url=attribute["value"],
                    )
                )
            if indicator is not None:
                indicators.append(indicator)
        # Get attributes of objects
        objects_relationships = []
        for object in event["Event"]["Object"]:
            # First pass: collect the object's "link" attributes as
            # references attached to every indicator of this object.
            attribute_external_references = []
            for attribute in object["Attribute"]:
                if attribute["type"] == "link":
                    attribute_external_references.append(
                        ExternalReference(
                            source_name=attribute["category"],
                            external_id=attribute["uuid"],
                            url=attribute["value"],
                        )
                    )
            # Second pass: convert the attributes themselves.
            object_attributes = []
            for attribute in object["Attribute"]:
                indicator = self.process_attribute(
                    author,
                    event_elements,
                    event_markings,
                    attribute_external_references,
                    attribute,
                )
                if indicator is not None:
                    indicators.append(indicator)
                    # File-typed observables of a "file" object get related
                    # to one another below.
                    if (
                        object["meta-category"] == "file"
                        and indicator["indicator"].x_opencti_observable_type
                        in FILETYPES
                    ):
                        object_attributes.append(indicator)
            objects_relationships.extend(
                self.process_observable_relations(object_attributes, [])
            )
        ### Prepare the bundle
        bundle_objects = [author]
        object_refs = []
        # Add event markings
        for event_marking in event_markings:
            if event_marking["id"] not in added_markings:
                bundle_objects.append(event_marking)
                added_markings.append(event_marking["id"])
        # Add event elements (deduplicated by name)
        all_event_elements = (
            event_elements["intrusion_sets"]
            + event_elements["malwares"]
            + event_elements["tools"]
            + event_elements["attack_patterns"]
        )
        for event_element in all_event_elements:
            if event_element["name"] not in added_object_refs:
                object_refs.append(event_element)
                added_object_refs.append(event_element["name"])
            if event_element["name"] not in added_entities:
                bundle_objects.append(event_element)
                added_entities.append(event_element["name"])
        # Add indicators (deduplicated by STIX id)
        for indicator in indicators:
            if indicator["indicator"]["id"] not in added_object_refs:
                object_refs.append(indicator["indicator"])
                added_object_refs.append(indicator["indicator"]["id"])
            if indicator["indicator"]["id"] not in added_entities:
                bundle_objects.append(indicator["indicator"])
                added_entities.append(indicator["indicator"]["id"])
            # Add attribute markings
            for attribute_marking in indicator["markings"]:
                if attribute_marking["id"] not in added_markings:
                    bundle_objects.append(attribute_marking)
                    added_markings.append(attribute_marking["id"])
            # Add attribute elements
            all_attribute_elements = (
                indicator["attribute_elements"]["intrusion_sets"]
                + indicator["attribute_elements"]["malwares"]
                + indicator["attribute_elements"]["tools"]
                + indicator["attribute_elements"]["attack_patterns"]
            )
            for attribute_element in all_attribute_elements:
                if attribute_element["name"] not in added_object_refs:
                    object_refs.append(attribute_element)
                    added_object_refs.append(attribute_element["name"])
                if attribute_element["name"] not in added_entities:
                    bundle_objects.append(attribute_element)
                    added_entities.append(attribute_element["name"])
            # Add attribute relationships
            for relationship in indicator["relationships"]:
                object_refs.append(relationship)
                bundle_objects.append(relationship)
        # Add object_relationships
        for object_relationship in objects_relationships:
            bundle_objects.append(object_relationship)
        ### Create the report if needed
        if self.misp_create_report and len(object_refs) > 0:
            report = Report(
                name=event["Event"]["info"],
                description=event["Event"]["info"],
                published=parse(event["Event"]["date"]),
                created_by_ref=author,
                object_marking_refs=event_markings,
                labels=["threat-report"],
                object_refs=object_refs,
                external_references=event_external_references,
                custom_properties={
                    "x_opencti_report_class": self.misp_report_class,
                    "x_opencti_object_status": 2,
                    "x_opencti_tags": event_tags,
                },
            )
            bundle_objects.append(report)
        bundle = Bundle(objects=bundle_objects).serialize()
        self.helper.log_info("Sending event STIX2 bundle")
        self.helper.send_stix2_bundle(
            bundle, None, self.update_existing_data, False
        )
def process_events(self, events):
    """Convert MISP events into STIX2 bundles and send them to OpenCTI.

    Older variant of the MISP event processor: no event tags, no per-object
    link references; the report always carries exactly the one MISP
    external reference.
    """
    for event in events:
        ### Default variables
        # De-duplication bookkeeping for markings / entities / object refs.
        added_markings = []
        added_entities = []
        added_object_refs = []
        ### Pre-process
        # Author: the MISP organisation that created the event.
        author = Identity(name=event['Event']['Orgc']['name'], identity_class='organization')
        # Elements resolved from the event's galaxies.
        event_elements = self.prepare_elements(event['Event']['Galaxy'])
        # Markings — default to TLP:WHITE when the event has no tags.
        if 'Tag' in event['Event']:
            event_markings = self.resolve_markings(event['Event']['Tag'])
        else:
            event_markings = [TLP_WHITE]
        # ExternalReference back to the MISP event page.
        event_external_reference = ExternalReference(
            source_name=self.helper.connect_name,
            external_id=event['Event']['uuid'],
            url=self.misp_url + '/events/view/' + event['Event']['uuid'])
        ### Get indicators
        indicators = []
        # Get attributes
        for attribute in event['Event']['Attribute']:
            indicator = self.process_attribute(author, event_elements, event_markings, attribute)
            if indicator is not None:
                indicators.append(indicator)
        # Get attributes of objects
        objects_relationships = []
        for object in event['Event']['Object']:
            object_attributes = []
            for attribute in object['Attribute']:
                indicator = self.process_attribute(author, event_elements, event_markings, attribute)
                if indicator is not None:
                    indicators.append(indicator)
                    # File-typed observables of a "file" object get related
                    # to one another below.
                    if object['meta-category'] == 'file' and indicator[
                            'indicator'].x_opencti_observable_type in FILETYPES:
                        object_attributes.append(indicator)
            objects_relationships.extend(self.process_observable_relations(object_attributes, []))
        ### Prepare the bundle
        bundle_objects = [author]
        object_refs = []
        # Add event markings
        for event_marking in event_markings:
            if event_marking['id'] not in added_markings:
                bundle_objects.append(event_marking)
                added_markings.append(event_marking['id'])
        # Add event elements (deduplicated by name)
        all_event_elements = \
            event_elements['intrusion_sets'] + \
            event_elements['malwares'] + \
            event_elements['tools'] + \
            event_elements['attack_patterns']
        for event_element in all_event_elements:
            if event_element['name'] not in added_object_refs:
                object_refs.append(event_element)
                added_object_refs.append(event_element['name'])
            if event_element['name'] not in added_entities:
                bundle_objects.append(event_element)
                added_entities.append(event_element['name'])
        # Add indicators (deduplicated by STIX id)
        for indicator in indicators:
            if indicator['indicator']['id'] not in added_object_refs:
                object_refs.append(indicator['indicator'])
                added_object_refs.append(indicator['indicator']['id'])
            if indicator['indicator']['id'] not in added_entities:
                bundle_objects.append(indicator['indicator'])
                added_entities.append(indicator['indicator']['id'])
            # Add attribute markings
            for attribute_marking in indicator['markings']:
                if attribute_marking['id'] not in added_markings:
                    bundle_objects.append(attribute_marking)
                    added_markings.append(attribute_marking['id'])
            # Add attribute elements
            all_attribute_elements = \
                indicator['attribute_elements']['intrusion_sets'] + \
                indicator['attribute_elements']['malwares'] + \
                indicator['attribute_elements']['tools'] + \
                indicator['attribute_elements']['attack_patterns']
            for attribute_element in all_attribute_elements:
                if attribute_element['name'] not in added_object_refs:
                    object_refs.append(attribute_element)
                    added_object_refs.append(attribute_element['name'])
                if attribute_element['name'] not in added_entities:
                    bundle_objects.append(attribute_element)
                    added_entities.append(attribute_element['name'])
            # Add attribute relationships
            for relationship in indicator['relationships']:
                object_refs.append(relationship)
                bundle_objects.append(relationship)
        # Add object_relationships
        for object_relationship in objects_relationships:
            bundle_objects.append(object_relationship)
        ### Create the report if needed
        if self.misp_create_report and len(object_refs) > 0:
            report = Report(
                name=event['Event']['info'],
                description=event['Event']['info'],
                published=parse(event['Event']['date']),
                created_by_ref=author,
                object_marking_refs=event_markings,
                labels=['threat-report'],
                object_refs=object_refs,
                external_references=[event_external_reference],
                custom_properties={
                    'x_opencti_report_class': self.misp_report_class,
                    'x_opencti_object_status': 2
                }
            )
            bundle_objects.append(report)
        bundle = Bundle(objects=bundle_objects).serialize()
        self.helper.send_stix2_bundle(bundle, None, self.update_existing_data, False)
def convert(filename, output="output.json"):
    """Convert an NVD CVE JSON feed into a STIX2 bundle and write it to disk.

    CVSSv3 metrics, when present, are carried as OpenCTI ``x_opencti_*``
    custom properties on each Vulnerability.

    :param filename: path to the NVD "CVE_Items" JSON feed
    :param output: path of the serialized bundle file to write
    """
    # Single author identity shared by every vulnerability.
    author = Identity(name="The MITRE Corporation", identity_class="organization")
    count = 0
    with open(filename) as json_file:
        vulnerabilities_bundle = [author]
        data = json.load(json_file)
        for cves in data["CVE_Items"]:
            count += 1
            # CVE identifier, e.g. "CVE-2021-12345".
            name = cves["cve"]["CVE_data_meta"]["ID"]
            # NVD detail page first, then every reference from the feed.
            external_references = [
                ExternalReference(
                    source_name="NIST NVD",
                    url="https://nvd.nist.gov/vuln/detail/" + name,
                )
            ]
            for reference in cves["cve"]["references"]["reference_data"]:
                external_references.append(
                    ExternalReference(
                        source_name=reference["refsource"], url=reference["url"]
                    )
                )
            description = cves["cve"]["description"]["description_data"][0][
                "value"]
            # CVSSv3 metrics are optional. The original repeated the
            # '"baseMetricV3" in cves["impact"]' conditional five times;
            # resolve the nested dict once instead. Missing metrics
            # fall out as None, exactly as before.
            cvss_v3 = cves["impact"].get("baseMetricV3", {}).get("cvssV3", {})
            base_score = cvss_v3.get("baseScore")
            base_severity = cvss_v3.get("baseSeverity")
            attack_vector = cvss_v3.get("attackVector")
            integrity_impact = cvss_v3.get("integrityImpact")
            availability_impact = cvss_v3.get("availabilityImpact")
            # NVD timestamps carry no seconds, e.g. "2019-04-29T15:29Z".
            cdate = datetime.datetime.strptime(cves["publishedDate"],
                                               "%Y-%m-%dT%H:%MZ")
            mdate = datetime.datetime.strptime(cves["lastModifiedDate"],
                                               "%Y-%m-%dT%H:%MZ")
            vuln = Vulnerability(
                id=OpenCTIStix2Utils.generate_random_stix_id("vulnerability"),
                name=name,
                created=cdate,
                modified=mdate,
                description=description,
                created_by_ref=author,
                external_references=external_references,
                custom_properties={
                    "x_opencti_base_score": base_score,
                    "x_opencti_base_severity": base_severity,
                    "x_opencti_attack_vector": attack_vector,
                    "x_opencti_integrity_impact": integrity_impact,
                    "x_opencti_availability_impact": availability_impact,
                },
            )
            vulnerabilities_bundle.append(vuln)
    # Serialize the bundle and write it out.
    bundle = Bundle(vulnerabilities_bundle)
    bundle_json = bundle.serialize()
    with open(output, "w") as f:
        f.write(bundle_json)
def run(self):
    """Main connector loop: periodically download the URLhaus CSV feed and
    import its URLs into OpenCTI as simple observables.

    Runs forever, waking every 60 seconds; the actual import only happens
    when more than (urlhaus_interval - 1) days have passed since the
    stored ``last_run`` timestamp.
    """
    self.helper.log_info("Fetching URLhaus dataset...")
    while True:
        try:
            # Get the current timestamp and check when the connector last ran.
            timestamp = int(time.time())
            current_state = self.helper.get_state()
            if current_state is not None and "last_run" in current_state:
                last_run = current_state["last_run"]
                self.helper.log_info("Connector last run: " +
                                     datetime.utcfromtimestamp(last_run).
                                     strftime("%Y-%m-%d %H:%M:%S"))
            else:
                last_run = None
                self.helper.log_info("Connector has never run")
            # If the last_run is more than interval-1 day ago, run the import.
            if last_run is None or ((timestamp - last_run) > (
                    (int(self.urlhaus_interval) - 1) * 60 * 60 * 24)):
                self.helper.log_info("Connector will run!")
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "URLhaus run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                try:
                    # Download the CSV feed next to this script as data.csv.
                    response = urllib.request.urlopen(
                        self.urlhaus_csv_url,
                        context=ssl.create_default_context(
                            cafile=certifi.where()),
                    )
                    image = response.read()  # raw CSV bytes
                    with open(
                        os.path.dirname(os.path.abspath(__file__)) +
                        "/data.csv",
                        "wb",
                    ) as file:
                        file.write(image)
                    fp = open(
                        os.path.dirname(os.path.abspath(__file__)) +
                        "/data.csv",
                        "r",
                    )
                    # Skip comment lines starting with '#'.
                    rdr = csv.reader(filter(lambda row: row[0] != "#", fp))
                    bundle_objects = []
                    # NOTE(review): column indices assume the URLhaus CSV
                    # layout — row[2]=url, row[3]=url_status, row[4]=threat,
                    # row[5]=tags, row[6]=urlhaus_link. Confirm against the
                    # feed documentation if the feed format changes.
                    for row in rdr:
                        if row[3] == "online" or self.urlhaus_import_offline:
                            external_reference = ExternalReference(
                                source_name="Abuse.ch URLhaus",
                                url=row[6],
                                description="URLhaus repository URL",
                            )
                            stix_observable = SimpleObservable(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "x-opencti-simple-observable"),
                                key="Url.value",
                                value=row[2],
                                description=row[4],
                                x_opencti_score=80,
                                object_marking_refs=[TLP_WHITE],
                                labels=row[5].split(","),
                                created_by_ref=self.identity["standard_id"],
                                x_opencti_create_indicator=True,
                                external_references=[external_reference],
                            )
                            bundle_objects.append(stix_observable)
                    fp.close()
                    # Ship everything in one bundle, then clean up data.csv.
                    bundle = Bundle(objects=bundle_objects).serialize()
                    self.helper.send_stix2_bundle(
                        bundle,
                        entities_types=self.helper.connect_scope,
                        update=self.update_existing_data,
                        work_id=work_id,
                    )
                    if os.path.exists(
                            os.path.dirname(os.path.abspath(__file__)) +
                            "/data.csv"):
                        os.remove(
                            os.path.dirname(os.path.abspath(__file__)) +
                            "/data.csv")
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as the last run.
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp)
                self.helper.log_info(message)
                self.helper.set_state({"last_run": timestamp})
                self.helper.api.work.to_processed(work_id, message)
                self.helper.log_info(
                    "Last_run stored, next run in: " +
                    str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                    " days")
                time.sleep(60)
            else:
                # Not due yet — report time remaining and sleep.
                new_interval = self.get_interval() - (timestamp - last_run)
                self.helper.log_info(
                    "Connector will not run, next run in: " +
                    str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                time.sleep(60)
        except (KeyboardInterrupt, SystemExit):
            self.helper.log_info("Connector stop")
            exit(0)
        except Exception as e:
            self.helper.log_error(str(e))
            time.sleep(60)
def create_external_reference(source_name: str, external_id: str, url: str) -> ExternalReference:
    """Return a STIX2 external reference built from the given fields."""
    reference = ExternalReference(
        source_name=source_name,
        external_id=external_id,
        url=url,
    )
    return reference
def main():
    """Demisto entry point: convert the 'indicators' argument (JSON map of
    indicator records) into STIX2 objects and return them in the context.

    Each record is first tried as an SCO-based Indicator pattern; on a
    KeyError (unknown SCO type) it falls back to an SDO (CVE gets a "cve"
    external reference, attack patterns a "mitre" one). Incompatible
    records are logged and skipped.
    """
    user_args = demisto.args().get('indicators', 'Unknown')
    doubleBackslash = demisto.args().get('doubleBackslash', True)
    all_args = {}
    # Round-trip through JSON to get a plain dict copy of the input.
    if isinstance(user_args, dict):
        all_args = json.loads(json.dumps(user_args))
    else:
        try:
            all_args = json.loads(demisto.args().get('indicators', 'Unknown'))
        except:  # noqa: E722
            return_error('indicators argument is invalid json object')
    indicators = []
    for indicator_fields in all_args:
        # allow_custom lets non-spec fields (e.g. score) pass through stix2.
        kwargs: dict[str, Any] = {"allow_custom": True}
        demisto_indicator_type = all_args[indicator_fields].get(
            'indicator_type', 'Unknown')
        # Optionally escape backslashes so the value is STIX-pattern safe.
        if doubleBackslash:
            value = all_args[indicator_fields].get('value', '').replace('\\', r'\\')
        else:
            value = all_args[indicator_fields].get('value', '')
        # Map the Demisto verdict onto a coarse score label.
        demisto_score = all_args[indicator_fields].get('score', '').lower()
        if demisto_score in ["bad", "malicious"]:
            kwargs["score"] = "High"
        elif demisto_score == "suspicious":
            kwargs["score"] = "Medium"
        elif demisto_score in ["good", "benign"]:
            kwargs["score"] = "None"
        else:
            kwargs["score"] = "Not Specified"
        kwargs["created"] = dateparser.parse(all_args[indicator_fields].get(
            'timestamp', ''))
        # Fall back to the created timestamp when lastSeen is absent.
        kwargs["modified"] = dateparser.parse(all_args[indicator_fields].get(
            'lastSeen', f'{kwargs["created"]}'))
        kwargs["id"] = all_args[indicator_fields].get('stixid', '')
        kwargs["labels"] = [demisto_indicator_type.lower()]
        kwargs["description"] = all_args[indicator_fields].get(
            'description', '')
        # Removing keys with empty/falsy values so stix2 gets only real data.
        kwargs = {k: v for k, v in kwargs.items() if v}
        try:
            # First attempt: treat the type as an SCO and build a pattern.
            indicator_type = demisto_indicator_type.lower().replace("-", "")
            indicator = Indicator(
                pattern=f"[{SCOs[indicator_type]} = '{value}']",
                pattern_type='stix',
                **kwargs)
            indicators.append(indicator)
        except KeyError:
            demisto.debug(
                f"{demisto_indicator_type} isn't an SCO checking other IOC types"
            )
            try:
                # Fallback: treat the type as an SDO.
                indicator_type = demisto_indicator_type.lower()
                if indicator_type == 'cve':
                    kwargs["external_references"] = [
                        ExternalReference(source_name="cve", external_id=value)
                    ]
                elif indicator_type == "attack pattern":
                    # NOTE(review): dict.get never raises KeyError, so this
                    # inner try/except looks vestigial — confirm before
                    # removing.
                    try:
                        mitreid = all_args[indicator_fields].get('mitreid', '')
                        if mitreid:
                            kwargs["external_references"] = [
                                ExternalReference(source_name="mitre", external_id=mitreid)
                            ]
                    except KeyError:
                        pass
                indicator = SDOs[indicator_type](name=value, **kwargs)
                indicators.append(indicator)
            except (KeyError, TypeError) as e:
                demisto.info(
                    "Indicator type: {}, with the value: {} is not STIX compatible"
                    .format(demisto_indicator_type, value))
                demisto.info("Export failure excpetion: {}".format(e))
                continue
    # Several indicators are wrapped in a Bundle; a single one is returned
    # bare; none yields an empty context.
    if len(indicators) > 1:
        bundle = Bundle(indicators)
        context = {
            'StixExportedIndicators(val.pattern && val.pattern == obj.pattern)':
            json.loads(str(bundle))
        }
        res = (CommandResults(readable_output="",
                              outputs=context,
                              raw_response=str(bundle)))
    elif len(indicators) == 1:
        context = {
            'StixExportedIndicators(val.pattern && val.pattern == obj.pattern)':
            json.loads(str(indicators[0]))
        }
        res = (CommandResults(readable_output="",
                              outputs=context,
                              raw_response=str(indicators[0])))
    else:
        context = {'StixExportedIndicators': {}}
        res = CommandResults(readable_output="", outputs=context, raw_response={})
    return_results(res)
def process_yara_rules(self) -> None:
    """Fetch Yara rules from the Valhalla API and convert each one into a
    STIX2 Indicator, plus "indicates" relationships for MITRE ATT&CK
    technique (Txxxx) and group (Gxxxx) tags.

    All created objects are appended to ``self.bundle_objects``. Returns
    None both on success and when the rule download fails.
    """
    try:
        rules_json = self.valhalla_client.get_rules_json()
        response = ApiResponse.parse_obj(rules_json)
    except Exception as err:
        self.helper.log_error(f"error downloading rules: {err}")
        return None

    for yr in response.rules:
        # Handle reference URLs supplied by the Valhalla API;
        # "-" and "" mean "no reference".
        refs = []
        if yr.reference is not None and yr.reference != "" and yr.reference != "-":
            try:
                san_url = urlparse(yr.reference)
                ref = ExternalReference(
                    source_name="Nextron Systems Valhalla API",
                    url=san_url.geturl(),
                    description="Rule Reference: " + san_url.geturl(),
                )
                refs.append(ref)
            except Exception:
                self.helper.log_error(
                    f"error parsing ref url: {yr.reference}")
                # Skip the whole rule when its reference cannot be parsed.
                continue

        indicator = Indicator(
            name=yr.name,
            description=yr.cti_description,
            pattern_type="yara",
            pattern=yr.content,
            labels=yr.tags,
            valid_from=yr.cti_date,
            # Rules are considered valid for two years from import.
            valid_until=datetime.utcnow() + relativedelta(years=2),
            object_marking_refs=[self.default_marking],
            created_by_ref=self.organization,
            confidence=self.confidence_level,
            external_references=refs,
            custom_properties={
                "x_opencti_main_observable_type": "StixFile",
                "x_opencti_detection": True,
                "x_opencti_score": yr.score,
            },
        )
        self.bundle_objects.append(indicator)

        # Handle Tags - those include MITRE ATT&CK tags that we want to
        # create relationships for
        for tag in yr.tags:
            # handle Mitre ATT&CK relation indicator <-> attack-pattern
            if re.search(r"^T\d{4}$", tag):
                attack_pattern_id = self._ATTACK_MAPPING.get(tag)
                if attack_pattern_id is None or attack_pattern_id == "":
                    self.helper.log_info(f"no attack_pattern found for {tag}")
                    # BUG FIX: was "return None", which aborted processing of
                    # every remaining tag AND every remaining rule the first
                    # time a tag had no mapping; skip just this tag instead.
                    continue
                ap_rel = Relationship(
                    relationship_type="indicates",
                    source_ref=indicator,
                    target_ref=attack_pattern_id,
                    description="Yara Rule from Valhalla API",
                    created_by_ref=self.organization,
                    confidence=self.confidence_level,
                    object_marking_refs=[self.default_marking],
                )
                self.bundle_objects.append(ap_rel)
            # handle Mitre ATT&CK group relation indicator <-> intrusion-set
            if re.search(r"^G\d{4}$", tag):
                intrusion_set_id = self._ATTACK_MAPPING.get(tag)
                if intrusion_set_id == "" or intrusion_set_id is None:
                    self.helper.log_info(f"no intrusion_set found for {tag}")
                    # BUG FIX: was "return None" (same abort-everything bug
                    # as above) — skip only this tag.
                    continue
                is_rel = Relationship(
                    relationship_type="indicates",
                    source_ref=indicator,
                    target_ref=intrusion_set_id,
                    description="Yara Rule from Valhalla API",
                    created_by_ref=self.organization,
                    confidence=self.confidence_level,
                    object_marking_refs=[self.default_marking],
                )
                self.bundle_objects.append(is_rel)
def process_events(self, events):
    """Convert MISP events into STIX2 report bundles and send them to OpenCTI.

    Oldest variant of the MISP event processor: unattributable indicators
    hang off a per-event "Unknown threats" pseudo threat actor, and the
    event is re-tagged in MISP after import.
    """
    for event in events:
        # Pseudo threat actor that collects indicators which cannot be
        # attributed to any real threat.
        generic_actor = ThreatActor(
            name='Unknown threats',
            labels=['threat-actor'],
            description=
            'All unknown threats are represented by this pseudo threat actor. This entity helps to organize knowledge and indicators that could not be attributed to any other threats.'
        )
        # De-duplication bookkeeping for threats and markings.
        added_threats = []
        added_markings = []
        # Default values
        # Author: the MISP organisation that created the event.
        author = Identity(name=event['Event']['Orgc']['name'],
                          identity_class='organization')
        report_threats = self.prepare_threats(event['Event']['Galaxy'])
        if 'Tag' in event['Event']:
            report_markings = self.resolve_markings(event['Event']['Tag'])
        else:
            report_markings = []
        # External reference back to the MISP event page.
        reference_misp = ExternalReference(
            source_name=self.helper.connect_name,
            url=self.misp_url + '/events/view/' + event['Event']['uuid'])
        # Get all attributes
        indicators = []
        for attribute in event['Event']['Attribute']:
            indicator = self.process_attribute(author, report_threats,
                                               attribute, generic_actor)
            if indicator is not None:
                indicators.append(indicator)
        # get all attributes of object
        for object in event['Event']['Object']:
            for attribute in object['Attribute']:
                indicator = self.process_attribute(author, report_threats,
                                                   attribute, generic_actor)
                if indicator is not None:
                    indicators.append(indicator)
        # Assemble the bundle: author, markings, threats, indicators,
        # attribute-level threats/markings/relationships.
        bundle_objects = [author]
        report_refs = []
        for report_marking in report_markings:
            if report_marking['id'] not in added_markings:
                bundle_objects.append(report_marking)
                added_markings.append(report_marking['id'])
        for report_threat in report_threats:
            report_refs.append(report_threat)
            bundle_objects.append(report_threat)
            added_threats.append(report_threat['name'])
        for indicator in indicators:
            report_refs.append(indicator['indicator'])
            bundle_objects.append(indicator['indicator'])
            for attribute_threat in indicator['attribute_threats']:
                if attribute_threat['name'] not in added_threats:
                    report_refs.append(attribute_threat)
                    bundle_objects.append(attribute_threat)
                    added_threats.append(attribute_threat['name'])
            for marking in indicator['markings']:
                if marking['id'] not in added_markings:
                    bundle_objects.append(marking)
                    added_markings.append(marking['id'])
            for relationship in indicator['relationships']:
                report_refs.append(relationship)
                bundle_objects.append(relationship)
        # Only build and send a report when it would reference something.
        if len(report_refs) > 0:
            report = Report(name=event['Event']['info'],
                            description=event['Event']['info'],
                            published=parse(event['Event']['date']),
                            created_by_ref=author,
                            object_marking_refs=report_markings,
                            labels=['threat-report'],
                            object_refs=report_refs,
                            external_references=[reference_misp],
                            custom_properties={
                                'x_opencti_report_class': 'Threat Report'
                            })
            bundle_objects.append(report)
            bundle = Bundle(objects=bundle_objects).serialize()
            self.helper.send_stix2_bundle(bundle)
        # NOTE(review): the trigger tag is swapped for the imported tag only
        # when misp_untag_event is set — confirm that marking events as
        # imported is indeed meant to depend on this flag.
        if self.misp_untag_event:
            self.misp.untag(event['Event']['uuid'], self.misp_tag)
            self.misp.tag(event['Event']['uuid'], self.misp_imported_tag)
def run(self):
    """Main connector loop: periodically download the VXVault URL list and
    import its entries into OpenCTI as simple observables.

    Runs forever, waking every 60 seconds; the actual import only happens
    when more than (vxvault_interval - 1) days have passed since the
    stored ``last_run`` timestamp.
    """
    self.helper.log_info("Fetching VXVault dataset...")
    while True:
        try:
            # Get the current timestamp and check when the connector last ran.
            timestamp = int(time.time())
            current_state = self.helper.get_state()
            if current_state is not None and "last_run" in current_state:
                last_run = current_state["last_run"]
                self.helper.log_info("Connector last run: " +
                                     datetime.utcfromtimestamp(last_run).
                                     strftime("%Y-%m-%d %H:%M:%S"))
            else:
                last_run = None
                self.helper.log_info("Connector has never run")
            # If the last_run is more than interval-1 day ago, run the import.
            if last_run is None or ((timestamp - last_run) > (
                    (int(self.vxvault_interval) - 1) * 60 * 60 * 24)):
                self.helper.log_info("Connector will run!")
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "VXVault run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                try:
                    # Download the URL list next to this script as data.txt.
                    response = urllib.request.urlopen(
                        self.vxvault_url,
                        context=ssl.create_default_context(
                            cafile=certifi.where()),
                    )
                    image = response.read()  # raw feed bytes
                    with open(
                        os.path.dirname(os.path.abspath(__file__)) +
                        "/data.txt",
                        "wb",
                    ) as file:
                        file.write(image)
                    count = 0
                    bundle_objects = []
                    with open(
                            os.path.dirname(os.path.abspath(__file__)) +
                            "/data.txt") as fp:
                        for line in fp:
                            count += 1
                            # The first three lines of the feed are headers.
                            if count <= 3:
                                continue
                            external_reference = ExternalReference(
                                source_name="VX Vault",
                                url="http://vxvault.net",
                                description="VX Vault repository URL",
                            )
                            # NOTE(review): `line` still carries its trailing
                            # newline — the observable value is likely meant
                            # to be line.strip(); confirm before changing.
                            stix_observable = SimpleObservable(
                                id=OpenCTIStix2Utils.
                                generate_random_stix_id(
                                    "x-opencti-simple-observable"),
                                key="Url.value",
                                value=line,
                                description="VX Vault URL",
                                x_opencti_score=80,
                                object_marking_refs=[TLP_WHITE],
                                created_by_ref=self.
                                identity["standard_id"],
                                x_opencti_create_indicator=self.
                                create_indicators,
                                external_references=[external_reference],
                            )
                            bundle_objects.append(stix_observable)
                    # Ship everything in one bundle, then clean up data.txt.
                    bundle = Bundle(objects=bundle_objects,
                                    allow_custom=True).serialize()
                    self.helper.send_stix2_bundle(
                        bundle,
                        update=self.update_existing_data,
                        work_id=work_id,
                    )
                    if os.path.exists(
                            os.path.dirname(os.path.abspath(__file__)) +
                            "/data.txt"):
                        os.remove(
                            os.path.dirname(os.path.abspath(__file__)) +
                            "/data.txt")
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as the last run.
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp)
                self.helper.log_info(message)
                self.helper.set_state({"last_run": timestamp})
                self.helper.api.work.to_processed(work_id, message)
                self.helper.log_info(
                    "Last_run stored, next run in: " +
                    str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                    " days")
                time.sleep(60)
            else:
                # Not due yet — report time remaining and sleep.
                new_interval = self.get_interval() - (timestamp - last_run)
                self.helper.log_info(
                    "Connector will not run, next run in: " +
                    str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                time.sleep(60)
        except (KeyboardInterrupt, SystemExit):
            self.helper.log_info("Connector stop")
            exit(0)
        except Exception as e:
            self.helper.log_error(str(e))
            time.sleep(60)
pattern_type='stix') except Exception as ex: demisto.info( "Indicator type: {}, with the value: {} is not STIX compatible" .format(demisto_indicator_type, value)) demisto.info("Export failure excpetion: {}".format(ex)) continue indicators.append(indicator) else: try: vulnerability = Vulnerability( name=stix_type_and_value, description=label_as_type, labels=[label_as_type], external_references=[ ExternalReference(source_name="cve", external_id=stix_type_and_value) ]) except Exception as ex: demisto.info( "Indicator type: {}, with the value: {} is not STIX compatible" .format(demisto_indicator_type, value)) demisto.info("Export failure excpetion: {}".format(ex)) continue indicators.append(vulnerability) counter += 1 if counter > 1: bundle = Bundle(indicators) context = { 'StixExportedIndicators(val.pattern && val.pattern == obj.pattern)': json.loads(str(bundle)) }