class Mitre:
    """Mitre connector."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.mitre_enterprise_file_url = get_config_variable(
            "MITRE_ENTERPRISE_FILE_URL", ["mitre", "enterprise_file_url"], config
        )
        self.mitre_pre_attack_file_url = get_config_variable(
            "MITRE_PRE_ATTACK_FILE_URL", ["mitre", "pre_attack_file_url"], config
        )
        self.mitre_mobile_attack_file_url = get_config_variable(
            "MITRE_MOBILE_ATTACK_FILE_URL", ["mitre", "mobile_attack_file_url"], config
        )
        self.mitre_ics_attack_file_url = get_config_variable(
            "MITRE_ICS_ATTACK_FILE_URL", ["mitre", "ics_attack_file_url"], config
        )
        self.mitre_interval = get_config_variable(
            "MITRE_INTERVAL", ["mitre", "interval"], config, True
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
        )

    def get_interval(self):
        return int(self.mitre_interval) * 60 * 60 * 24

    def retrieve_data(self, url: str) -> Optional[str]:
        """
        Retrieve data from the given url.

        Parameters
        ----------
        url : str
            Url to retrieve.

        Returns
        -------
        str
            A string with the content or None in case of failure.
        """
        try:
            return (
                urllib.request.urlopen(
                    url,
                    context=ssl.create_default_context(cafile=certifi.where()),
                )
                .read()
                .decode("utf-8")
            )
        except (
            urllib.error.URLError,
            urllib.error.HTTPError,
            urllib.error.ContentTooShortError,
        ) as urllib_error:
            self.helper.log_error(f"Error retrieving url {url}: {urllib_error}")
            return None

    # Add confidence to every object in a bundle
    def add_confidence_to_bundle_objects(self, serialized_bundle: str) -> str:
        # The list of object types for which the confidence has to be added
        # (skip marking-definition, identity, external-reference-as-report)
        object_types_with_confidence = [
            "attack-pattern",
            "course-of-action",
            "intrusion-set",
            "campaign",
            "malware",
            "tool",
            "report",
            "relationship",
        ]
        stix_bundle = json.loads(serialized_bundle)
        for obj in stix_bundle["objects"]:
            object_type = obj["type"]
            if object_type in object_types_with_confidence:
                # self.helper.log_info(f"Adding confidence to {object_type} object")
                obj["confidence"] = int(self.confidence_level)
        return json.dumps(stix_bundle)

    def process_data(self):
        try:
            # Get the current timestamp and check
            timestamp = int(time.time())
            current_state = self.helper.get_state()
            if current_state is not None and "last_run" in current_state:
                last_run = current_state["last_run"]
                self.helper.log_info(
                    "Connector last run: "
                    + datetime.utcfromtimestamp(last_run).strftime("%Y-%m-%d %H:%M:%S")
                )
            else:
                last_run = None
                self.helper.log_info("Connector has never run")
            # If the last_run is more than interval-1 day
            if last_run is None or (
                (timestamp - last_run) > ((int(self.mitre_interval) - 1) * 60 * 60 * 24)
            ):
                self.helper.log_info("Connector will run!")
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "MITRE run @ " + now.strftime("%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name
                )
                # Mitre enterprise file url
                if (
                    self.mitre_enterprise_file_url is not None
                    and len(self.mitre_enterprise_file_url) > 0
                ):
                    enterprise_data = self.retrieve_data(self.mitre_enterprise_file_url)
                    enterprise_data_with_confidence = (
                        self.add_confidence_to_bundle_objects(enterprise_data)
                    )
                    self.send_bundle(work_id, enterprise_data_with_confidence)
                # Mitre pre attack file url
                if (
                    self.mitre_pre_attack_file_url is not None
                    and len(self.mitre_pre_attack_file_url) > 0
                ):
                    pre_attack_data = self.retrieve_data(self.mitre_pre_attack_file_url)
                    pre_attack_data_with_confidence = (
                        self.add_confidence_to_bundle_objects(pre_attack_data)
                    )
                    self.send_bundle(work_id, pre_attack_data_with_confidence)
                # Mitre mobile attack file url
                if (
                    self.mitre_mobile_attack_file_url is not None
                    and len(self.mitre_mobile_attack_file_url) > 0
                ):
                    mobile_attack_data = self.retrieve_data(
                        self.mitre_mobile_attack_file_url
                    )
                    mobile_attack_data_with_confidence = (
                        self.add_confidence_to_bundle_objects(mobile_attack_data)
                    )
                    self.send_bundle(work_id, mobile_attack_data_with_confidence)
                # Mitre ics attack file url
                if (
                    self.mitre_ics_attack_file_url is not None
                    and len(self.mitre_ics_attack_file_url) > 0
                ):
                    ics_attack_data = self.retrieve_data(self.mitre_ics_attack_file_url)
                    ics_attack_data_with_confidence = (
                        self.add_confidence_to_bundle_objects(ics_attack_data)
                    )
                    self.send_bundle(work_id, ics_attack_data_with_confidence)
                # Store the current timestamp as a last run
                message = (
                    "Connector successfully run, storing last_run as " + str(timestamp)
                )
                self.helper.log_info(message)
                self.helper.set_state({"last_run": timestamp})
                self.helper.api.work.to_processed(work_id, message)
                self.helper.log_info(
                    "Last_run stored, next run in: "
                    + str(round(self.get_interval() / 60 / 60 / 24, 2))
                    + " days"
                )
            else:
                new_interval = self.get_interval() - (timestamp - last_run)
                self.helper.log_info(
                    "Connector will not run, next run in: "
                    + str(round(new_interval / 60 / 60 / 24, 2))
                    + " days"
                )
        except (KeyboardInterrupt, SystemExit):
            self.helper.log_info("Connector stop")
            sys.exit(0)
        except Exception as e:
            self.helper.log_error(str(e))

    def run(self):
        self.helper.log_info("Fetching MITRE datasets...")
        get_run_and_terminate = getattr(self.helper, "get_run_and_terminate", None)
        if callable(get_run_and_terminate) and self.helper.get_run_and_terminate():
            self.process_data()
            self.helper.force_ping()
        else:
            while True:
                self.process_data()
                time.sleep(60)

    def send_bundle(self, work_id: str, serialized_bundle: str) -> None:
        try:
            self.helper.send_stix2_bundle(
                serialized_bundle,
                entities_types=self.helper.connect_scope,
                update=self.update_existing_data,
                work_id=work_id,
            )
        except Exception as e:
            self.helper.log_error(f"Error while sending bundle: {e}")
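# A minimal launcher sketch for the connector above. This entry point is an
# assumption (it is not part of the section); OpenCTI connectors of this shape
# are typically started as standalone processes that instantiate the class and
# call run().
if __name__ == "__main__":
    try:
        mitre_connector = Mitre()
        mitre_connector.run()
    except Exception as e:
        print(e)
        time.sleep(10)
        sys.exit(0)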
class Cryptolaemus:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.interval = 1  # 1 day interval between each scraping
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.data = {}

    def get_interval(self):
        return int(self.interval) * 60 * 60 * 24

    def next_run(self, seconds):
        return

    def run(self):
        self.helper.log_info("Fetching Cryptolaemus Emotet's datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: "
                        + datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or (
                    (timestamp - last_run) > ((int(self.interval) - 1) * 60 * 60 * 24)
                ):
                    self.helper.log_info("Connector will run!")

                    ## CORE ##
                    # Get the feed content
                    feed = feedparser.parse("https://paste.cryptolaemus.com/feed.xml")
                    # Variables
                    Epoch1C2 = []  # List of C2 of the Epoch1 botnet
                    Epoch2C2 = []  # List of C2 of the Epoch2 botnet
                    Epoch3C2 = []  # List of C2 of the Epoch3 botnet
                    # We will only extract the last item
                    # Source of the data (id field in the RSS feed)
                    source = feed["items"][0]["id"]
                    # Date of the data (updated field in the RSS feed)
                    date = feed["items"][0]["updated"]
                    # Content (HTML format) of the RSS feed's first item
                    soup = BeautifulSoup(
                        feed["items"][0]["content"][0]["value"], "lxml"
                    )
                    # Parse the feed's content (IP:port couples are in HTML <code>
                    # blocks with no id and no significant parent node; we select
                    # the right content by indexing the right <code> element).
                    list1 = soup.find_all("code")[0].text.split("\n")
                    list2 = soup.find_all("code")[3].text.split("\n")
                    list3 = soup.find_all("code")[6].text.split("\n")
                    # Parse the IP:port couples
                    for line in list1:
                        Epoch1C2.append(line.split(":"))
                    for line in list2:
                        Epoch2C2.append(line.split(":"))
                    for line in list3:
                        Epoch3C2.append(line.split(":"))
                    # Aggregate
                    self.data = {
                        "Source": source,
                        "Date": date,
                        "Epoch1C2": Epoch1C2,
                        "Epoch2C2": Epoch2C2,
                        "Epoch3C2": Epoch3C2,
                    }
                    # Capitalize Cryptolaemus
                    organization = self.helper.api.identity.create(
                        type="Organization",
                        name="Cryptolaemus Team",
                        description="Team of experts collecting and sharing daily updates of C2 IPs of Emotet's Epoch botnets.",
                    )
                    external_reference = self.helper.api.external_reference.create(
                        source_name="Cryptolaemus Team's Emotet C2 update of "
                        + self.data["Date"],
                        url=self.data["Source"],
                    )
                    malware = self.helper.api.malware.create(
                        name="Emotet",
                        description="Emotet is a modular malware variant which is primarily used as a downloader for other malware variants such as TrickBot and IcedID. Emotet first emerged in June 2014 and has been primarily used to target the banking sector. (Citation: Trend Micro Banking Malware Jan 2019)",
                    )
                    # Capitalize Epoch1 C2
                    for ip in self.data["Epoch1C2"]:
                        indicator = self.helper.api.indicator.create(
                            name=ip[0],
                            description="Botnet Epoch1 C2 IP address. Port: " + ip[1],
                            pattern_type="stix",
                            indicator_pattern="[ipv4-addr:value = '" + ip[0] + "']",
                            main_observable_type="IPv4-Addr",
                            valid_from=self.data["Date"],
                        )
                        if "observableRefsIds" in indicator:
                            for observable_id in indicator["observableRefsIds"]:
                                self.helper.api.stix_entity.add_external_reference(
                                    id=observable_id,
                                    external_reference_id=external_reference["id"],
                                )
                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=external_reference["id"],
                        )
                        relation = self.helper.api.stix_relation.create(
                            fromId=indicator["id"],
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self.data["Date"],
                            last_seen=self.data["Date"],
                            description="IP address associated to the Emotet Epoch1 botnet",
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            ignore_dates=True,
                            update=True,
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=external_reference["id"],
                        )
                    # Capitalize Epoch2 C2
                    for ip in self.data["Epoch2C2"]:
                        indicator = self.helper.api.indicator.create(
                            name=ip[0],
                            description="Botnet Epoch2 C2 IP address. Port: " + ip[1],
                            pattern_type="stix",
                            indicator_pattern="[ipv4-addr:value = '" + ip[0] + "']",
                            main_observable_type="IPv4-Addr",
                            valid_from=self.data["Date"],
                            createdByRef=organization["id"],
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=external_reference["id"],
                        )
                        relation = self.helper.api.stix_relation.create(
                            fromType="Indicator",
                            fromId=indicator["id"],
                            toType="Malware",
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self.data["Date"],
                            last_seen=self.data["Date"],
                            description="IP address associated to the Emotet Epoch2 botnet.",
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            ignore_dates=True,
                            update=True,
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=external_reference["id"],
                        )
                    # Capitalize Epoch3 C2
                    for ip in self.data["Epoch3C2"]:
                        indicator = self.helper.api.indicator.create(
                            name=ip[0],
                            description="Botnet Epoch3 C2 IP address. Port: " + ip[1],
                            pattern_type="stix",
                            indicator_pattern="[ipv4-addr:value = '" + ip[0] + "']",
                            main_observable_type="IPv4-Addr",
                            valid_from=self.data["Date"],
                            createdByRef=organization["id"],
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=external_reference["id"],
                        )
                        relation = self.helper.api.stix_relation.create(
                            fromType="Indicator",
                            fromId=indicator["id"],
                            toType="Malware",
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self.data["Date"],
                            last_seen=self.data["Date"],
                            description="IP address associated to the Emotet Epoch3 botnet.",
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            ignore_dates=True,
                            update=True,
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=external_reference["id"],
                        )
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as "
                        + str(timestamp)
                    )
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: "
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + " days"
                    )
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: "
                        + str(round(new_interval / 60 / 60 / 24, 2))
                        + " days"
                    )
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class CyberThreatCoalition:

    _OBSERVABLE_PATH = {
        "Domain-Name": ["value"],
        "IPv4-Addr": ["value"],
        "File_sha256": ["hashes", "SHA-256"],
        "File_sha1": ["hashes", "SHA-1"],
        "File_md5": ["hashes", "MD5"],
        "Url": ["value"],
    }

    _INDICATOR_PATTERN = {
        "Domain-Name": "[domain-name:value = '{}']",
        "IPv4-Addr": "[ipv4-addr:value = '{}']",
        "File_sha256": "[file:hashes.SHA-256 = '{}']",
        "File_sha1": "[file:hashes.SHA-1 = '{}']",
        "File_md5": "[file:hashes.MD5 = '{}']",
        "Url": "[url:value = '{}']",
    }

    _STATE_LAST_RUN = "last_run"

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cyber_threat_coalition_interval = get_config_variable(
            "CYBER_THREAT_COALITION_INTERVAL",
            ["cyber-threat-coalition", "interval_sec"],
            config,
            True,
        )
        self.cyber_threat_coalition_base_url = get_config_variable(
            "CYBER_THREAT_COALITION_BASE_URL",
            ["cyber-threat-coalition", "base_url"],
            config,
            False,
        )
        self.cyber_threat_coalition_create_indicators = get_config_variable(
            "CYBER_THREAT_COALITION_CREATE_INDICATORS",
            ["cyber-threat-coalition", "create_indicators"],
            config,
        )
        self.cyber_threat_coalition_create_observables = get_config_variable(
            "CYBER_THREAT_COALITION_CREATE_OBSERVABLES",
            ["cyber-threat-coalition", "create_observables"],
            config,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self) -> int:
        return int(self.cyber_threat_coalition_interval)

    @staticmethod
    def get_hash_type(hash_value):
        if re.match(r"^[0-9a-fA-F]{32}$", hash_value):
            return "File_md5"
        elif re.match(r"^[0-9a-fA-F]{40}$", hash_value):
            return "File_sha1"
        elif re.match(r"^[0-9a-fA-F]{64}$", hash_value):
            return "File_sha256"

    def fetch_and_send(self):
        timestamp = int(time.time())
        now = datetime.utcfromtimestamp(timestamp)
        friendly_name = "Cyber Threat Coalition run @ " + now.strftime(
            "%Y-%m-%d %H:%M:%S"
        )
        work_id = self.helper.api.work.initiate_work(
            self.helper.connect_id, friendly_name
        )
        bundle_objects = list()
        # Create an identity for the coalition team
        organization = stix2.Identity(
            id=OpenCTIStix2Utils.generate_random_stix_id("identity"),
            name="Cyber Threat Coalition Team",
            identity_class="organization",
            description="Team of experts collecting and sharing pandemic-related "
            "cyber threat intelligence during the COVID-19 crisis time",
        )
        # Add the organization to the bundle
        bundle_objects.append(organization)
        report_object_refs = list()
        for collection in ["domain", "ip", "url", "hash"]:
            # Fetch the blacklist
            url = self.cyber_threat_coalition_base_url + "/" + str(collection) + ".txt"
            response = requests.get(url=url)
            if response.status_code != 200:
                raise Exception(
                    "Unable to fetch {0} blacklist, server returned status: {1}".format(
                        collection, response.status_code
                    )
                )
            pattern_type = "stix"
            labels = ["COVID-19", "malicious-activity"]
            # Parse the content
            for data in response.iter_lines(decode_unicode=True):
                observable_type = None
                observable_resolver = None
                if data and not data.startswith("#"):
                    if collection == "domain":
                        observable_resolver = "Domain-Name"
                        observable_type = "Domain-Name"
                    elif collection == "ip":
                        observable_resolver = "IPv4-Addr"
                        observable_type = "IPv4-Addr"
                    elif collection == "url":
                        observable_resolver = "Url"
                        observable_type = "Url"
                        data = urllib.parse.quote(data, "/:")
                    elif collection == "hash":
                        observable_resolver = self.get_hash_type(data)
                        observable_type = "File"
                    indicator = None
                    if observable_resolver is None or observable_type is None:
                        return
                    if self.cyber_threat_coalition_create_indicators:
                        indicator = stix2.Indicator(
                            id=OpenCTIStix2Utils.generate_random_stix_id("indicator"),
                            name=data,
                            pattern_type=pattern_type,
                            pattern=self._INDICATOR_PATTERN[observable_resolver].format(
                                data
                            ),
                            labels=labels,
                            created_by_ref=organization,
                            object_marking_refs=[stix2.TLP_WHITE],
                            custom_properties={
                                "x_opencti_main_observable_type": observable_type,
                            },
                        )
                        bundle_objects.append(indicator)
                        report_object_refs.append(indicator["id"])
                    if self.cyber_threat_coalition_create_observables:
                        observable = SimpleObservable(
                            id=OpenCTIStix2Utils.generate_random_stix_id(
                                "x-opencti-simple-observable"
                            ),
                            key=observable_type
                            + "."
                            + ".".join(self._OBSERVABLE_PATH[observable_resolver]),
                            value=data,
                            labels=labels,
                            created_by_ref=organization,
                            object_marking_refs=[stix2.TLP_WHITE],
                        )
                        bundle_objects.append(observable)
                        report_object_refs.append(observable["id"])
                        if indicator is not None:
                            relationship = stix2.Relationship(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "relationship"
                                ),
                                relationship_type="based-on",
                                created_by_ref=organization,
                                source_ref=indicator.id,
                                target_ref=observable.id,
                            )
                            bundle_objects.append(relationship)
                            report_object_refs.append(relationship["id"])
        # Create a global threat report
        report_uuid = "report--552b3ae6-8522-409d-8b72-a739bc1926aa"
        report_external_reference = stix2.ExternalReference(
            source_name="Cyber Threat Coalition",
            url="https://www.cyberthreatcoalition.org",
            external_id="COVID19-CTC",
        )
        if report_object_refs:
            stix_report = stix2.Report(
                id=report_uuid,
                name="COVID-19 Cyber Threat Coalition (CTC) BlackList",
                type="report",
                description="This report represents the whole COVID-19 CTC blacklist.",
                published=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
                created_by_ref=organization,
                object_marking_refs=[stix2.TLP_WHITE],
                labels=labels,
                external_references=[report_external_reference],
                object_refs=report_object_refs,
            )
            # Add the report to the bundle
            bundle_objects.append(stix_report)
        # Create the STIX bundle
        bundle = stix2.Bundle(objects=bundle_objects)
        # Send data
        self.helper.send_stix2_bundle(
            bundle=bundle.serialize(), update=self.update_existing_data, work_id=work_id
        )
        return work_id

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= self.get_interval()

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        self.helper.log_info("Fetching Cyber Threat Coalition vetted blacklists...")
        while True:
            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()
                self.helper.log_info(f"Loaded state: {current_state}")

                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    # Fetch data and send it as a STIX bundle
                    work_id = self.fetch_and_send()
                    new_state = current_state.copy()
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()
                    message = f"Run done. Storing new state: {new_state}"
                    self.helper.log_info(message)
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.set_state(new_state)
                    self.helper.log_info(
                        f"State stored, next run in: {self.get_interval()} seconds"
                    )
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        f"Connector will not run, next run in: {new_interval} seconds"
                    )
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as ex:
                self.helper.log_error(str(ex))
                time.sleep(60)
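# A quick sanity check of the hash classification used for the "hash"
# collection above: the regexes map digest length to an observable key, and
# anything else falls through to None.
assert CyberThreatCoalition.get_hash_type("a" * 32) == "File_md5"
assert CyberThreatCoalition.get_hash_type("a" * 40) == "File_sha1"
assert CyberThreatCoalition.get_hash_type("a" * 64) == "File_sha256"
assert CyberThreatCoalition.get_hash_type("not-a-hash") is None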
class CrowdStrike:
    """CrowdStrike connector."""

    _CONFIG_NAMESPACE = "crowdstrike"

    _CONFIG_BASE_URL = f"{_CONFIG_NAMESPACE}.base_url"
    _CONFIG_CLIENT_ID = f"{_CONFIG_NAMESPACE}.client_id"
    _CONFIG_CLIENT_SECRET = f"{_CONFIG_NAMESPACE}.client_secret"
    _CONFIG_INTERVAL_SEC = f"{_CONFIG_NAMESPACE}.interval_sec"
    _CONFIG_SCOPES = f"{_CONFIG_NAMESPACE}.scopes"
    _CONFIG_TLP = f"{_CONFIG_NAMESPACE}.tlp"
    _CONFIG_CREATE_OBSERVABLES = f"{_CONFIG_NAMESPACE}.create_observables"
    _CONFIG_CREATE_INDICATORS = f"{_CONFIG_NAMESPACE}.create_indicators"
    _CONFIG_ACTOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.actor_start_timestamp"
    _CONFIG_REPORT_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.report_start_timestamp"
    _CONFIG_REPORT_INCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.report_include_types"
    _CONFIG_REPORT_STATUS = f"{_CONFIG_NAMESPACE}.report_status"
    _CONFIG_REPORT_TYPE = f"{_CONFIG_NAMESPACE}.report_type"
    _CONFIG_REPORT_GUESS_MALWARE = f"{_CONFIG_NAMESPACE}.report_guess_malware"
    _CONFIG_INDICATOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.indicator_start_timestamp"
    _CONFIG_INDICATOR_EXCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.indicator_exclude_types"
    _CONFIG_INDICATOR_LOW_SCORE = f"{_CONFIG_NAMESPACE}.indicator_low_score"
    _CONFIG_INDICATOR_LOW_SCORE_LABELS = (
        f"{_CONFIG_NAMESPACE}.indicator_low_score_labels"
    )
    _CONFIG_UPDATE_EXISTING_DATA = "connector.update_existing_data"

    _CONFIG_SCOPE_ACTOR = "actor"
    _CONFIG_SCOPE_REPORT = "report"
    _CONFIG_SCOPE_INDICATOR = "indicator"
    _CONFIG_SCOPE_YARA_MASTER = "yara_master"

    _CONFIG_REPORT_STATUS_MAPPING = {
        "new": 0,
        "in progress": 1,
        "analyzed": 2,
        "closed": 3,
    }

    _DEFAULT_CREATE_OBSERVABLES = True
    _DEFAULT_CREATE_INDICATORS = True
    _DEFAULT_REPORT_TYPE = "threat-report"
    _DEFAULT_INDICATOR_LOW_SCORE = 40

    _CONNECTOR_RUN_INTERVAL_SEC = 60

    _STATE_LAST_RUN = "last_run"

    def __init__(self) -> None:
        """Initialize CrowdStrike connector."""
        config = self._read_configuration()

        # CrowdStrike connector configuration
        base_url = self._get_configuration(config, self._CONFIG_BASE_URL)
        client_id = self._get_configuration(config, self._CONFIG_CLIENT_ID)
        client_secret = self._get_configuration(config, self._CONFIG_CLIENT_SECRET)

        self.interval_sec = self._get_configuration(
            config, self._CONFIG_INTERVAL_SEC, is_number=True
        )

        scopes_str = self._get_configuration(config, self._CONFIG_SCOPES)
        scopes = set()
        if scopes_str is not None:
            scopes = set(convert_comma_separated_str_to_list(scopes_str))

        tlp = self._get_configuration(config, self._CONFIG_TLP)
        tlp_marking = self._convert_tlp_to_marking_definition(tlp)

        create_observables = self._get_configuration(
            config, self._CONFIG_CREATE_OBSERVABLES
        )
        if create_observables is None:
            create_observables = self._DEFAULT_CREATE_OBSERVABLES
        else:
            create_observables = bool(create_observables)

        create_indicators = self._get_configuration(
            config, self._CONFIG_CREATE_INDICATORS
        )
        if create_indicators is None:
            create_indicators = self._DEFAULT_CREATE_INDICATORS
        else:
            create_indicators = bool(create_indicators)

        actor_start_timestamp = self._get_configuration(
            config, self._CONFIG_ACTOR_START_TIMESTAMP, is_number=True
        )
        if is_timestamp_in_future(actor_start_timestamp):
            raise ValueError("Actor start timestamp is in the future")

        report_start_timestamp = self._get_configuration(
            config, self._CONFIG_REPORT_START_TIMESTAMP, is_number=True
        )
        if is_timestamp_in_future(report_start_timestamp):
            raise ValueError("Report start timestamp is in the future")

        report_status_str = self._get_configuration(config, self._CONFIG_REPORT_STATUS)
        report_status = self._convert_report_status_str_to_report_status_int(
            report_status_str
        )

        report_type = self._get_configuration(config, self._CONFIG_REPORT_TYPE)
        if not report_type:
            report_type = self._DEFAULT_REPORT_TYPE

        report_include_types_str = self._get_configuration(
            config, self._CONFIG_REPORT_INCLUDE_TYPES
        )
        report_include_types = []
        if report_include_types_str is not None:
            report_include_types = convert_comma_separated_str_to_list(
                report_include_types_str
            )

        report_guess_malware = bool(
            self._get_configuration(config, self._CONFIG_REPORT_GUESS_MALWARE)
        )

        indicator_start_timestamp = self._get_configuration(
            config, self._CONFIG_INDICATOR_START_TIMESTAMP, is_number=True
        )
        if is_timestamp_in_future(indicator_start_timestamp):
            raise ValueError("Indicator start timestamp is in the future")

        indicator_exclude_types_str = self._get_configuration(
            config, self._CONFIG_INDICATOR_EXCLUDE_TYPES
        )
        indicator_exclude_types = []
        if indicator_exclude_types_str is not None:
            indicator_exclude_types = convert_comma_separated_str_to_list(
                indicator_exclude_types_str
            )

        indicator_low_score = self._get_configuration(
            config, self._CONFIG_INDICATOR_LOW_SCORE, is_number=True
        )
        if indicator_low_score is None:
            indicator_low_score = self._DEFAULT_INDICATOR_LOW_SCORE

        indicator_low_score_labels_str = self._get_configuration(
            config, self._CONFIG_INDICATOR_LOW_SCORE_LABELS
        )
        indicator_low_score_labels = []
        if indicator_low_score_labels_str is not None:
            indicator_low_score_labels = convert_comma_separated_str_to_list(
                indicator_low_score_labels_str
            )

        update_existing_data = bool(
            self._get_configuration(config, self._CONFIG_UPDATE_EXISTING_DATA)
        )

        author = self._create_author()

        # Create OpenCTI connector helper.
        self.helper = OpenCTIConnectorHelper(config)

        # Create CrowdStrike client.
        client = CrowdStrikeClient(base_url, client_id, client_secret)

        # Create importers.
        importers: List[BaseImporter] = []

        if self._CONFIG_SCOPE_ACTOR in scopes:
            actor_importer = ActorImporter(
                self.helper,
                client.intel_api.actors,
                update_existing_data,
                author,
                actor_start_timestamp,
                tlp_marking,
            )
            importers.append(actor_importer)

        if self._CONFIG_SCOPE_REPORT in scopes:
            report_importer = ReportImporter(
                self.helper,
                client.intel_api.reports,
                update_existing_data,
                author,
                report_start_timestamp,
                tlp_marking,
                report_include_types,
                report_status,
                report_type,
                report_guess_malware,
            )
            importers.append(report_importer)

        if self._CONFIG_SCOPE_INDICATOR in scopes:
            indicator_importer_config = IndicatorImporterConfig(
                helper=self.helper,
                indicators_api=client.intel_api.indicators,
                reports_api=client.intel_api.reports,
                update_existing_data=update_existing_data,
                author=author,
                default_latest_timestamp=indicator_start_timestamp,
                tlp_marking=tlp_marking,
                create_observables=create_observables,
                create_indicators=create_indicators,
                exclude_types=indicator_exclude_types,
                report_status=report_status,
                report_type=report_type,
                indicator_low_score=indicator_low_score,
                indicator_low_score_labels=set(indicator_low_score_labels),
            )
            indicator_importer = IndicatorImporter(indicator_importer_config)
            importers.append(indicator_importer)

        if self._CONFIG_SCOPE_YARA_MASTER in scopes:
            yara_master_importer = YaraMasterImporter(
                self.helper,
                client.intel_api.rules,
                client.intel_api.reports,
                author,
                tlp_marking,
                update_existing_data,
                report_status,
                report_type,
            )
            importers.append(yara_master_importer)

        self.importers = importers

    @staticmethod
    def _read_configuration() -> Dict[str, str]:
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/../config.yml"
        if not os.path.isfile(config_file_path):
            return {}
        return yaml.load(open(config_file_path), Loader=yaml.FullLoader)

    @staticmethod
    def _create_author() -> Identity:
        return create_organization("CrowdStrike")

    @staticmethod
    def _get_yaml_path(config_name: str) -> List[str]:
        return config_name.split(".")

    @staticmethod
    def _get_environment_variable_name(yaml_path: List[str]) -> str:
        return "_".join(yaml_path).upper()

    @classmethod
    def _get_configuration(
        cls, config: Dict[str, Any], config_name: str, is_number: bool = False
    ) -> Any:
        yaml_path = cls._get_yaml_path(config_name)
        env_var_name = cls._get_environment_variable_name(yaml_path)
        config_value = get_config_variable(
            env_var_name, yaml_path, config, isNumber=is_number
        )
        return config_value

    @classmethod
    def _convert_tlp_to_marking_definition(
        cls, tlp_value: Optional[str]
    ) -> MarkingDefinition:
        if tlp_value is None:
            return DEFAULT_TLP_MARKING_DEFINITION
        return get_tlp_string_marking_definition(tlp_value)

    @classmethod
    def _convert_report_status_str_to_report_status_int(
        cls, report_status: str
    ) -> int:
        return cls._CONFIG_REPORT_STATUS_MAPPING[report_status.lower()]

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    @classmethod
    def _sleep(cls, delay_sec: Optional[int] = None) -> None:
        sleep_delay = (
            delay_sec if delay_sec is not None else cls._CONNECTOR_RUN_INTERVAL_SEC
        )
        time.sleep(sleep_delay)

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        if last_run is None:
            self._info("CrowdStrike connector clean run")
            return True
        time_diff = current_time - last_run
        return time_diff >= self._get_interval()

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        """Run CrowdStrike connector."""
        self._info("Starting CrowdStrike connector...")

        if not self.importers:
            self._error("Scope(s) not configured.")
            return

        while True:
            self._info("Running CrowdStrike connector...")
            run_interval = self._CONNECTOR_RUN_INTERVAL_SEC

            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()
                self.helper.log_info(f"Loaded state: {current_state}")

                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    work_id = self._initiate_work(timestamp)

                    new_state = current_state.copy()

                    for importer in self.importers:
                        importer_state = importer.start(work_id, new_state)
                        new_state.update(importer_state)

                        self._info("Storing updated new state: {0}", new_state)
                        self.helper.set_state(new_state)

                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()

                    self._info("Storing new state: {0}", new_state)
                    self.helper.set_state(new_state)

                    message = (
                        f"State stored, next run in: {self._get_interval()} seconds"
                    )
                    self._info(message)

                    self._complete_work(work_id, message)
                else:
                    next_run = self._get_interval() - (timestamp - last_run)
                    run_interval = min(run_interval, next_run)

                    self._info(
                        "Connector will not run, next run in: {0} seconds", next_run
                    )

                self._sleep(delay_sec=run_interval)
            except (KeyboardInterrupt, SystemExit):
                self._info("CrowdStrike connector stopping...")
                exit(0)
            except Exception as e:  # noqa: B902
                self._error("CrowdStrike connector internal error: {0}", str(e))
                self._sleep()

    def _initiate_work(self, timestamp: int) -> str:
        datetime_str = timestamp_to_datetime(timestamp)
        friendly_name = f"{self.helper.connect_name} @ {datetime_str}"
        work_id = self.helper.api.work.initiate_work(
            self.helper.connect_id, friendly_name
        )
        self._info("New work '{0}' initiated", work_id)
        return work_id

    def _complete_work(self, work_id: str, message: str) -> None:
        self.helper.api.work.to_processed(work_id, message)

    def _get_interval(self) -> int:
        return int(self.interval_sec)

    def _info(self, msg: str, *args: Any) -> None:
        fmt_msg = msg.format(*args)
        self.helper.log_info(fmt_msg)

    def _error(self, msg: str, *args: Any) -> None:
        fmt_msg = msg.format(*args)
        self.helper.log_error(fmt_msg)
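# Illustration of the configuration-lookup convention used by
# _get_configuration above: a dotted config name resolves both to a YAML path
# and to an upper-cased, underscore-joined environment variable name.
assert CrowdStrike._get_yaml_path("crowdstrike.base_url") == ["crowdstrike", "base_url"]
assert (
    CrowdStrike._get_environment_variable_name(["crowdstrike", "base_url"])
    == "CROWDSTRIKE_BASE_URL"
)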
class Cve:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + '/config.yml'
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_nvd_data_feed = get_config_variable(
            'CVE_NVD_DATA_FEED', ['cve', 'nvd_data_feed'], config
        )
        self.cve_interval = get_config_variable(
            'CVE_INTERVAL', ['cve', 'interval'], config, True
        )
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA',
            ['connector', 'update_existing_data'],
            config,
        )

    def get_interval(self):
        return int(self.cve_interval) * 60 * 60 * 24

    def run(self):
        self.helper.log_info('Fetching CVE knowledge...')
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and 'last_run' in current_state:
                    last_run = current_state['last_run']
                    self.helper.log_info(
                        'Connector last run: '
                        + datetime.utcfromtimestamp(last_run).strftime(
                            '%Y-%m-%d %H:%M:%S'
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info('Connector has never run')
                # If the last_run is more than interval-1 day
                if last_run is None or (
                    (timestamp - last_run)
                    > ((int(self.cve_interval) - 1) * 60 * 60 * 24)
                ):
                    # Download the json.gz file
                    self.helper.log_info('Requesting the file')
                    urllib.request.urlretrieve(
                        self.cve_nvd_data_feed,
                        os.path.dirname(os.path.abspath(__file__)) + '/data.json.gz',
                    )
                    # Unzip the file
                    self.helper.log_info('Unzipping the file')
                    with gzip.open('data.json.gz', 'rb') as f_in:
                        with open('data.json', 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)
                    # Convert the file to stix2
                    self.helper.log_info('Converting the file')
                    convert('data.json', 'data-stix2.json')
                    with open('data-stix2.json') as stix_json:
                        contents = stix_json.read()
                        self.helper.send_stix2_bundle(
                            contents,
                            self.helper.connect_scope,
                            self.update_existing_data,
                        )
                    # Remove the files
                    os.remove('data.json')
                    os.remove('data.json.gz')
                    os.remove('data-stix2.json')
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        'Connector successfully run, storing last_run as '
                        + str(timestamp)
                    )
                    self.helper.set_state({'last_run': timestamp})
                    self.helper.log_info(
                        'Last_run stored, next run in: '
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + ' days'
                    )
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        'Connector will not run, next run in: '
                        + str(round(new_interval / 60 / 60 / 24, 2))
                        + ' days'
                    )
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info('Connector stop')
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class Sekoia(object):

    limit = 200

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        self._cache = {}
        # Extra config
        self.base_url = self.get_config("base_url", config, "https://api.sekoia.io")
        self.start_date: str = self.get_config("start_date", config, None)
        self.collection = self.get_config(
            "collection", config, "d6092c37-d8d7-45c3-8aff-c4dc26030608"
        )
        self.create_observables = self.get_config("create_observables", config, True)

        self.helper.log_info("Setting up api key")
        self.api_key = self.get_config("api_key", config)
        if not self.api_key:
            self.helper.log_error("API key is missing")
            raise ValueError("API key is missing")

        self._load_data_sets()
        self.helper.log_info("All datasets have been loaded")

    def run(self):
        self.helper.log_info("Starting SEKOIA.IO connector")
        state = self.helper.get_state() or {}
        cursor = state.get("last_cursor", self.generate_first_cursor())
        self.helper.log_info(f"Starting with {cursor}")
        while True:
            friendly_name = "SEKOIA run @ " + datetime.utcnow().strftime(
                "%Y-%m-%d %H:%M:%S"
            )
            work_id = self.helper.api.work.initiate_work(
                self.helper.connect_id, friendly_name
            )
            try:
                cursor = self._run(cursor, work_id)
                message = f"Connector successfully run, cursor updated to {cursor}"
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                self.helper.api.work.to_processed(work_id, "Connector is stopping")
                exit(0)
            except Exception as ex:
                # In case of error, try to get the last updated cursor
                # since `_run` updates it after every successful request.
                state = self.helper.get_state() or {}
                cursor = state.get("last_cursor", cursor)
                self.helper.log_error(str(ex))
                message = f"Connector encountered an error, cursor updated to {cursor}"
                self.helper.api.work.to_processed(work_id, message)

            time.sleep(60)

    @staticmethod
    def get_config(name: str, config, default: Any = None):
        env_name = f"SEKOIA_{name.upper()}"
        result = get_config_variable(env_name, ["sekoia", name], config)
        return result or default

    def get_collection_url(self):
        return urljoin(
            self.base_url, "v2/inthreat/collections", self.collection, "objects"
        )

    def get_object_url(self, ids: Iterable):
        return urljoin(self.base_url, "v2/inthreat/objects", ",".join(ids))

    def get_relationship_url(self, ids: Iterable):
        return urljoin(self.base_url, "v2/inthreat/relationships", ",".join(ids))

    def get_file_url(self, item_id: str, file_hash: str):
        return urljoin(
            self.base_url, "v2/inthreat/objects", item_id, "files", file_hash
        )

    def generate_first_cursor(self) -> str:
        """
        Generate the first cursor to interrogate the API
        so we don't start at the beginning.
        """
        start = f"{(datetime.utcnow() - timedelta(hours=1)).isoformat()}Z"
        if self.start_date:
            try:
                start = f"{parse(self.start_date).isoformat()}Z"
            except ParserError:
                pass
        return base64.b64encode(start.encode("utf-8")).decode("utf-8")

    @staticmethod
    def chunks(items, chunk_size):
        """
        Yield successive n-sized chunks from items.
        """
        for i in range(0, len(items), chunk_size):
            yield items[i : i + chunk_size]

    def _run(self, cursor, work_id):
        while True:
            params = {"limit": self.limit, "cursor": cursor}

            data = self._send_request(self.get_collection_url(), params)
            if not data:
                return cursor

            cursor = data["next_cursor"] or cursor  # In case next_cursor is None
            items = data["items"]
            if not items:
                return cursor

            items = self._retrieve_references(items)
            self._add_main_observable_type_to_indicators(items)
            if self.create_observables:
                self._add_create_observables_to_indicators(items)
            items = self._clean_ic_fields(items)
            self._add_files_to_items(items)
            bundle = self.helper.stix2_create_bundle(items)
            try:
                self.helper.send_stix2_bundle(bundle, update=True, work_id=work_id)
            except RecursionError:
                self.helper.send_stix2_bundle(
                    bundle, update=True, work_id=work_id, bypass_split=True
                )

            self.helper.set_state({"last_cursor": cursor})
            if len(items) < self.limit:
                # We got the last results
                return cursor

    def _clean_ic_fields(self, items: List[Dict]) -> List[Dict]:
        """
        Remove fields specific to the Intelligence Center
        that will not add value in OpenCTI.
        """
        return [
            {
                field: value
                for field, value in item.items()
                if not self._field_to_ignore(field)
            }
            for item in items
        ]

    @staticmethod
    def _field_to_ignore(field: str) -> bool:
        to_ignore = [
            "x_ic_impacted_locations",
            "x_ic_impacted_sectors",
        ]
        return (
            (field.startswith("x_ic") or field.startswith("x_inthreat"))
            and (field.endswith("ref") or field.endswith("refs"))
        ) or field in to_ignore

    @staticmethod
    def _add_create_observables_to_indicators(items: List[Dict]):
        for item in items:
            if item.get("type") == "indicator":
                item["x_opencti_create_observables"] = True

    @staticmethod
    def _add_main_observable_type_to_indicators(items: List[Dict]):
        for item in items:
            if (
                item.get("type") == "indicator"
                and item.get("x_ic_observable_types") is not None
                and len(item.get("x_ic_observable_types")) > 0
            ):
                stix_type = item.get("x_ic_observable_types")[0]
                item[
                    "x_opencti_main_observable_type"
                ] = OpenCTIStix2Utils.stix_observable_opencti_type(stix_type)

    def _retrieve_references(
        self, items: List[Dict], current_depth: int = 0
    ) -> List[Dict]:
        """
        Retrieve the references that appear in the given items.

        To avoid infinite recursion, a safeguard has been implemented.
        """
        if current_depth == 5:
            # Safeguard to avoid infinite recursion if an object was not found, for example
            return items

        items = self._update_mapped_refs(items)
        to_fetch = self._get_missing_refs(items)
        for ref in list(to_fetch):
            if ref in self._cache:
                items.append(self._cache[ref])
                to_fetch.remove(ref)
        if not to_fetch:
            return items

        objects_to_fetch = [i for i in to_fetch if not i.startswith("relationship--")]
        items += self._retrieve_by_ids(objects_to_fetch, self.get_object_url)

        relationships_to_fetch = [i for i in to_fetch if i.startswith("relationship--")]
        items += self._retrieve_by_ids(
            relationships_to_fetch, self.get_relationship_url
        )
        return self._retrieve_references(items, current_depth + 1)

    def _get_missing_refs(self, items: List[Dict]) -> Set:
        """
        Get the objects' references that are missing.
        """
        ids = {item["id"] for item in items}
        refs = set()
        for item in items:
            refs.update(item.get("object_marking_refs", []))
            if item.get("created_by_ref"):
                refs.add(item["created_by_ref"])
            if item["type"] == "report":
                object_refs = [
                    ref
                    for ref in item.get("object_refs", [])
                    if not self._is_mapped_ref(ref)
                ]
                refs.update(object_refs)
            if item["type"] == "relationship":
                if not self._is_mapped_ref(item["source_ref"]):
                    refs.add(item["source_ref"])
                if not self._is_mapped_ref(item["target_ref"]):
                    refs.add(item["target_ref"])
        return refs - ids

    def _is_mapped_ref(self, ref: str) -> bool:
        """
        Whether or not the reference is a mapped one.
        """
        return (
            ref in self._geography_mapping.values()
            or ref in self._sectors_mapping.values()
        )

    def _update_mapped_refs(self, items: List[Dict]):
        """
        Update references that are mapped between SEKOIA and OpenCTI.

        This way we will be able to create links with OpenCTI's
        own sectors and locations.
        """
        for item in items:
            if item.get("object_marking_refs"):
                item["object_marking_refs"] = self._replace_mapped_refs(
                    item["object_marking_refs"]
                )
            if item.get("object_refs"):
                item["object_refs"] = self._replace_mapped_refs(item["object_refs"])
            if item.get("source_ref"):
                item["source_ref"] = self._get_mapped_ref(item["source_ref"])
            if item.get("target_ref"):
                item["target_ref"] = self._get_mapped_ref(item["target_ref"])
        return items

    def _replace_mapped_refs(self, refs: List):
        for i, ref in enumerate(refs):
            refs[i] = self._get_mapped_ref(ref)
        return refs

    def _get_mapped_ref(self, ref: str):
        if ref in self._geography_mapping:
            return self._geography_mapping[ref]
        if ref in self._sectors_mapping:
            return self._sectors_mapping[ref]
        return ref

    def _retrieve_by_ids(self, ids, url_callback):
        """
        Fetch the items for the given ids.
        """
        items = []
        for chunk in self.chunks(ids, 40):
            url = url_callback(chunk)
            res = self._send_request(url)
            if not res:
                continue
            if "items" in res:
                items.extend(res["items"])
                for item in res["items"]:
                    self._clean_and_add_to_cache(item)
            if "data" in res:
                items.append(res["data"])
                self._clean_and_add_to_cache(res["data"])
        return items

    def _clean_and_add_to_cache(self, item):
        """
        Add the item to the cache only if it is an identity or a marking definition.
        """
        if item["id"].startswith("marking-definition--") or item["id"].startswith(
            "identity--"
        ):
            if item["id"].startswith("marking-definition--"):
                item.pop("object_marking_refs", None)
            self._cache[item["id"]] = item

    def _send_request(self, url, params=None, binary=False):
        """
        Send the HTTP request and handle the errors.
        """
        try:
            headers = {"Authorization": f"Bearer {self.api_key}"}
            res = requests.get(url, params=params, headers=headers)
            res.raise_for_status()
            if binary:
                return res.content
            return res.json()
        except RequestException as ex:
            if ex.response:
                error = f"Request failed with status: {ex.response.status_code}"
                self.helper.log_error(error)
            else:
                self.helper.log_error(str(ex))
            return None

    def _load_data_sets(self):
        # Mapping between SEKOIA sectors/locations and OpenCTI ones
        self.helper.log_info("Loading locations mapping")
        with open("./data/geography_mapping.json") as fp:
            self._geography_mapping: Dict = json.load(fp)

        self.helper.log_info("Loading sectors mapping")
        with open("./data/sectors_mapping.json") as fp:
            self._sectors_mapping: Dict = json.load(fp)

        # Add OpenCTI sectors/locations to the cache
        self.helper.log_info("Loading OpenCTI sectors")
        with open("./data/sectors.json") as fp:
            objects = json.load(fp)["objects"]
            for sector in objects:
                self._clean_and_add_to_cache(sector)

        self.helper.log_info("Loading OpenCTI locations")
        with open("./data/geography.json") as fp:
            for geography in json.load(fp)["objects"]:
                self._clean_and_add_to_cache(geography)

    def _add_files_to_items(self, items: List[Dict]):
        for item in items:
            if not item.get("x_inthreat_uploaded_files"):
                continue
            item["x_opencti_files"] = []
            for file in item.get("x_inthreat_uploaded_files", []):
                url = self.get_file_url(item["id"], file["sha256"])
                data = self._send_request(url, binary=True)
                if data:
                    item["x_opencti_files"].append(
                        {
                            "name": file["file_name"],
                            "data": base64.b64encode(data).decode("utf-8"),
                            "mime_type": file.get("mime_type", "text/plain"),
                        }
                    )
class Cve:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_import_history = get_config_variable(
            "CVE_IMPORT_HISTORY", ["cve", "import_history"], config, False
        )
        self.cve_nvd_data_feed = get_config_variable(
            "CVE_NVD_DATA_FEED", ["cve", "nvd_data_feed"], config
        )
        self.cve_history_data_feed = get_config_variable(
            "CVE_HISTORY_DATA_FEED", ["cve", "history_data_feed"], config
        )
        self.cve_interval = get_config_variable(
            "CVE_INTERVAL", ["cve", "interval"], config, True
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        return int(self.cve_interval) * 60 * 60 * 24

    def delete_files(self):
        if os.path.exists("data.json"):
            os.remove("data.json")
        if os.path.exists("data.json.gz"):
            os.remove("data.json.gz")
        if os.path.exists("data-stix2.json"):
            os.remove("data-stix2.json")

    def convert_and_send(self, url, work_id):
        try:
            # Download the json.gz file
            self.helper.log_info("Requesting the file " + url)
            response = urllib.request.urlopen(
                url, context=ssl.create_default_context(cafile=certifi.where())
            )
            data = response.read()
            with open(
                os.path.dirname(os.path.abspath(__file__)) + "/data.json.gz", "wb"
            ) as file:
                file.write(data)
            # Unzip the file
            self.helper.log_info("Unzipping the file")
            with gzip.open("data.json.gz", "rb") as f_in:
                with open("data.json", "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            # Convert the file to stix2
            self.helper.log_info("Converting the file")
            convert("data.json", "data-stix2.json")
            with open("data-stix2.json") as stix_json:
                contents = stix_json.read()
                self.helper.send_stix2_bundle(
                    contents,
                    entities_types=self.helper.connect_scope,
                    update=self.update_existing_data,
                    work_id=work_id,
                )
            # Remove the files
            self.delete_files()
        except Exception as e:
            self.delete_files()
            self.helper.log_error(str(e))
            time.sleep(60)

    def run(self):
        self.helper.log_info("Fetching CVE knowledge...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: "
                        + datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or (
                    (timestamp - last_run)
                    > ((int(self.cve_interval) - 1) * 60 * 60 * 24)
                ):
                    timestamp = int(time.time())
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "CVE run @ " + now.strftime("%Y-%m-%d %H:%M:%S")
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name
                    )
                    self.convert_and_send(self.cve_nvd_data_feed, work_id)
                    # If import_history is enabled and the connector has never run
                    if last_run is None and self.cve_import_history:
                        now = datetime.now()
                        years = list(range(2002, now.year + 1))
                        for year in years:
                            self.convert_and_send(
                                f"{self.cve_history_data_feed}nvdcve-1.1-{year}.json.gz",
                                work_id,
                            )
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as "
                        + str(timestamp)
                    )
                    self.helper.set_state({"last_run": timestamp})
                    message = (
                        "Last_run stored, next run in: "
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + " days"
                    )
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: "
                        + str(round(new_interval / 60 / 60 / 24, 2))
                        + " days"
                    )
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
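# Illustration of the history import above: one NVD yearly feed URL is built
# per year since 2002. The base URL shown here is hypothetical; the real one
# comes from CVE_HISTORY_DATA_FEED.
history_base = "https://example.org/feeds/json/cve/1.1/"
urls = [f"{history_base}nvdcve-1.1-{year}.json.gz" for year in range(2002, 2005)]
# -> [".../nvdcve-1.1-2002.json.gz", ".../nvdcve-1.1-2003.json.gz", ".../nvdcve-1.1-2004.json.gz"]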
class CrowdStrike:
    """CrowdStrike connector."""

    _CONFIG_NAMESPACE = "crowdstrike"

    _CONFIG_BASE_URL = f"{_CONFIG_NAMESPACE}.base_url"
    _CONFIG_CLIENT_ID = f"{_CONFIG_NAMESPACE}.client_id"
    _CONFIG_CLIENT_SECRET = f"{_CONFIG_NAMESPACE}.client_secret"
    _CONFIG_INTERVAL_SEC = f"{_CONFIG_NAMESPACE}.interval_sec"
    _CONFIG_SCOPES = f"{_CONFIG_NAMESPACE}.scopes"
    _CONFIG_TLP = f"{_CONFIG_NAMESPACE}.tlp"
    _CONFIG_ACTOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.actor_start_timestamp"
    _CONFIG_REPORT_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.report_start_timestamp"
    _CONFIG_REPORT_INCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.report_include_types"
    _CONFIG_REPORT_STATUS = f"{_CONFIG_NAMESPACE}.report_status"
    _CONFIG_REPORT_TYPE = f"{_CONFIG_NAMESPACE}.report_type"
    _CONFIG_REPORT_GUESS_MALWARE = f"{_CONFIG_NAMESPACE}.report_guess_malware"
    _CONFIG_INDICATOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.indicator_start_timestamp"
    _CONFIG_INDICATOR_EXCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.indicator_exclude_types"
    _CONFIG_UPDATE_EXISTING_DATA = "connector.update_existing_data"

    _CONFIG_SCOPE_ACTOR = "actor"
    _CONFIG_SCOPE_REPORT = "report"
    _CONFIG_SCOPE_INDICATOR = "indicator"
    _CONFIG_SCOPE_YARA_MASTER = "yara_master"

    _CONFIG_TLP_MAPPING = {
        "white": TLP_WHITE,
        "green": TLP_GREEN,
        "amber": TLP_AMBER,
        "red": TLP_RED,
    }

    _CONFIG_REPORT_STATUS_MAPPING = {
        "new": 0,
        "in progress": 1,
        "analyzed": 2,
        "closed": 3,
    }

    _DEFAULT_REPORT_TYPE = "Threat Report"

    _STATE_LAST_RUN = "last_run"

    def __init__(self) -> None:
        """Initialize CrowdStrike connector."""
        config = self._read_configuration()
        self.helper = OpenCTIConnectorHelper(config)

        # CrowdStrike connector configuration
        base_url = self._get_configuration(config, self._CONFIG_BASE_URL)
        client_id = self._get_configuration(config, self._CONFIG_CLIENT_ID)
        client_secret = self._get_configuration(config, self._CONFIG_CLIENT_SECRET)

        self.interval_sec = self._get_configuration(
            config, self._CONFIG_INTERVAL_SEC, is_number=True
        )

        scopes_str = self._get_configuration(config, self._CONFIG_SCOPES)
        scopes = set()
        if scopes_str is not None:
            scopes = set(convert_comma_separated_str_to_list(scopes_str))
        self.scopes = scopes

        tlp = self._get_configuration(config, self._CONFIG_TLP)
        tlp_marking = self._convert_tlp_to_marking_definition(tlp)

        actor_start_timestamp = self._get_configuration(
            config, self._CONFIG_ACTOR_START_TIMESTAMP, is_number=True
        )

        report_start_timestamp = self._get_configuration(
            config, self._CONFIG_REPORT_START_TIMESTAMP, is_number=True
        )
        report_status_str = self._get_configuration(config, self._CONFIG_REPORT_STATUS)
        report_status = self._convert_report_status_str_to_report_status_int(
            report_status_str
        )
        report_type = self._get_configuration(config, self._CONFIG_REPORT_TYPE)
        if not report_type:
            report_type = self._DEFAULT_REPORT_TYPE
        report_include_types_str = self._get_configuration(
            config, self._CONFIG_REPORT_INCLUDE_TYPES
        )
        report_include_types = []
        if report_include_types_str is not None:
            report_include_types = convert_comma_separated_str_to_list(
                report_include_types_str
            )
        report_guess_malware = bool(
            self._get_configuration(config, self._CONFIG_REPORT_GUESS_MALWARE)
        )

        indicator_start_timestamp = self._get_configuration(
            config, self._CONFIG_INDICATOR_START_TIMESTAMP, is_number=True
        )
        indicator_exclude_types_str = self._get_configuration(
            config, self._CONFIG_INDICATOR_EXCLUDE_TYPES
        )
        indicator_exclude_types = []
        if indicator_exclude_types_str is not None:
            indicator_exclude_types = convert_comma_separated_str_to_list(
                indicator_exclude_types_str
            )

        update_existing_data = bool(
            self._get_configuration(config, self._CONFIG_UPDATE_EXISTING_DATA)
        )

        author = self._create_author()

        # Create CrowdStrike client and importers
        client = CrowdStrikeClient(base_url, client_id, client_secret)

        self.actor_importer = ActorImporter(
            self.helper,
            client.intel_api.actors,
            update_existing_data,
            author,
            actor_start_timestamp,
            tlp_marking,
        )

        self.report_importer = ReportImporter(
            self.helper,
            client.intel_api.reports,
            update_existing_data,
            author,
            report_start_timestamp,
            tlp_marking,
            report_include_types,
            report_status,
            report_type,
            report_guess_malware,
        )

        self.indicator_importer = IndicatorImporter(
            self.helper,
            client.intel_api.indicators,
            client.intel_api.reports,
            update_existing_data,
            author,
            indicator_start_timestamp,
            tlp_marking,
            indicator_exclude_types,
            report_status,
            report_type,
        )

        self.rules_yara_master_importer = RulesYaraMasterImporter(
            self.helper,
            client.intel_api.rules,
            client.intel_api.reports,
            author,
            tlp_marking,
            update_existing_data,
            report_status,
            report_type,
        )

    @staticmethod
    def _read_configuration() -> Dict[str, str]:
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/../config.yml"
        if not os.path.isfile(config_file_path):
            return {}
        return yaml.load(open(config_file_path), Loader=yaml.FullLoader)

    @staticmethod
    def _create_author() -> Identity:
        return create_organization("CrowdStrike")

    @staticmethod
    def _get_yaml_path(config_name: str) -> List[str]:
        return config_name.split(".")

    @staticmethod
    def _get_environment_variable_name(yaml_path: List[str]) -> str:
        return "_".join(yaml_path).upper()

    @classmethod
    def _get_configuration(
        cls, config: Dict[str, Any], config_name: str, is_number: bool = False
    ) -> Any:
        yaml_path = cls._get_yaml_path(config_name)
        env_var_name = cls._get_environment_variable_name(yaml_path)
        config_value = get_config_variable(
            env_var_name, yaml_path, config, isNumber=is_number
        )
        return config_value

    @classmethod
    def _convert_tlp_to_marking_definition(cls, tlp_value: str) -> MarkingDefinition:
        return cls._CONFIG_TLP_MAPPING[tlp_value.lower()]

    @classmethod
    def _convert_report_status_str_to_report_status_int(
        cls, report_status: str
    ) -> int:
        return cls._CONFIG_REPORT_STATUS_MAPPING[report_status.lower()]

    def get_interval(self) -> int:
        return int(self.interval_sec)

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= self.get_interval()

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        self.helper.log_info("Starting CrowdStrike connector...")
        while True:
            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()
                self.helper.log_info(f"Loaded state: {current_state}")

                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    actor_importer_state = self._run_actor_importer(current_state)
                    report_importer_state = self._run_report_importer(current_state)
                    indicator_importer_state = self._run_indicator_importer(
                        current_state
                    )
                    yara_master_importer_state = self._run_rules_yara_master_importer(
                        current_state
                    )

                    new_state = current_state.copy()
                    new_state.update(actor_importer_state)
                    new_state.update(report_importer_state)
                    new_state.update(indicator_importer_state)
                    new_state.update(yara_master_importer_state)
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()

                    self.helper.log_info(f"Storing new state: {new_state}")
                    self.helper.set_state(new_state)
                    self.helper.log_info(
                        f"State stored, next run in: {self.get_interval()} seconds"
                    )
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        f"Connector will not run, next run in: {new_interval} seconds"
                    )
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)

    def _run_actor_importer(
        self, current_state: Mapping[str, Any]
    ) -> Mapping[str, Any]:
        if self._is_scope_enabled(self._CONFIG_SCOPE_ACTOR):
            return self.actor_importer.run(current_state)
        return {}

    def _run_report_importer(
        self, current_state: Mapping[str, Any]
    ) -> Mapping[str, Any]:
        if self._is_scope_enabled(self._CONFIG_SCOPE_REPORT):
            return self.report_importer.run(current_state)
        return {}

    def _run_indicator_importer(
        self, current_state: Mapping[str, Any]
    ) -> Mapping[str, Any]:
        if self._is_scope_enabled(self._CONFIG_SCOPE_INDICATOR):
            return self.indicator_importer.run(current_state)
        return {}

    def _run_rules_yara_master_importer(
        self, current_state: Mapping[str, Any]
    ) -> Mapping[str, Any]:
        if self._is_scope_enabled(self._CONFIG_SCOPE_YARA_MASTER):
            return self.rules_yara_master_importer.run(current_state)
        return {}

    def _is_scope_enabled(self, scope: str) -> bool:
        result = scope in self.scopes
        if not result:
            self.helper.log_info(f"Scope '{scope}' is not enabled")
        return result
class Mitre: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + "/config.yml" config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}) self.helper = OpenCTIConnectorHelper(config) # Extra config self.mitre_enterprise_file_url = get_config_variable( "MITRE_ENTERPRISE_FILE_URL", ["mitre", "enterprise_file_url"], config) self.mitre_pre_attack_file_url = get_config_variable( "MITRE_PRE_ATTACK_FILE_URL", ["mitre", "pre_attack_file_url"], config) self.mitre_mobile_attack_file_url = get_config_variable( "MITRE_MOBILE_ATTACK_FILE_URL", ["mitre", "mobile_attack_file_url"], config) self.mitre_interval = get_config_variable("MITRE_INTERVAL", ["mitre", "interval"], config, True) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) def get_interval(self): return int(self.mitre_interval) * 60 * 60 * 24 def next_run(self, seconds): return def run(self): self.helper.log_info("Fetching MITRE datasets...") while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = current_state["last_run"] self.helper.log_info("Connector last run: " + datetime.utcfromtimestamp(last_run). strftime("%Y-%m-%d %H:%M:%S")) else: last_run = None self.helper.log_info("Connector has never run") # If the last_run is more than interval-1 day if last_run is None or ((timestamp - last_run) > ( (int(self.mitre_interval) - 1) * 60 * 60 * 24)): self.helper.log_info("Connector will run!") try: enterprise_data = (urllib.request.urlopen( self.mitre_enterprise_file_url).read().decode( "utf-8")) self.helper.send_stix2_bundle( enterprise_data, self.helper.connect_scope, self.update_existing_data, ) except Exception as e: self.helper.log_error(str(e)) try: pre_attack_data = (urllib.request.urlopen( self.mitre_pre_attack_file_url).read().decode( "utf-8")) self.helper.send_stix2_bundle( pre_attack_data, self.helper.connect_scope, self.update_existing_data, ) except Exception as e: self.helper.log_error(str(e)) try: mobile_attack_data = (urllib.request.urlopen( self.mitre_mobile_attack_file_url).read().decode( "utf-8")) self.helper.send_stix2_bundle( mobile_attack_data, self.helper.connect_scope, self.update_existing_data, ) except Exception as e: self.helper.log_error(str(e)) # Store the current timestamp as a last run self.helper.log_info( "Connector successfully run, storing last_run as " + str(timestamp)) self.helper.set_state({"last_run": timestamp}) self.helper.log_info( "Last_run stored, next run in: " + str(round(self.get_interval() / 60 / 60 / 24, 2)) + " days") time.sleep(60) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( "Connector will not run, next run in: " + str(round(new_interval / 60 / 60 / 24, 2)) + " days") time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
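# A small sketch of the scheduling arithmetic used by Mitre.run() above:
# MITRE_INTERVAL is expressed in days, get_interval() converts it to seconds,
# and the connector re-runs once more than (interval - 1) days have elapsed.
# The helper name below is illustrative, not part of the connector.
def _should_run(last_run, now, interval_days):
    if last_run is None:
        return True
    return (now - last_run) > (interval_days - 1) * 60 * 60 * 24

# Example: with a 7-day interval, a connector last run 6.5 days ago will run,
# since 6.5 days exceeds the (7 - 1) day threshold.
assert _should_run(last_run=0, now=int(6.5 * 86400), interval_days=7)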
class LastInfoSec:
    def __init__(self):
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        self.lastinfosec_url = get_config_variable(
            "CONFIG_LIS_URL", ["lastinfosec", "api_url"], config)
        self.lastinfosec_apikey = get_config_variable(
            "CONFIG_LIS_APIKEY", ["lastinfosec", "api_key"], config)
        self.opencti_url = get_config_variable(
            "OPENCTI_URL", ["opencti", "url"], config)
        self.opencti_id = get_config_variable(
            "OPENCTI_TOKEN", ["opencti", "token"], config)
        self.update_existing_data = True
        self.api = OpenCTIApiClient(self.opencti_url, self.opencti_id)

    def run(self):
        self.helper.log_info("Fetching LastInfoSec datasets...")
        while True:
            try:
                # Get the current timestamp and check the last run
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: {0}".format(
                            datetime.utcfromtimestamp(last_run).strftime(
                                "%Y-%m-%d %H:%M:%S")))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                lastinfosec_data = requests.get(
                    self.lastinfosec_url + self.lastinfosec_apikey).json()
                if "message" in lastinfosec_data.keys():
                    for data in lastinfosec_data["message"]:
                        sdata = json.dumps(data)
                        self.helper.log_info(str(type(sdata)))
                        self.helper.log_info(sdata)
                        self.api.stix2.import_bundle_from_json(sdata)
                    # Store the current timestamp as the last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as {0}".format(
                            timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    time.sleep(3500)
                else:
                    self.helper.log_info(
                        "No new data returned by the API, last_run not updated ({0})".format(
                            timestamp))
                    time.sleep(300)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error("run: " + str(e))
                time.sleep(60)
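# A hedged alternative sketch for the fetch step above: LastInfoSec.run()
# builds the request URL by concatenating api_url and api_key. Adding an
# explicit timeout and a status check surfaces HTTP failures early instead of
# failing later on malformed JSON. The helper name is illustrative.
import requests

def fetch_lastinfosec(api_url, api_key):
    response = requests.get(api_url + api_key, timeout=30)
    response.raise_for_status()  # raise on 4xx/5xx instead of parsing an error page
    return response.json()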
class capeConnector:
    """Connector object"""

    def __init__(self):
        """Read in config variables"""
        config_file_path = os.path.dirname(os.path.abspath(__file__))
        config_file_path += "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

        self.cape_api_url = get_config_variable("CAPE_API_URL",
                                                ["cape", "api_url"], config)
        self.cape_url = get_config_variable("CAPE_BASE_URL",
                                            ["cape", "base_url"], config)
        self.EnableNetTraffic = get_config_variable(
            "CAPE_ENABLE_NETWORK_TRAFFIC",
            ["cape", "enable_network_traffic"],
            config,
            default=False,
        )
        self.EnableRegKeys = get_config_variable(
            "CAPE_ENABLE_REGISTRY_KEYS",
            ["cape", "enable_registry_keys"],
            config,
            default=False,
        )
        self.verify_ssl = get_config_variable("VERIFY_SSL",
                                              ["cape", "verify_ssl"],
                                              config, default=True)
        self.interval = get_config_variable("CAPE_INTERVAL",
                                            ["cape", "interval"],
                                            config, True, 30)
        self.start_id = get_config_variable("CAPE_START_TASK_ID",
                                            ["cape", "start_task_id"],
                                            config, True, 0)
        self.report_score = get_config_variable("CAPE_REPORT_SCORE",
                                                ["cape", "report_score"],
                                                config, True, 0)
        self.create_indicators = get_config_variable(
            "CAPE_CREATE_INDICATORS", ["cape", "create_indicators"], config)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.cape_api: cuckoo = cuckoo(self.helper, self.cape_api_url,
                                       self.verify_ssl)

    def get_interval(self):
        """Converts interval minutes to seconds"""
        return int(self.interval) * 60

    @property
    def first_run(self):
        """Checks if connector has run before"""
        current_state = self.helper.get_state()
        return current_state is None or "last_run" not in current_state

    def run(self):
        """Run connector on a schedule"""
        while True:
            if self.first_run:
                state = self.helper.get_state()
                self.helper.log_info("Connector has never run")
                self.helper.log_info(str(state))
                # Get the last CAPE task pulled
                if state and "task" in state:
                    current_task = state["task"]
                else:
                    current_task = 0
                # Check if the configured starting task is newer than the cursor
                if self.start_id > current_task:
                    current_task = self.start_id
                    self.helper.set_state({"task": self.start_id})
            else:
                last_run = datetime.utcfromtimestamp(
                    self.helper.get_state()["last_run"]).strftime(
                        "%Y-%m-%d %H:%M:%S")
                self.helper.log_info("Connector last run: " + last_run)
                # Get the last CAPE task pulled
                state = self.helper.get_state()
                self.helper.log_info(str(state))
                if state and "task" in state:
                    current_task = state["task"]
                else:
                    current_task = 0
                self.helper.log_info("Last Task ID (STATE): " +
                                     str(current_task))
                # Check if the configured starting task is newer than the cursor
                if self.start_id > current_task:
                    current_task = self.start_id
                    self.helper.set_state({"task": self.start_id})

            try:
                # Pull the list of tasks from the CAPE API
                CapeTasks = self.cape_api.getCuckooTasks()
            except Exception as err:
                self.helper.log_error("Error connecting to Cape API")
                self.helper.log_error(str(err))
                raise err

            for task in reversed(CapeTasks):
                if not task["status"] == "reported":
                    continue  # Skip tasks that have not reported
                if not task["completed_on"]:
                    continue  # Skip tasks that have not completed
                try:
                    if task["id"] > current_task:
                        # Pull the CAPE report and serialize it
                        taskSummary = cuckooReport(
                            self.cape_api.getTaskReport(task["id"]))
                        if not taskSummary:
                            continue  # Skip tasks without a report
                        if not taskSummary.info:
                            continue  # Skip reports without an info section - we really need it
                        self.helper.log_info(
                            f"Processing Task {taskSummary.info.id}")
                        # Process and submit the CAPE task as a STIX bundle
                        openCTIInterface(
                            taskSummary,
                            self.helper,
                            self.update_existing_data,
                            [],
                            self.create_indicators,
                            self.cape_url,
                            self.EnableNetTraffic,
                            self.EnableRegKeys,
                            self.report_score,
                        )
                        # Update the last task pulled
                        self.helper.set_state({"task": taskSummary.info.id})
                        self.helper.log_info(f"Synced task {task['id']}")
                except Exception as e:
                    self.helper.log_error(
                        f"An error occurred fetching task {task['id']}; {str(e)}")

            self.helper.log_info("Finished grabbing Cape Reports")
            self.helper.log_info(f"Run Complete. Sleeping until next run in "
                                 f"{self.interval} minutes")
            time.sleep(self.get_interval())
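# A minimal sketch of the task-cursor pattern capeConnector relies on: the
# highest CAPE task id already imported is persisted in the connector state
# under "task", and each run resumes from max(saved cursor, configured start
# id). The helper name is illustrative.
def resume_task_id(state, start_id):
    cursor = (state or {}).get("task", 0)
    return max(cursor, start_id)

# Example: with CAPE_START_TASK_ID=100 but a saved cursor at task 250, the
# connector resumes from 250; with no saved state it starts at 100.
assert resume_task_id({"task": 250}, start_id=100) == 250
assert resume_task_id(None, start_id=100) == 100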
class Cve:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_import_history = get_config_variable(
            "CVE_IMPORT_HISTORY", ["cve", "import_history"], config, False)
        self.cve_nvd_data_feed = get_config_variable(
            "CVE_NVD_DATA_FEED", ["cve", "nvd_data_feed"], config)
        self.cve_interval = get_config_variable(
            "CVE_INTERVAL", ["cve", "interval"], config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        return int(self.cve_interval) * 60 * 60 * 24

    def convert_and_send(self, url):
        try:
            # Download the json.gz file from the given url (not always the
            # configured feed: history imports pass yearly feed urls here)
            self.helper.log_info("Requesting the file " + url)
            data_dir = os.path.dirname(os.path.abspath(__file__))
            gz_path = os.path.join(data_dir, "data.json.gz")
            json_path = os.path.join(data_dir, "data.json")
            stix_path = os.path.join(data_dir, "data-stix2.json")
            urllib.request.urlretrieve(url, gz_path)
            # Unzip the file
            self.helper.log_info("Unzipping the file")
            with gzip.open(gz_path, "rb") as f_in:
                with open(json_path, "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            # Convert the file to stix2
            self.helper.log_info("Converting the file")
            convert(json_path, stix_path)
            with open(stix_path) as stix_json:
                contents = stix_json.read()
                self.helper.send_stix2_bundle(contents,
                                              self.helper.connect_scope,
                                              self.update_existing_data)
            # Remove the temporary files
            os.remove(json_path)
            os.remove(gz_path)
            os.remove(stix_path)
        except Exception as e:
            self.helper.log_error(str(e))
            time.sleep(60)

    def run(self):
        self.helper.log_info("Fetching CVE knowledge...")
        while True:
            try:
                # Get the current timestamp and check the last run
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: "
                        + datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day ago
                if last_run is None or ((timestamp - last_run) > (
                        (int(self.cve_interval) - 1) * 60 * 60 * 24)):
                    self.convert_and_send(self.cve_nvd_data_feed)
                    # If import history is enabled and the connector has never run
                    if last_run is None and self.cve_import_history:
                        now = datetime.now()
                        years = list(range(2002, now.year))
                        for year in years:
                            self.convert_and_send(
                                "https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-"
                                + str(year) + ".json.gz")
                    # Store the current timestamp as the last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as "
                        + str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: "
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: "
                        + str(round(new_interval / 60 / 60 / 24, 2))
                        + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
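# A hedged sketch of the download-and-decompress step in Cve.convert_and_send(),
# shown with context managers and a single working directory so the downloaded
# archive and the extracted JSON always live next to each other. The function
# name is illustrative.
import gzip
import os
import shutil
import urllib.request

def download_nvd_feed(url, workdir):
    gz_path = os.path.join(workdir, "data.json.gz")
    json_path = os.path.join(workdir, "data.json")
    urllib.request.urlretrieve(url, gz_path)
    with gzip.open(gz_path, "rb") as f_in, open(json_path, "wb") as f_out:
        shutil.copyfileobj(f_in, f_out)  # stream-decompress to disk
    return json_path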
class Taxii2Connector: """Connector object""" def __init__(self): """Read in config variables""" config_file_path = os.path.dirname(os.path.abspath(__file__)) config_file_path += "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.helper = OpenCTIConnectorHelper(config) username = get_config_variable( "TAXII2_USERNAME", ["taxii2", "username"], config ) password = get_config_variable( "TAXII2_PASSWORD", ["taxii2", "password"], config ) server_url = get_config_variable( "TAXII2_DISCOVERY_URL", ["taxii2", "discovery_url"], config ) self.verify_ssl = get_config_variable( "VERIFY_SSL", ["taxii2", "verify_ssl"], config, default=True ) # if V21 flag set to true if get_config_variable("TAXII2_V21", ["taxii2", "v2.1"], config, default=True): self.server = tx21.Server( server_url, user=username, password=password, verify=self.verify_ssl ) else: self.server = tx20.Server( server_url, user=username, password=password, verify=self.verify_ssl ) self.collections = get_config_variable( "TAXII2_COLLECTIONS", ["taxii2", "collections"], config ).split(",") self.initial_history = get_config_variable( "TAXII2_INITIAL_HISTORY", ["taxii2", "initial_history"], config, True ) self.per_request = get_config_variable( "TAXII2_PER_REQUEST", ["taxii2", "per_request"], config, True ) self.interval = get_config_variable( "TAXII2_INTERVAL", ["taxii2", "interval"], config, True, 1 ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) @staticmethod def _init_collection_table(colls): """ Creates a table of string:Set where the key is the API root and the value is the list of Collections to read Args: colls (str): a comma delimited list of API roots and Collections to Poll Returns: A dictionary with [str, Set], where the Key is the API root and the value is the list of Collections to be polled """ table = {} for col in colls.split(","): root, coll = col.split(".") if root in table: table[root].add(coll) else: table[root] = {coll} return table def get_interval(self): """Converts interval hours to seconds""" return int(self.interval) * 3600 @property def first_run(self): """Checks if connector has run before""" current_state = self.helper.get_state() return current_state is None or "last_run" not in current_state def run(self): """Run connector on a schedule""" while True: self.server.refresh() timestamp = int(time.time()) if self.first_run: self.helper.log_info("Connector has never run") else: last_run = datetime.utcfromtimestamp( self.helper.get_state()["last_run"] ).strftime("%Y-%m-%d %H:%M:%S") self.helper.log_info("Connector last run: " + last_run) for collection in self.collections: try: root_path, coll_title = collection.split(".") if root_path == "*": self.poll_all_roots(coll_title) elif coll_title == "*": root = self._get_root(root_path) self.poll_entire_root(root) else: root = self._get_root(root_path) coll = self._get_collection(root, coll_title) self.poll(coll) except (TAXIIServiceException, HTTPError) as err: self.helper.log_error("Error connecting to TAXII server") self.helper.log_error(err) continue self.helper.log_info( f"Run Complete. 
Sleeping until next run in " f"{self.interval} hours" ) self.helper.set_state({"last_run": timestamp}) time.sleep(self.get_interval()) def poll_all_roots(self, coll_title): """ Polls all API roots for the specified collections Args: coll_title (str): The Name of a Collection """ self.helper.log_info("Polling all API Roots") for root in self.server.api_roots: if coll_title == "*": self.poll_entire_root(root) else: try: coll = self._get_collection(root, coll_title) except TAXIIServiceException: self.helper.log_error( f"Error searching for collection {coll_title} in API Root {root.title}" ) return try: self.poll(coll) except TAXIIServiceException as err: msg = ( f"Error trying to poll Collection {coll_title} " f"in API Root {root.title}. Skipping" ) self.helper.log_error(msg) self.helper.log_error(err) def poll_entire_root(self, root): """ Polls all Collections in a given API Root Args: root (taxii2client.v2*.ApiRoot: Api Root to poll """ self.helper.log_info(f"Polling entire API root {root.title}") for coll in root.collections: try: self.poll(coll) except TAXIIServiceException as err: msg = ( f"Error trying to poll Collection {coll.title} " f"in API Root {root.title}. Skipping" ) self.helper.log_error(msg) self.helper.log_error(err) def poll(self, collection): """ Polls a specified collection in a specified API root Args: collection (taxii2client.v2*.Collection: THe Collection to poll """ filters = {} if self.first_run: lookback = self.initial_history or None else: lookback = self.interval if lookback: added_after = datetime.now() - timedelta(hours=lookback) filters["added_after"] = added_after self.helper.log_info(f"Polling Collection {collection.title}") self.send_to_server(collection.get_objects(**filters)) def send_to_server(self, bundle): """ Sends a STIX2 bundle to OpenCTI Server Args: bundle (list(dict)): STIX2 bundle represented as a list of dicts """ self.helper.log_info( f"Sending Bundle to server with '{len(bundle.get('objects', []))}' objects" ) try: self.helper.send_stix2_bundle( json.dumps(bundle), update=self.update_existing_data, ) except Exception as e: self.helper.log_error(str(e)) def _get_collection(self, root, coll_title): """ Returns a Collection object, given an API Root and a collection name Args: root (taxii2.v2*.ApiRoot): The API Root to search through coll_title (str): The Name of the target Collections Returns: The taxii2.v2*.Collection object with the name `coll_title` """ for coll in root.collections: if coll.title == coll_title: return coll msg = f"Collection {coll_title} does not exist in API root {root.title}" raise TAXIIServiceException(msg) def _get_root(self, root_path): """ Returns an APi Root object, given a Server and an API Root path Args: Server (taxii2.v2*.Server): The TAXII Server to search for root_path (str): the path of the API root in the URL Returns: The taxii2.v2*.Collection object with the name `coll_title` """ for root in self.server.api_roots: if root.url.split("/")[-2] == root_path: return root msg = f"Api Root {root_path} does not exist in the TAXII server" raise TAXIIServiceException(msg)
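# Usage sketch for Taxii2Connector._init_collection_table(), which parses the
# comma-delimited "<api_root>.<collection>" configuration format used by this
# connector into a dict of API root -> set of collection titles. Note the run
# loop above parses self.collections inline; this static helper implements the
# same convention.
table = Taxii2Connector._init_collection_table("root1.collA,root1.collB,root2.collC")
assert table == {"root1": {"collA", "collB"}, "root2": {"collC"}}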
class Malpedia:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.interval = 1  # 1 day interval between each scraping
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
        )
        self.MALPEDIA_API = get_config_variable(
            "MALPEDIA_API", ["malpedia", "MALPEDIA_API"], config
        )
        self.AUTH_KEY = get_config_variable(
            "AUTH_KEY", ["malpedia", "AUTH_KEY"], config
        )
        self.need_update = True
        self.last_update = 0
        self.api_call = {
            "API_CHECK_APIKEY": "check/apikey",
            "API_GET_VERSION": "get/version",
            "API_GET_FAMILIES": "get/families",
            "API_LIST_ACTORS": "list/actors",
            "API_GET_FAMILY": "get/family/",
            "API_LIST_FAMILIES": "list/families",
            "API_GET_YARA": "get/yara/",
            "API_LIST_SAMPLES": "list/samples/",
        }

    def get_interval(self):
        return int(self.interval) * 60 * 60 * 24

    def next_run(self, seconds):
        return

    def run(self):
        self.helper.log_info("Fetching Malpedia datasets...")
        while self.need_update:
            try:
                # Get the current timestamp and check the last run
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: "
                        + datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # Run if it is the first time or the interval has elapsed
                if last_run is None or (
                    (timestamp - last_run) > ((int(self.interval)) * 60 * 60 * 24)
                ):
                    self.helper.log_info("Connector will run!")
                    ## CORE ##
                    begin_date = datetime.utcnow()
                    self.helper.log_info(
                        "BEGIN WORK : "
                        + datetime.strftime(begin_date, "%Y-%m-%dT%H:%M:%SZ")
                    )
                    # API key check
                    r = requests.get(
                        self.MALPEDIA_API + self.api_call["API_CHECK_APIKEY"],
                        headers={"Authorization": "apitoken " + self.AUTH_KEY},
                    )
                    response_json = r.json()
                    if "Valid token" in response_json["detail"]:
                        self.helper.log_info("--- Authentication successful.")
                    else:
                        self.helper.log_info("--- Authentication failed.")
                    # API version check
                    r = requests.get(
                        self.MALPEDIA_API + self.api_call["API_GET_VERSION"]
                    )
                    response_json = r.json()
                    self.helper.log_info(
                        "--- Malpedia version: "
                        + str(response_json["version"])
                        + " ("
                        + response_json["date"]
                        + ")"
                    )
                    # [TODO] Improve the version check: use self.helper.set_state
                    if self.last_update <= int(response_json["version"]):
                        self.need_update = True
                        self.helper.log_info(
                            "[-] Update is needed: "
                            + str(self.last_update)
                            + " >> "
                            + str(response_json["version"])
                        )
                    else:
                        self.need_update = False
                        self.helper.log_info(
                            "[-] Update is not needed: "
                            + str(self.last_update)
                            + " to "
                            + str(response_json["version"])
                        )
                    if not self.need_update:
                        break
                    ### MAIN GET ###
                    # Get the list of families
                    r = requests.get(
                        self.MALPEDIA_API + self.api_call["API_LIST_FAMILIES"],
                        headers={"Authorization": "apitoken " + self.AUTH_KEY},
                    )
                    list_of_families_json = r.json()
                    # Get the families
                    r = requests.get(
                        self.MALPEDIA_API + self.api_call["API_GET_FAMILIES"],
                        headers={"Authorization": "apitoken " + self.AUTH_KEY},
                    )
                    families_json = r.json()
                    # Get the list of actors
                    r = requests.get(
                        self.MALPEDIA_API + self.api_call["API_LIST_ACTORS"],
                        headers={"Authorization": "apitoken " + self.AUTH_KEY},
                    )
                    # list_actors_json = r.json()
                    # Get all marking definitions
                    marking_definitions = self.helper.api.marking_definition.list()
                    # [TODO] There is no get/actors endpoint, so a separate call
                    # will be needed for each actor in the list
                    ### WORK ###
                    # Link to the Malpedia website, added to everything we create
                    external_reference_malpedia = self.helper.api.external_reference.create(
                        source_name="Malpedia ("
                        + str(response_json["version"])
                        + " - "
                        + response_json["date"]
                        + ")",
                        url="https://malpedia.caad.fkie.fraunhofer.de",
                    )
                    malpedia_organization = self.helper.api.identity.create(
                        type="Organization",
                        name="Malpedia",
                        description="Malpedia is a free service offered by Fraunhofer FKIE.",
                    )
                    # print(json.dumps(list_of_families_json, indent=4, sort_keys=True))
                    self.helper.log_info("[-] Begin import of malware families")
                    for name in list_of_families_json:
                        # Create the malware (family)
                        malware = self.helper.api.malware.create(
                            name=families_json[name]["common_name"],
                            description=families_json[name]["description"],
                            createdByRef=malpedia_organization["id"],
                            markingDefinitions=["c4ae0c3a-3535-44e2-b206-bb451c25c749"],
                            alias=families_json[name]["alt_names"],
                        )
                        # Add the main external reference to the Malpedia website
                        self.helper.api.stix_entity.add_external_reference(
                            id=malware["id"],
                            external_reference_id=external_reference_malpedia["id"],
                        )
                        # Also add each url referenced in the Malpedia entity
                        for ref in families_json[name]["urls"]:
                            ref_name = ref.split("/")[2]
                            ref = self.helper.api.external_reference.create(
                                source_name=ref_name,
                                url=ref,
                            )
                            # filters=[{"key": "URL", "values": [ref]}]
                            # if not ref_exist:
                            #     external_reference = opencti_api_client.external_reference.create(
                            #         source_name="Malpedia's sources", url=ref
                            #     )
                        # Add the yara rules associated with the malware
                        r = requests.get(
                            self.MALPEDIA_API + self.api_call["API_GET_YARA"] + name,
                            headers={"Authorization": "apitoken " + self.AUTH_KEY},
                        )
                        list_yara = r.json()
                        for yara in list_yara:
                            for name_rule, rule in list_yara[yara].items():
                                # Extract the yara rule date; fall back to the
                                # Malpedia release date when it is missing
                                if "malpedia_version = " in rule:
                                    date = (
                                        rule.split("malpedia_version = ")[1]
                                        .split("\n")[0]
                                        .replace('"', "")
                                        .replace("-", "")
                                        .strip()
                                    )
                                    date = datetime.strptime(date, "%Y%m%d")
                                    date = datetime.strftime(date, "%Y-%m-%dT%H:%M:%SZ")
                                else:
                                    date = response_json["date"]
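# A hedged, more defensive sketch of the yara-rule date extraction above: a
# regex with a fallback to the Malpedia release date avoids relying on the
# exact "malpedia_version = " spelling inside the rule text. Names are
# illustrative.
import re
from datetime import datetime

def extract_rule_date(rule_text, fallback):
    match = re.search(r'malpedia_version\s*=\s*"?(\d{4})-?(\d{2})-?(\d{2})"?', rule_text)
    if not match:
        return fallback
    parsed = datetime(int(match.group(1)), int(match.group(2)), int(match.group(3)))
    return parsed.strftime("%Y-%m-%dT%H:%M:%SZ")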
class Cybercrimetracker: def __init__(self): # Instantiate the connector helper from config config_file_path = "{}/config.yml".format( os.path.dirname(os.path.abspath(__file__))) config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}) self.helper = OpenCTIConnectorHelper(config) # Connector Config self.confidence_level = get_config_variable( "CONNECTOR_CONFIDENCE_LEVEL", ["connector", "confidence_level"], config, isNumber=True, ) self.update_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) # CYBERCRiME-TRACKER.NET Config self.feed_url = get_config_variable( "CYBERCRIMETRACKER_FEED_URL", ["cybercrimetracker", "feed_url"], config, ) self.connector_tlp = get_config_variable( "CYBERCRIMETRACKER_TLP", ["cybercrimetracker", "tlp"], config, ) self.interval = get_config_variable( "CYBERCRIMETRACKER_INTERVAL", ["cybercrimetracker", "interval"], config, isNumber=True, ) @staticmethod def _time_to_datetime(input_date: time) -> datetime: return datetime( input_date.tm_year, input_date.tm_mon, input_date.tm_mday, input_date.tm_hour, input_date.tm_min, input_date.tm_sec, tzinfo=timezone.utc, ).isoformat() def parse_feed_entry(self, entry): """ Parses an entry from the feed and returns a dict with: date: date in iso format type: name of the malware associated with the C2 server url: the url of the C2 ip: the IP address of the C2 ext_link: An external link to CYBERCRiME-TRACKER.NET with details Note: CYBERCRiME-TRACKER.NET does not provide the protocol in the url as such we always assume 'http'. """ parsed_entry = {} pattern = ( r"(?:\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type}" + r"\s+-%{GREEDYDATA}:\s+%{IP:ip}|" + r"\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type})") entry_summary = Grok(pattern).match(entry["summary"]) if entry_summary: parsed_entry["date"] = self._time_to_datetime( entry["published_parsed"]) parsed_entry["type"] = entry_summary["type"] parsed_entry["ext_link"] = entry["link"] parsed_entry["url"] = "http://{}".format(quote(entry["title"])) hostname = urlparse(parsed_entry["url"]).hostname if entry_summary["ip"] is None: parsed_entry["ip"] = hostname else: parsed_entry["ip"] = entry_summary["ip"] parsed_entry["domain"] = hostname self.helper.log_info("Parsed entry: {}".format(entry["title"])) return parsed_entry else: self.helper.log_error("Could not parse: {}".format(entry["title"])) return False def gen_indicator_pattern(self, parsed_entry): if "domain" in parsed_entry.keys(): indicator_pattern = ( "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"]) + "AND [url:value='{}'] ".format(parsed_entry["url"]) + "AND [domain:value='{}']".format(parsed_entry["domain"])) else: indicator_pattern = "[ipv4-addr:value='{}'] ".format( parsed_entry["ip"]) + "AND [url:value='{}']".format( parsed_entry["url"]) return indicator_pattern def run(self): self.helper.log_info("Fetching data CYBERCRiME-TRACKER.NET...") tag = self.helper.api.tag.create( tag_type="C2-Type", value="C2 Server", color="#fc236b", ) tlp = self.helper.api.marking_definition.read( filters=[{ "key": "definition", "values": "TLP:{}".format(self.connector_tlp) }]) while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = current_state["last_run"] self.helper.log_info("Connector last run: {}".format( datetime.utcfromtimestamp(last_run).strftime( "%Y-%m-%d %H:%M:%S"))) 
else: last_run = None self.helper.log_info("Connector has never run") # Run if it is the first time or we are past the interval if last_run is None or ( (timestamp - last_run) > self.interval): self.helper.log_info("Connector will run!") # Get Feed Content feed = feedparser.parse(self.feed_url) self.helper.log_info("Found: {} entries.".format( len(feed["entries"]))) self.feed_summary = { "Source": feed["feed"]["title"], "Date": self._time_to_datetime( feed["feed"]["published_parsed"]), "Details": feed["feed"]["subtitle"], "Link": feed["feed"]["link"], } # Create entity for the feed. organization = self.helper.api.identity.create( type="Organization", name="CYBERCRiME-TRACKER.NET", description="Tracker collecting and sharing \ daily updates of C2 IPs/Urls. \ http://cybercrime-tracker.net", ) for entry in feed["entries"]: parsed_entry = self.parse_feed_entry(entry) ext_reference = self.helper.api.external_reference.create( source_name="{}".format( self.feed_summary["Source"], ), url=parsed_entry["ext_link"], ) indicator_pattern = self.gen_indicator_pattern( parsed_entry) # Add malware related to indicator malware = self.helper.api.malware.create( name=parsed_entry["type"], description="{} malware.".format( parsed_entry["type"]), ) # Add indicator indicator = self.helper.api.indicator.create( name=parsed_entry["url"], description="C2 URL for: {}".format( parsed_entry["type"]), pattern_type="stix", indicator_pattern=indicator_pattern, main_observable_type="URL", valid_from=parsed_entry["date"], created=parsed_entry["date"], modified=parsed_entry["date"], createdByRef=organization["id"], markingDefinitions=[tlp["id"]], update=self.update_data, ) # Add tag self.helper.api.stix_entity.add_tag( id=indicator["id"], tag_id=tag["id"], ) self.helper.api.stix_entity.add_external_reference( id=indicator["id"], external_reference_id=ext_reference["id"], ) # Add relationship with malware relation = self.helper.api.stix_relation.create( fromType="Indicator", fromId=indicator["id"], toType="Malware", toId=malware["id"], relationship_type="indicates", first_seen=self._time_to_datetime( entry["published_parsed"]), last_seen=self._time_to_datetime( entry["published_parsed"]), description="URLs associated to: " + parsed_entry["type"], weight=self.confidence_level, role_played="C2 Server", createdByRef=organization["id"], created=parsed_entry["date"], modified=parsed_entry["date"], update=self.update_data, ) self.helper.api.stix_entity.add_external_reference( id=relation["id"], external_reference_id=ext_reference["id"], ) # Create Observables and link them to Indicator observable_url = self.helper.api.stix_observable.create( type="URL", observable_value=parsed_entry["url"], createdByRef=organization["id"], markingDefinitions=[tlp["id"]], update=self.update_data, ) self.helper.api.stix_entity.add_external_reference( id=observable_url["id"], external_reference_id=ext_reference["id"], ) self.helper.api.indicator.add_stix_observable( id=indicator["id"], stix_observable_id=observable_url["id"], ) observable_ip = self.helper.api.stix_observable.create( type="IPv4-Addr", observable_value=parsed_entry["ip"], createdByRef=organization["id"], markingDefinitions=[tlp["id"]], update=self.update_data, ) self.helper.api.stix_entity.add_external_reference( id=observable_ip["id"], external_reference_id=ext_reference["id"], ) self.helper.api.indicator.add_stix_observable( id=indicator["id"], stix_observable_id=observable_ip["id"], ) if "domain" in parsed_entry.keys(): observable_domain = self.helper.api.stix_observable.create( 
type="Domain", observable_value=parsed_entry["domain"], createdByRef=organization["id"], markingDefinitions=[tlp["id"]], update=self.update_data, ) self.helper.api.stix_entity.add_external_reference( id=observable_domain["id"], external_reference_id=ext_reference["id"], ) self.helper.api.indicator.add_stix_observable( id=indicator["id"], stix_observable_id=observable_domain["id"], ) self.helper.api.stix_relation.create( fromType="Domain", fromId=observable_domain["id"], toType="IPv4-Addr", toId=observable_ip["id"], relationship_type="resolves", last_seen=self._time_to_datetime( entry["published_parsed"]), weight=self.confidence_level, createdByRef=organization["id"], created=parsed_entry["date"], modified=parsed_entry["date"], update=self.update_data, ) # Store the current timestamp as a last run self.helper.log_info("Connector successfully run, \ storing last_run as: {}".format(str(timestamp))) self.helper.set_state({"last_run": timestamp}) self.helper.log_info( "Last_run stored, next run in: {} seconds.".format( str(round(self.interval, 2)))) new_state = {"last_run": int(time.time())} self.helper.set_state(new_state) time.sleep(60) else: new_interval = self.interval - (timestamp - last_run) self.helper.log_info("Connector will not run. \ Next run in: {} seconds.".format( str(round(new_interval, 2)))) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
class CyberThreatCoalition: _OPENCTI_TYPE = { "domain": "[domain-name:value = '{}']", "ipv4-addr": "[ipv4-addr:value = '{}']", "file-sha256": "[file:hashes.SHA256 = '{}']", "file-sha1": "[file:hashes.SHA1 = '{}']", "file-md5": "[file:hashes.MD5 = '{}']", "url": "[url:value = '{}']", } _STATE_LAST_RUN = "last_run" def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.helper = OpenCTIConnectorHelper(config) # Extra config self.cyber_threat_coalition_interval = get_config_variable( "CYBER_THREAT_COALITION_INTERVAL", ["cyber-threat-coalition", "interval_sec"], config, True, ) self.cyber_threat_coalition_base_url = get_config_variable( "CYBER_THREAT_COALITION_BASE_URL", ["cyber-threat-coalition", "base_url"], config, False, ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) def get_interval(self) -> int: return int(self.cyber_threat_coalition_interval) @staticmethod def get_hash_type(hash_value): if re.match(r"^[0-9a-fA-F]{32}$", hash_value): return "file-md5" elif re.match(r"^[0-9a-fA-F]{40}$", hash_value): return "file-sha1" elif re.match(r"^[0-9a-fA-F]{64}$", hash_value): return "file-sha256" def fetch_and_send(self): bundle_objects = list() # create an identity for the coalition team organization = stix2.Identity( name="Cyber Threat Coalition Team", identity_class="organization", description="Team of Experts collecting and sharing pandemic related " "cyber threat intelligence during the COVID-19 crisis time", ) # add organization in bundle bundle_objects.append(organization) report_object_refs = list() for collection in ["domain", "ip", "url", "hash"]: # fetch backlist url = self.cyber_threat_coalition_base_url + "/" + str(collection) + ".txt" response = requests.get(url=url) if response.status_code != 200: raise Exception( "Unable to fetch {0} blacklist, server returned status: {1}", collection, response.status_code, ) opencti_type = None pattern_type = "stix" tags = [{"tag_type": "Event", "value": "COVID-19", "color": "#fc036b"}] # parse content for data in response.iter_lines(decode_unicode=True): if data and not data.startswith("#"): if collection == "domain": opencti_type = "domain" elif collection == "ip": opencti_type = "ipv4-addr" elif collection == "url": opencti_type = "url" data = urllib.parse.quote(data, "/:") elif collection == "hash": opencti_type = self.get_hash_type(data) try: indicator = stix2.Indicator( name=data, pattern=self._OPENCTI_TYPE[opencti_type].format(data), labels=["malicious-activity"], created_by_ref=organization, object_marking_refs=[stix2.TLP_WHITE], custom_properties={ CustomProperties.OBSERVABLE_TYPE: opencti_type, CustomProperties.OBSERVABLE_VALUE: data, CustomProperties.PATTERN_TYPE: pattern_type, CustomProperties.TAG_TYPE: tags, }, ) except Exception as ex: self.helper.log_error( "an exception occurred while converting data to STIX indicator " "for data.value: {} , skipping IOC, exception: {}".format( data, ex ) ) continue # add indicator in bundle and report_refs bundle_objects.append(indicator) report_object_refs.append(indicator["id"]) # create a global threat report report_uuid = "report--552b3ae6-8522-409d-8b72-a739bc1926aa" report_external_reference = stix2.ExternalReference( source_name="Cyber Threat Coalition", url="https://www.cyberthreatcoalition.org", 
external_id="COVID19-CTC", ) stix_report = stix2.Report( id=report_uuid, name="COVID-19 Cyber Threat Coalition (CTC) BlackList", type="report", description="This report represents the whole COVID-19 CTC blacklist.", published=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), created_by_ref=organization, object_marking_refs=[stix2.TLP_WHITE], labels=["threat-report"], external_references=[report_external_reference], object_refs=report_object_refs, custom_properties={CustomProperties.TAG_TYPE: tags,}, ) # add report in bundle bundle_objects.append(stix_report) # create stix bundle bundle = stix2.Bundle(objects=bundle_objects) # send data self.helper.send_stix2_bundle( bundle=bundle.serialize(), update=self.update_existing_data ) def _load_state(self) -> Dict[str, Any]: current_state = self.helper.get_state() if not current_state: return {} return current_state def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool: if last_run is None: return True time_diff = current_time - last_run return time_diff >= self.get_interval() @staticmethod def _get_state_value( state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None ) -> Any: if state is not None: return state.get(key, default) return default @staticmethod def _current_unix_timestamp() -> int: return int(time.time()) def run(self): self.helper.log_info("Fetching Cyber Threat Coalition vetted blacklists...") while True: try: timestamp = self._current_unix_timestamp() current_state = self._load_state() self.helper.log_info(f"Loaded state: {current_state}") last_run = self._get_state_value(current_state, self._STATE_LAST_RUN) if self._is_scheduled(last_run, timestamp): # fetch data and send as stix bundle self.fetch_and_send() new_state = current_state.copy() new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp() self.helper.log_info(f"Storing new state: {new_state}") self.helper.set_state(new_state) self.helper.log_info( f"State stored, next run in: {self.get_interval()} seconds" ) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( f"Connector will not run, next run in: {new_interval} seconds" ) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as ex: self.helper.log_error(str(ex)) time.sleep(60)
class OpenCTI: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.SafeLoader) if os.path.isfile(config_file_path) else {} ) self.helper = OpenCTIConnectorHelper(config) # Extra config self.opencti_sectors_file_url = get_config_variable( "CONFIG_SECTORS_FILE_URL", ["config", "sectors_file_url"], config ) self.opencti_geography_file_url = get_config_variable( "CONFIG_GEOGRAPHY_FILE_URL", ["config", "geography_file_url"], config ) self.opencti_interval = get_config_variable( "CONFIG_INTERVAL", ["config", "interval"], config, True ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) def get_interval(self): return int(self.opencti_interval) * 60 * 60 * 24 def run(self): self.helper.log_info("Fetching OpenCTI datasets...") while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = current_state["last_run"] self.helper.log_info( "Connector last run: " + datetime.utcfromtimestamp(last_run).strftime( "%Y-%m-%d %H:%M:%S" ) ) else: last_run = None self.helper.log_info("Connector has never run") # If the last_run is more than interval-1 day if last_run is None or ( (timestamp - last_run) > ((int(self.opencti_interval) - 1) * 60 * 60 * 24) ): now = datetime.utcfromtimestamp(timestamp) friendly_name = "OpenCTI datasets run @ " + now.strftime( "%Y-%m-%d %H:%M:%S" ) work_id = self.helper.api.work.initiate_work( self.helper.connect_id, friendly_name ) try: sectors_data = urllib.request.urlopen( self.opencti_sectors_file_url ).read() self.helper.send_stix2_bundle( sectors_data.decode("utf-8"), entities_types=self.helper.connect_scope, update=self.update_existing_data, work_id=work_id, ) except Exception as e: self.helper.log_error(str(e)) try: geography_data = urllib.request.urlopen( self.opencti_geography_file_url ).read() self.helper.send_stix2_bundle( geography_data.decode("utf-8"), entities_types=self.helper.connect_scope, update=self.update_existing_data, work_id=work_id, ) except Exception as e: self.helper.log_error(str(e)) # Store the current timestamp as a last run message = "Connector successfully run, storing last_run as " + str( timestamp ) self.helper.log_info(message) self.helper.set_state({"last_run": timestamp}) self.helper.api.work.to_processed(work_id, message) self.helper.log_info( "Last_run stored, next run in: " + str(round(self.get_interval() / 60 / 60 / 24, 2)) + " days" ) time.sleep(60) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( "Connector will not run, next run in: " + str(round(new_interval / 60 / 60 / 24, 2)) + " days" ) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
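# A minimal sketch of the "work" lifecycle this connector uses: a work unit is
# opened, the bundle is sent against it, and the work is closed with a status
# message so progress shows up in the OpenCTI UI. The wrapper name is
# illustrative; the helper calls mirror those made in OpenCTI.run() above.
def import_bundle_with_work(helper, friendly_name, bundle, update):
    work_id = helper.api.work.initiate_work(helper.connect_id, friendly_name)
    helper.send_stix2_bundle(
        bundle,
        entities_types=helper.connect_scope,
        update=update,
        work_id=work_id,
    )
    helper.api.work.to_processed(work_id, "Bundle imported")
    return work_id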
class Mitre: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + '/config.yml' config = yaml.load(open(config_file_path), Loader=yaml.FullLoader ) if os.path.isfile(config_file_path) else {} self.helper = OpenCTIConnectorHelper(config) # Extra config self.mitre_enterprise_file_url = os.getenv( 'MITRE_ENTERPRISE_FILE_URL' ) or config['mitre']['enterprise_file_url'] self.mitre_pre_attack_file_url = os.getenv( 'MITRE_PRE_ATTACK_FILE_URL' ) or config['mitre']['pre_attack_file_url'] self.mitre_interval = os.getenv( 'MITRE_INTERVAL') or config['mitre']['interval'] def get_interval(self): return int(self.mitre_interval) * 60 * 60 * 24 def run(self): self.helper.log_info('Fetching MITRE datasets...') while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and 'last_run' in current_state: last_run = current_state['last_run'] self.helper.log_info('Connector last run: ' + datetime.utcfromtimestamp(last_run). strftime('%Y-%m-%d %H:%M:%S')) else: last_run = None self.helper.log_info('Connector has never run') # If the last_run is more than interval-1 day if last_run is None or ((timestamp - last_run) > ( (int(self.mitre_interval) - 1) * 60 * 60 * 24)): self.helper.log_info('Connector will run!') enterprise_data = urllib.request.urlopen( self.mitre_enterprise_file_url).read().decode('utf-8') self.helper.send_stix2_bundle(enterprise_data, self.helper.connect_scope) pre_attack_data = urllib.request.urlopen( self.mitre_pre_attack_file_url).read() self.helper.send_stix2_bundle( pre_attack_data.decode('utf-8'), self.helper.connect_scope) # Store the current timestamp as a last run self.helper.log_info( 'Connector successfully run, storing last_run as ' + str(timestamp)) self.helper.set_state({'last_run': timestamp}) # Sleep all interval self.helper.log_info( 'Last_run stored, sleeping for: ' + str(round(self.get_interval() / 60 / 60 / 24, 2)) + ' days') time.sleep(self.get_interval()) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( 'Connector will not run, sleeping for: ' + str(round(new_interval / 60 / 60 / 24, 2)) + ' days') # Sleep only remaining time time.sleep(new_interval) except (KeyboardInterrupt, SystemExit): self.helper.log_info('Connector stop') exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(self.get_interval())
class TaniumConnector: def __init__(self): config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.helper = OpenCTIConnectorHelper(config) # Extra config self.tanium_url = get_config_variable("TANIUM_URL", ["tanium", "url"], config) self.tanium_ssl_verify = get_config_variable( "TANIUM_SSL_VERIFY", ["tanium", "ssl_verify"], config, False, True ) self.tanium_login = get_config_variable( "TANIUM_LOGIN", ["tanium", "login"], config ) self.tanium_password = get_config_variable( "TANIUM_PASSWORD", ["tanium", "password"], config ) self.tanium_indicator_types = get_config_variable( "TANIUM_INDICATOR_TYPES", ["tanium", "indicator_types"], config ).split(",") self.tanium_observable_types = get_config_variable( "TANIUM_OBSERVABLE_TYPES", ["tanium", "observable_types"], config ).split(",") self.tanium_import_label = get_config_variable( "TANIUM_IMPORT_LABEL", ["tanium", "import_label"], config, False, "" ) self.tanium_import_from_date = get_config_variable( "TANIUM_IMPORT_FROM_DATE", ["tanium", "import_from_date"], config ) self.tanium_reputation_blacklist_label = get_config_variable( "TANIUM_REPUTATION_BLACKLIST_LABEL", ["tanium", "reputation_blacklist_label"], config, False, "", ) self.tanium_auto_quickscan = get_config_variable( "TANIUM_AUTO_QUICKSCAN", ["tanium", "auto_quickscan"], config, False, False ) self.tanium_computer_groups = get_config_variable( "TANIUM_COMPUTER_GROUPS", ["tanium", "computer_groups"], config, False, "" ).split(",") # Variables self.session = None # Open a session self._get_session() # Create the state if self.tanium_import_from_date: timestamp = ( parse(self.tanium_import_from_date).timestamp() * 1000 if self.tanium_import_from_date != "now" else int(round(time.time() * 1000)) - 1000 ) current_state = self.helper.get_state() if current_state is None: self.helper.set_state({"connectorLastEventId": timestamp}) # Create the source if not exist self.source_id = None sources = self._query("get", "/plugin/products/detect3/api/v1/sources") for source in sources: if source["name"] == "OpenCTI": self.source_id = source["id"] if self.source_id is None: source = self._query( "post", "/plugin/products/detect3/api/v1/sources", { "type": "api-client", "name": "OpenCTI", "description": "Cyber Threat Intelligence knowledge imported from OpenCTI.", "canAutoQuickScan": True, }, ) self.source_id = source["id"] def _get_session(self): payload = { "username": self.tanium_login, "password": self.tanium_password, } r = requests.post( self.tanium_url + "/api/v2/session/login", json=payload, verify=self.tanium_ssl_verify, ) if r.status_code == 200: result = r.json() self.session = result["data"]["session"] else: raise ValueError("Cannot login to the Tanium API") def _query( self, method, uri, payload=None, content_type="application/json", type=None, retry=False, ): self.helper.log_info("Query " + method + " on " + uri) headers = {"session": self.session} if method != "upload": headers["content-type"] = content_type if type is not None: headers["type"] = type if content_type == "application/octet-stream": headers["content-disposition"] = ( "attachment; filename=" + payload["filename"] ) if "name" in payload: headers["name"] = payload["name"] if "description" in payload: headers["description"] = payload["description"] if method == "get": r = requests.get( self.tanium_url + uri, headers=headers, params=payload, verify=self.tanium_ssl_verify, ) elif method == 
"post": if content_type == "application/octet-stream": r = requests.post( self.tanium_url + uri, headers=headers, data=payload["document"], verify=self.tanium_ssl_verify, ) elif type is not None: r = requests.post( self.tanium_url + uri, headers=headers, data=payload["intelDoc"], verify=self.tanium_ssl_verify, ) else: r = requests.post( self.tanium_url + uri, headers=headers, json=payload, verify=self.tanium_ssl_verify, ) elif method == "upload": f = open(payload["filename"], "w") f.write(payload["content"]) f.close() files = {"hash": open(payload["filename"], "rb")} r = requests.post( self.tanium_url + uri, headers=headers, files=files, verify=self.tanium_ssl_verify, ) elif method == "put": if content_type == "application/xml": r = requests.put( self.tanium_url + uri, headers=headers, data=payload, verify=self.tanium_ssl_verify, ) else: r = requests.put( self.tanium_url + uri, headers=headers, json=payload, verify=self.tanium_ssl_verify, ) elif method == "patch": r = requests.patch( self.tanium_url + uri, headers=headers, json=payload, verify=self.tanium_ssl_verify, ) elif method == "delete": r = requests.delete( self.tanium_url + uri, headers=headers, verify=self.tanium_ssl_verify ) else: raise ValueError("Unspported method") if r.status_code == 200: try: return r.json() except: return r.text elif r.status_code == 401 and not retry: self._get_session() return self._query(method, uri, payload, content_type, type, True) elif r.status_code == 401: raise ValueError("Query failed, permission denied") else: self.helper.log_info(r.text) def _get_labels(self, labels): # List labels tanium_labels = self._query( "get", "/plugin/products/detect3/api/v1/labels", {"limit": 500} ) tanium_labels_dict = {} for tanium_label in tanium_labels: tanium_labels_dict[tanium_label["name"].lower()] = tanium_label final_labels = [] for label in labels: # Label already exists if label["value"] in tanium_labels_dict: final_labels.append(tanium_labels_dict[label["value"]]) # Create the label else: created_label = self._query( "post", "/plugin/products/detect3/api/v1/labels", { "name": label["value"], "description": "Label imported from OpenCTI", }, ) final_labels.append(created_label) return final_labels def _get_by_id(self, internal_id, yara=False): if yara: response = self._query( "get", "/plugin/products/detect3/api/v1/intels", {"name": internal_id + ".yara"}, ) else: response = self._query( "get", "/plugin/products/detect3/api/v1/intels", {"description": internal_id}, ) if response and len(response) > 0: return response[0] else: return None def _get_reputation_by_hash(self, hash): response = self._query( "get", "/plugin/products/reputation/v3/reputations/custom", {"search": hash}, ) if response["data"] and len(response["data"]) > 0: return response["data"][0] else: return None def _create_indicator_stix(self, entity, original_intel_document=None): if original_intel_document is None: intel_document = self._get_by_id(entity["id"]) if intel_document is not None: return intel_document stix2_bundle = self.helper.api.stix2.export_entity( entity["entity_type"], entity["id"], "simple", None, True, True, ) initialize_options() stix_indicator = slide_string(stix2_bundle) stix_indicator = re.sub( r"<indicator:Description>(.*?)<\/indicator:Description>", r"<indicator:Description>" + entity["id"] + "</indicator:Description>", stix_indicator, ) stix_indicator = re.sub( r"<indicator:Description ordinality=\"1\">(.*?)<\/indicator:Description>", r'<indicator:Description ordinality="1">' + entity["id"] + 
"</indicator:Description>", stix_indicator, ) payload = {"intelDoc": stix_indicator} if original_intel_document is not None: intel_document = self._query( "put", "/plugin/products/detect3/api/v1/intels/" + str(original_intel_document["id"]), stix_indicator, "application/xml", "stix", ) else: intel_document = self._query( "post", "/plugin/products/detect3/api/v1/sources/" + str(self.source_id) + "/intels", payload, "application/xml", "stix", ) return intel_document def _create_indicator_yara(self, entity, original_intel_document=None): if original_intel_document is None: intel_document = self._get_by_id(entity["id"], True) if intel_document is not None: return intel_document filename = entity["id"] + ".yara" if original_intel_document is not None: intel_document = self._query( "put", "/plugin/products/detect3/api/v1/intels/" + str(original_intel_document["id"]), { "filename": filename, "document": entity["pattern"], "name": entity["name"], "description": entity["id"], }, "application/octet-stream", "yara", ) else: intel_document = self._query( "post", "/plugin/products/detect3/api/v1/sources/" + str(self.source_id) + "/intels", { "filename": filename, "document": entity["pattern"], "name": entity["name"], "description": entity["id"], }, "application/octet-stream", "yara", ) return intel_document def _create_tanium_signal(self, entity, original_intel_document=None): if original_intel_document is None: intel_document = self._get_by_id(entity["id"]) if intel_document is not None: return intel_document platforms = [] if "x_mitre_platforms" in entity and len(entity["x_mitre_platforms"]) > 0: for x_mitre_platform in entity["x_mitre_platforms"]: if x_mitre_platform in ["Linux", "Windows", "macOS"]: platforms.append( x_mitre_platform.lower() if x_mitre_platform != "macOS" else "mac" ) if original_intel_document is not None: intel_document = self._query( "put", "/plugin/products/detect3/api/v1/intels/" + str(original_intel_document["id"]), { "name": entity["name"], "description": entity["id"], "platforms": platforms, "contents": entity["pattern"], }, ) else: intel_document = self._query( "post", "/plugin/products/detect3/api/v1/sources/" + str(self.source_id) + "/intels", { "name": entity["name"], "description": entity["id"], "platforms": platforms, "contents": entity["pattern"], }, ) return intel_document def _create_observable(self, entity, original_intel_document=None): if original_intel_document is None: intel_document = self._get_by_id(entity["id"]) if intel_document is not None: return intel_document intel_type = None value = None name = None if entity["entity_type"] == "StixFile": intel_type = "file_hash" if "hashes" in entity: for hash in entity["hashes"]: value = ( value + hash["hash"] + "\n" if value is not None else hash["hash"] + "\n" ) name = hash["hash"] elif entity["entity_type"] in [ "IPv4-Addr", "IPv6-Addr", "Domain-Name", "X-OpenCTI-Hostname", ]: intel_type = "ip_or_host" value = entity["value"] name = entity["value"] if intel_type is None or value is None: return None openioc = self._query( "post", "/plugin/products/detect3/api/v1/intels/quick-add", { "exact": True, "name": name, "description": entity["id"], "type": intel_type, "text": value, }, ) openioc = re.sub( r"<description>(.*?)<\/description>", r"<description>" + entity["id"] + "</description>", openioc, ) payload = {"intelDoc": openioc} if original_intel_document is not None: intel_document = self._query( "put", "/plugin/products/detect3/api/v1/intels/" + str(original_intel_document["id"]), payload, "application/xml", 
"openioc", ) else: intel_document = self._query( "post", "/plugin/products/detect3/api/v1/sources/" + str(self.source_id) + "/intels", payload, "application/xml", "openioc", ) return intel_document def _post_operations(self, entity, intel_document): if intel_document is not None and entity is not None: if self.tanium_auto_quickscan: for computer_group in self.tanium_computer_groups: self._query( "post", "/plugin/products/detect3/api/v1/quick-scans", { "computerGroupId": int(computer_group), "intelDocId": intel_document["id"], }, ) external_reference = self.helper.api.external_reference.create( source_name="Tanium", url=self.tanium_url + "/#/thr_workbench/intel/" + str(intel_document["id"]), external_id=str(intel_document["id"]), description="Intel document within the Tanium platform.", ) if entity["entity_type"] == "Indicator": self.helper.api.stix_domain_object.add_external_reference( id=entity["id"], external_reference_id=external_reference["id"] ) else: self.helper.api.stix_cyber_observable.add_external_reference( id=entity["id"], external_reference_id=external_reference["id"] ) if len(entity["objectLabel"]) > 0: labels = self._get_labels(entity["objectLabel"]) for label in labels: if label is not None: self._query( "put", "/plugin/products/detect3/api/v1/intels/" + str(intel_document["id"]) + "/labels", {"id": label["id"]}, ) def _process_intel(self, entity_type, data, original_intel_document=None): entity = None intel_document = None if entity_type == "indicator": entity = self.helper.api.indicator.read(id=data["data"]["x_opencti_id"]) if ( entity is None or entity["revoked"] or entity["pattern_type"] not in self.tanium_indicator_types ): return {"entity": entity, "intel_document": intel_document} if entity["pattern_type"] == "stix": intel_document = self._create_indicator_stix( entity, original_intel_document ) elif entity["pattern_type"] == "yara": intel_document = self._create_indicator_yara( entity, original_intel_document ) elif entity["pattern_type"] == "tanium-signal": intel_document = self._create_tanium_signal( entity, original_intel_document ) elif ( StixCyberObservableTypes.has_value(entity_type) and entity_type.lower() in self.tanium_observable_types ): entity = self.helper.api.stix_cyber_observable.read( id=data["data"]["x_opencti_id"] ) if entity is None: return {"entity": entity, "intel_document": intel_document} intel_document = self._create_observable(entity, original_intel_document) return {"entity": entity, "intel_document": intel_document} def _process_message(self, msg): data = json.loads(msg.data) entity_type = data["data"]["type"] # If not an indicator, not an observable to import and if ( entity_type != "indicator" and entity_type not in self.tanium_observable_types and ( "labels" in data["data"] and self.tanium_reputation_blacklist_label not in data["data"]["labels"] ) and self.tanium_reputation_blacklist_label != "*" ): self.helper.log_info( "Not an indicator and not an observable to import, doing nothing" ) return # Handle creation if msg.event == "create": # No label if ( "labels" not in data["data"] and self.tanium_import_label != "*" and self.tanium_reputation_blacklist_label != "*" ): self.helper.log_info("No label marked as import, doing nothing") return # Import or blacklist labels are not in the given labels elif ( ( "labels" in data["data"] and self.tanium_import_label not in data["data"]["labels"] ) and self.tanium_import_label != "*" and self.tanium_reputation_blacklist_label not in data["data"]["labels"] and self.tanium_reputation_blacklist_label 
!= "*" ): self.helper.log_info( "No label marked as import or no global label, doing nothing" ) return # Revoked is true elif "revoked" in data["data"] and data["data"]["revoked"]: return if ( "labels" in data["data"] and self.tanium_import_label in data["data"]["labels"] ) or self.tanium_import_label == "*": # Process intel processed_intel = self._process_intel(entity_type, data) intel_document = processed_intel["intel_document"] entity = processed_intel["entity"] # Create external reference and add object labels self._post_operations(entity, intel_document) if ( "labels" in data["data"] and self.tanium_reputation_blacklist_label in data["data"]["labels"] ) or self.tanium_reputation_blacklist_label == "*": if "hashes" in data["data"]: entry = {"list": "blacklist"} if "MD5" in data["data"]["hashes"]: entry["md5"] = data["data"]["hashes"]["MD5"] entry["uploadedHash"] = data["data"]["hashes"]["MD5"] else: entry["md5"] = "" if "SHA-1" in data["data"]["hashes"]: entry["sha1"] = data["data"]["hashes"]["SHA-1"] entry["uploadedHash"] = data["data"]["hashes"]["SHA-1"] else: entry["sha1"] = "" if "SHA-256" in data["data"]["hashes"]: entry["sha256"] = data["data"]["hashes"]["SHA-256"] entry["uploadedHash"] = data["data"]["hashes"]["SHA-256"] else: entry["sha256"] = "" entry["notes"] = ",".join(data["data"]["labels"]) self._query( "post", "/plugin/products/reputation/v3/reputations/custom/upload?append=true", [entry], ) elif msg.event == "update": if ( "x_data_update" in data["data"] and "add" in data["data"]["x_data_update"] and "labels" in data["data"]["x_data_update"]["add"] ): if self.tanium_reputation_blacklist_label in data["data"][ "x_data_update" ]["add"]["labels"] and StixCyberObservableTypes.has_value( data["data"]["type"] ): observable = self.helper.api.stix_cyber_observable.read( id=data["data"]["id"] ) observable = self.helper.api.stix2.generate_export(observable) if "hashes" in observable: entry = {"list": "blacklist"} if "MD5" in observable["hashes"]: entry["md5"] = observable["hashes"]["MD5"] entry["uploadedHash"] = observable["hashes"]["MD5"] else: entry["md5"] = "" if "SHA-1" in observable["hashes"]: entry["sha1"] = observable["hashes"]["SHA-1"] entry["uploadedHash"] = observable["hashes"]["SHA-1"] else: entry["sha1"] = "" if "SHA-256" in observable["hashes"]: entry["sha256"] = observable["hashes"]["SHA-256"] entry["uploadedHash"] = observable["hashes"]["SHA-256"] else: entry["sha256"] = "" entry["notes"] = ",".join(observable["labels"]) self._query( "post", "/plugin/products/reputation/v3/reputations/custom/upload?append=true", [entry], ) if ( self.tanium_import_label in data["data"]["x_data_update"]["add"]["labels"] ): # Process intel processed_intel = self._process_intel(entity_type, data) intel_document = processed_intel["intel_document"] entity = processed_intel["entity"] # Create external reference and add object labels self._post_operations(entity, intel_document) else: entity = self.helper.api.indicator.read( id=data["data"]["x_opencti_id"], customAttributes=""" pattern_type """, ) intel_document = self._get_by_id( data["data"]["x_opencti_id"], yara=True if entity is not None and entity["pattern_type"] == "yara" else False, ) if intel_document: new_labels = [] for label in data["data"]["x_data_update"]["add"]["labels"]: new_labels.append({"value": label}) labels = self._get_labels(new_labels) for label in labels: self._query( "put", "/plugin/products/detect3/api/v1/intels/" + str(intel_document["id"]) + "/labels", {"id": label["id"]}, ) elif ( "x_data_update" in data["data"] 
and "remove" in data["data"]["x_data_update"] and "labels" in data["data"]["x_data_update"]["remove"] ): if ( self.tanium_reputation_blacklist_label in data["data"]["x_data_update"]["remove"]["labels"] ): if "hashes" in data["data"]: if "SHA-256" in data["data"]["hashes"]: self._query( "post", "/plugin/products/reputation/v3/reputations/custom/delete", [data["data"]["hashes"]["SHA-256"]], ) if "SHA-1" in data["data"]["hashes"]: self._query( "post", "/plugin/products/reputation/v3/reputations/custom/delete", [data["data"]["hashes"]["SHA-1"]], ) if "MD5" in data["data"]["hashes"]: self._query( "post", "/plugin/products/reputation/v3/reputations/custom/delete", [data["data"]["hashes"]["MD5"]], ) if ( self.tanium_import_label in data["data"]["x_data_update"]["remove"]["labels"] ): # Import label has been removed intel_document = self._get_by_id(data["data"]["x_opencti_id"]) if intel_document is not None: self._query( "delete", "/plugin/products/detect3/api/v1/intels/" + str(intel_document["id"]), ) # Remove external references if entity_type == "indicator": entity = self.helper.api.indicator.read( id=data["data"]["x_opencti_id"] ) else: entity = self.helper.api.stix_cyber_observable.read( id=data["data"]["x_opencti_id"] ) if ( entity and "externalReferences" in entity and len(entity["externalReferences"]) > 0 ): for external_reference in entity["externalReferences"]: if external_reference["source_name"] == "Tanium": self.helper.api.external_reference.delete( external_reference["id"] ) else: intel_document = self._get_by_id(data["data"]["x_opencti_id"]) if intel_document: new_labels = [] for label in data["data"]["x_data_update"]["remove"]["labels"]: new_labels.append({"value": label}) labels = self._get_labels(new_labels) for label in labels: self._query( "delete", "/plugin/products/detect3/api/v1/intels/" + str(intel_document["id"]) + "/labels/" + str(label["id"]), ) elif ( "x_data_update" in data["data"] and "replace" in data["data"]["x_data_update"] ): if entity_type == "indicator": if "pattern" in data["data"]["x_data_update"]["replace"]: intel_document = self._get_by_id(data["data"]["x_opencti_id"]) if intel_document is not None: self._process_intel(entity_type, data, intel_document) elif ( "value" in data["data"]["x_data_update"]["replace"] or "hashes" in data["data"]["x_data_update"]["replace"] ): intel_document = self._get_by_id(data["data"]["x_opencti_id"]) if intel_document is not None: self._process_intel(entity_type, data, intel_document) elif ( "revoked" in data["data"]["x_data_update"]["replace"] and data["data"]["x_data_update"]["replace"]["revoked"] == True ): intel_document = self._get_by_id(data["data"]["x_opencti_id"]) if intel_document is not None: self._query( "delete", "/plugin/products/detect3/api/v1/intels/" + str(intel_document["id"]), ) # Remove external references if entity_type == "indicator": entity = self.helper.api.indicator.read( id=data["data"]["x_opencti_id"] ) else: entity = self.helper.api.stix_cyber_observable.read( id=data["data"]["x_opencti_id"] ) if ( entity and "externalReferences" in entity and len(entity["externalReferences"]) > 0 ): for external_reference in entity["externalReferences"]: if external_reference["source_name"] == "Tanium": self.helper.api.external_reference.delete( external_reference["id"] ) elif msg.event == "delete": intel_document = self._get_by_id(data["data"]["x_opencti_id"]) if intel_document is not None: self._query( "delete", "/plugin/products/detect3/api/v1/intels/" + str(intel_document["id"]), ) if data["data"]["type"] == 
"file": if "hashes" in data["data"]: if "SHA-256" in data["data"]["hashes"]: self._query( "post", "/plugin/products/reputation/v3/reputations/custom/delete", [data["data"]["hashes"]["SHA-256"]], ) if "SHA-1" in data["data"]["hashes"]: self._query( "post", "/plugin/products/reputation/v3/reputations/custom/delete", [data["data"]["hashes"]["SHA-1"]], ) if "MD5" in data["data"]["hashes"]: self._query( "post", "/plugin/products/reputation/v3/reputations/custom/delete", [data["data"]["hashes"]["MD5"]], ) def start(self): self.alerts_gatherer = TaniumConnectorAlertsGatherer( self.helper, self.tanium_url, self.tanium_login, self.tanium_password, self.tanium_ssl_verify, ) self.alerts_gatherer.start() self.helper.listen_stream(self._process_message)
class Cybercrimetracker:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = "{}/config.yml".format(
            os.path.dirname(os.path.abspath(__file__))
        )
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Connector Config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        # CYBERCRIME-TRACKER.NET Config
        self.feed_url = get_config_variable(
            "CYBERCRIME_TRACKER_FEED_URL", ["cybercrime-tracker", "feed_url"], config
        )
        self.connector_tlp = get_config_variable(
            "CYBERCRIME_TRACKER_TLP", ["cybercrime-tracker", "tlp"], config
        )
        self.create_indicators = get_config_variable(
            "CYBERCRIME_TRACKER_CREATE_INDICATORS",
            ["cybercrime-tracker", "create_indicators"],
            config,
        )
        self.create_observables = get_config_variable(
            "CYBERCRIME_TRACKER_CREATE_OBSERVABLES",
            ["cybercrime-tracker", "create_observables"],
            config,
        )
        self.interval = get_config_variable(
            "CYBERCRIMETRACKER_INTERVAL",
            ["cybercrime-tracker", "interval"],
            config,
            isNumber=True,
        )

    @staticmethod
    def _time_to_datetime(input_date: time) -> datetime.datetime:
        return datetime.datetime(
            input_date.tm_year,
            input_date.tm_mon,
            input_date.tm_mday,
            input_date.tm_hour,
            input_date.tm_min,
            input_date.tm_sec,
            tzinfo=datetime.timezone.utc,
        )

    def parse_feed_entry(self, entry):
        """
        Parse an entry from the feed and return a dict with:

        date: date in ISO format
        type: name of the malware associated with the C2 server
        url: the URL of the C2
        ip: the IP address of the C2
        ext_link: an external link to CYBERCRIME-TRACKER.NET with details

        Note: CYBERCRIME-TRACKER.NET does not provide the protocol in the
        URL, so we always assume 'http'.
        """
        parsed_entry = {}
        pattern = (
            r"(?:\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type}"
            + r"\s+-%{GREEDYDATA}:\s+%{IP:ip}|"
            + r"\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type})"
        )
        entry_summary = Grok(pattern).match(entry["summary"])
        if entry_summary:
            parsed_entry["date"] = self._time_to_datetime(entry["published_parsed"])
            parsed_entry["type"] = entry_summary["type"]
            parsed_entry["ext_link"] = entry["link"]
            parsed_entry["url"] = "http://{}".format(quote(entry["title"]))
            hostname = urlparse(parsed_entry["url"]).hostname
            if entry_summary["ip"] is None:
                parsed_entry["ip"] = hostname
            else:
                parsed_entry["ip"] = entry_summary["ip"]
                parsed_entry["domain"] = hostname
            self.helper.log_info("Parsed entry: {}".format(entry["title"]))
            return parsed_entry
        else:
            self.helper.log_error("Could not parse: {}".format(entry["title"]))
            return False

    def gen_indicator_pattern(self, parsed_entry):
        if "domain" in parsed_entry.keys():
            indicator_pattern = (
                "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"])
                + "AND [url:value='{}'] ".format(parsed_entry["url"])
                + "AND [domain-name:value='{}']".format(parsed_entry["domain"])
            )
        else:
            indicator_pattern = "[ipv4-addr:value='{}'] ".format(
                parsed_entry["ip"]
            ) + "AND [url:value='{}']".format(parsed_entry["url"])
        return indicator_pattern

    def run(self):
        self.helper.log_info("Fetching data from CYBERCRIME-TRACKER.NET...")
        tlp = self.helper.api.marking_definition.read(
            filters=[
                {"key": "definition", "values": "TLP:{}".format(self.connector_tlp)}
            ]
        )
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: {}".format(
                            datetime.datetime.utcfromtimestamp(last_run).strftime(
                                "%Y-%m-%d %H:%M:%S"
                            )
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # Run if it is the first time or we are past the interval
                if last_run is None or ((timestamp - last_run) > self.interval):
                    self.helper.log_info("Connector will run!")
                    now = datetime.datetime.utcfromtimestamp(timestamp)
                    friendly_name = "CYBERCRIME-TRACKER.NET run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S"
                    )
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name
                    )
                    # Get Feed Content
                    feed = feedparser.parse(self.feed_url)
                    self.helper.log_info(
                        "Found: {} entries.".format(len(feed["entries"]))
                    )
                    self.feed_summary = {
                        "Source": feed["feed"]["title"],
                        "Date": self._time_to_datetime(
                            feed["feed"]["published_parsed"]
                        ),
                        "Details": feed["feed"]["subtitle"],
                        "Link": feed["feed"]["link"],
                    }
                    # Create the bundle
                    bundle_objects = list()
                    organization = stix2.Identity(
                        id=OpenCTIStix2Utils.generate_random_stix_id("identity"),
                        name="CYBERCRIME-TRACKER.NET",
                        identity_class="organization",
                        description="Tracker collecting and sharing daily updates of C2 IPs/Urls.
http://cybercrime-tracker.net", ) bundle_objects.append(organization) for entry in feed["entries"]: parsed_entry = self.parse_feed_entry(entry) external_reference = stix2.ExternalReference( source_name="{}".format(self.feed_summary["Source"]), url=parsed_entry["ext_link"], ) indicator_pattern = self.gen_indicator_pattern(parsed_entry) malware = stix2.Malware( id=OpenCTIStix2Utils.generate_random_stix_id("malware"), is_family=True, name=parsed_entry["type"], description="{} malware.".format(parsed_entry["type"]), ) bundle_objects.append(malware) indicator = None if self.create_indicators: indicator = stix2.Indicator( id=OpenCTIStix2Utils.generate_random_stix_id( "indicator" ), name=parsed_entry["url"], description="C2 URL for: {}".format( parsed_entry["type"] ), labels=["C2 Server"], pattern_type="stix", pattern=indicator_pattern, valid_from=parsed_entry["date"], created=parsed_entry["date"], modified=parsed_entry["date"], created_by_ref=organization.id, object_marking_refs=[tlp["standard_id"]], external_references=[external_reference], custom_properties={ "x_opencti_main_observable_type": "Url" }, ) bundle_objects.append(indicator) relation = stix2.Relationship( id=OpenCTIStix2Utils.generate_random_stix_id( "relationship" ), source_ref=indicator.id, target_ref=malware.id, relationship_type="indicates", start_time=self._time_to_datetime( entry["published_parsed"] ), stop_time=self._time_to_datetime( entry["published_parsed"] ) + datetime.timedelta(0, 3), description="URLs associated to: " + parsed_entry["type"], confidence=self.confidence_level, created_by_ref=organization.id, object_marking_refs=[tlp["standard_id"]], created=parsed_entry["date"], modified=parsed_entry["date"], external_references=[external_reference], ) bundle_objects.append(relation) if self.create_observables: observable_url = SimpleObservable( id=OpenCTIStix2Utils.generate_random_stix_id( "x-opencti-simple-observable" ), key="Url.value", labels=["C2 Server"], value=parsed_entry["url"], created_by_ref=organization.id, object_marking_refs=[tlp["standard_id"]], external_references=[external_reference], ) bundle_objects.append(observable_url) observable_ip = SimpleObservable( id=OpenCTIStix2Utils.generate_random_stix_id( "x-opencti-simple-observable" ), key="IPv4-Addr.value", labels=["C2 Server"], value=parsed_entry["ip"], created_by_ref=organization.id, object_marking_refs=[tlp["standard_id"]], external_references=[external_reference], ) bundle_objects.append(observable_ip) observable_domain = None if "domain" in parsed_entry.keys(): observable_domain = SimpleObservable( id=OpenCTIStix2Utils.generate_random_stix_id( "x-opencti-simple-observable" ), key="Domain-Name.value", labels=["C2 Server"], value=parsed_entry["domain"], created_by_ref=organization.id, object_marking_refs=[tlp["standard_id"]], external_references=[external_reference], ) bundle_objects.append(observable_domain) if indicator is not None: relationship_1 = stix2.Relationship( id=OpenCTIStix2Utils.generate_random_stix_id( "relationship" ), relationship_type="based-on", created_by_ref=organization.id, source_ref=indicator.id, target_ref=observable_url.id, ) bundle_objects.append(relationship_1) relationship_2 = stix2.Relationship( id=OpenCTIStix2Utils.generate_random_stix_id( "relationship" ), relationship_type="based-on", created_by_ref=organization.id, source_ref=indicator.id, target_ref=observable_ip.id, ) bundle_objects.append(relationship_2) if observable_domain is not None: relationship_3 = stix2.Relationship( 
id=OpenCTIStix2Utils.generate_random_stix_id( "relationship" ), relationship_type="based-on", created_by_ref=organization.id, source_ref=indicator.id, target_ref=observable_domain.id, ) bundle_objects.append(relationship_3) # create stix bundle bundle = stix2.Bundle(objects=bundle_objects) # send data self.helper.send_stix2_bundle( bundle=bundle.serialize(), update=self.update_existing_data, work_id=work_id, ) # Store the current timestamp as a last run message = ( "Connector successfully run, storing last_run as: {}".format( str(timestamp) ) ) self.helper.log_info(message) self.helper.set_state({"last_run": timestamp}) self.helper.api.work.to_processed(work_id, message) self.helper.log_info( "Last_run stored, next run in: {} seconds.".format( str(round(self.interval, 2)) ) ) time.sleep(60) else: new_interval = self.interval - (timestamp - last_run) self.helper.log_info( "Connector will not run. \ Next run in: {} seconds.".format( str(round(new_interval, 2)) ) ) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
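# --- Illustrative usage (not part of the connector) ------------------------
# gen_indicator_pattern above emits one of two STIX pattern shapes, depending
# on whether parse_feed_entry resolved a domain. A quick standalone check of
# both shapes, with fabricated sample values:
def _pattern(parsed_entry):
    # Same branching as Cybercrimetracker.gen_indicator_pattern above
    if "domain" in parsed_entry:
        return (
            "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"])
            + "AND [url:value='{}'] ".format(parsed_entry["url"])
            + "AND [domain-name:value='{}']".format(parsed_entry["domain"])
        )
    return "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"]) + "AND [url:value='{}']".format(
        parsed_entry["url"]
    )


print(_pattern({"ip": "198.51.100.7", "url": "http://198.51.100.7/panel"}))
# [ipv4-addr:value='198.51.100.7'] AND [url:value='http://198.51.100.7/panel']
print(_pattern({"ip": "198.51.100.7", "url": "http://c2.example.com/panel", "domain": "c2.example.com"}))
# [ipv4-addr:value='198.51.100.7'] AND [url:value='http://c2.example.com/panel'] AND [domain-name:value='c2.example.com']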
class Misp: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + '/config.yml' config = yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} self.helper = OpenCTIConnectorHelper(config) # Extra config self.misp_url = get_config_variable('MISP_URL', ['misp', 'url'], config) self.misp_key = get_config_variable('MISP_KEY', ['misp', 'key'], config) self.misp_ssl_verify = get_config_variable('MISP_SSL_VERIFY', ['misp', 'ssl_verify'], config) self.misp_create_report = get_config_variable('MISP_CREATE_REPORTS', ['misp', 'create_reports'], config) self.misp_report_class = get_config_variable( 'MISP_REPORT_CLASS', ['misp', 'report_class'], config ) or 'MISP Event' self.misp_import_from_date = get_config_variable('MISP_IMPORT_FROM_DATE', ['misp', 'import_from_date'], config) self.misp_import_tags = get_config_variable('MISP_IMPORT_TAGS', ['misp', 'import_tags'], config) self.misp_interval = get_config_variable('MISP_INTERVAL', ['misp', 'interval'], config, True) self.update_existing_data = get_config_variable( 'CONNECTOR_UPDATE_EXISTING_DATA', ['connector', 'update_existing_data'], config ) # Initialize MISP self.misp = ExpandedPyMISP(url=self.misp_url, key=self.misp_key, ssl=self.misp_ssl_verify, debug=False) def get_interval(self): return int(self.misp_interval) * 60 def run(self): while True: timestamp = int(time.time()) # Get the last_run datetime current_state = self.helper.get_state() if current_state is not None and 'last_run' in current_state: last_run = datetime.utcfromtimestamp(current_state['last_run']).strftime('%Y-%m-%d %H:%M:%S') self.helper.log_info( 'Connector last run: ' + last_run) else: last_run = None self.helper.log_info('Connector has never run') # If import with tags complex_query_tag = None if self.misp_import_tags is not None: or_parameters = [] for tag in self.misp_import_tags.split(','): or_parameters.append(tag.strip()) complex_query_tag = self.misp.build_complex_query(or_parameters=or_parameters) # If import from a specific date import_from_date = None if self.misp_import_from_date is not None: import_from_date = parse(self.misp_import_from_date).strftime('%Y-%m-%d %H:%M:%S') # Prepare the query kwargs = dict() if complex_query_tag is not None: kwargs['tags'] = complex_query_tag if last_run is not None: kwargs['timestamp'] = last_run elif import_from_date is not None: kwargs['date_from'] = import_from_date # Query with pagination of 100 current_page = 1 while True: kwargs['limit'] = 100 kwargs['page'] = current_page self.helper.log_info('Fetching MISP events with args: ' + json.dumps(kwargs)) events = self.misp.search('events', **kwargs) self.helper.log_info('MISP returned ' + str(len(events)) + ' events.') # Break if no more result if len(events) == 0: break self.process_events(events) current_page += 1 # Set the last_run timestamp self.helper.set_state({'last_run': timestamp}) time.sleep(self.get_interval()) def process_events(self, events): for event in events: ### Default variables added_markings = [] added_entities = [] added_object_refs = [] ### Pre-process # Author author = Identity(name=event['Event']['Orgc']['name'], identity_class='organization') # Elements event_elements = self.prepare_elements(event['Event']['Galaxy']) # Markings if 'Tag' in event['Event']: event_markings = self.resolve_markings(event['Event']['Tag']) else: event_markings = [TLP_WHITE] # ExternalReference event_external_reference = ExternalReference( 
source_name=self.helper.connect_name, external_id=event['Event']['uuid'], url=self.misp_url + '/events/view/' + event['Event']['uuid']) ### Get indicators indicators = [] # Get attributes for attribute in event['Event']['Attribute']: indicator = self.process_attribute(author, event_elements, event_markings, attribute) if indicator is not None: indicators.append(indicator) # Get attributes of objects objects_relationships = [] for object in event['Event']['Object']: object_attributes = [] for attribute in object['Attribute']: indicator = self.process_attribute(author, event_elements, event_markings, attribute) if indicator is not None: indicators.append(indicator) if object['meta-category'] == 'file' and indicator[ 'indicator'].x_opencti_observable_type in FILETYPES: object_attributes.append(indicator) objects_relationships.extend(self.process_observable_relations(object_attributes, [])) ### Prepare the bundle bundle_objects = [author] object_refs = [] # Add event markings for event_marking in event_markings: if event_marking['id'] not in added_markings: bundle_objects.append(event_marking) added_markings.append(event_marking['id']) # Add event elements all_event_elements = \ event_elements['intrusion_sets'] + \ event_elements['malwares'] + \ event_elements['tools'] + \ event_elements['attack_patterns'] for event_element in all_event_elements: if event_element['name'] not in added_object_refs: object_refs.append(event_element) added_object_refs.append(event_element['name']) if event_element['name'] not in added_entities: bundle_objects.append(event_element) added_entities.append(event_element['name']) # Add indicators for indicator in indicators: if indicator['indicator']['id'] not in added_object_refs: object_refs.append(indicator['indicator']) added_object_refs.append(indicator['indicator']['id']) if indicator['indicator']['id'] not in added_entities: bundle_objects.append(indicator['indicator']) added_entities.append(indicator['indicator']['id']) # Add attribute markings for attribute_marking in indicator['markings']: if attribute_marking['id'] not in added_markings: bundle_objects.append(attribute_marking) added_markings.append(attribute_marking['id']) # Add attribute elements all_attribute_elements = \ indicator['attribute_elements']['intrusion_sets'] + \ indicator['attribute_elements']['malwares'] + \ indicator['attribute_elements']['tools'] + \ indicator['attribute_elements']['attack_patterns'] for attribute_element in all_attribute_elements: if attribute_element['name'] not in added_object_refs: object_refs.append(attribute_element) added_object_refs.append(attribute_element['name']) if attribute_element['name'] not in added_entities: bundle_objects.append(attribute_element) added_entities.append(attribute_element['name']) # Add attribute relationships for relationship in indicator['relationships']: object_refs.append(relationship) bundle_objects.append(relationship) # Add object_relationships for object_relationship in objects_relationships: bundle_objects.append(object_relationship) ### Create the report if needed if self.misp_create_report and len(object_refs) > 0: report = Report( name=event['Event']['info'], description=event['Event']['info'], published=parse(event['Event']['date']), created_by_ref=author, object_marking_refs=event_markings, labels=['threat-report'], object_refs=object_refs, external_references=[event_external_reference], custom_properties={ 'x_opencti_report_class': self.misp_report_class, 'x_opencti_object_status': 2 } ) bundle_objects.append(report) bundle = 
Bundle(objects=bundle_objects).serialize() self.helper.send_stix2_bundle(bundle, None, self.update_existing_data, False) def process_attribute(self, author, event_elements, event_markings, attribute): resolved_attributes = self.resolve_type(attribute['type'], attribute['value']) if resolved_attributes is None: return None for resolved_attribute in resolved_attributes: ### Pre-process # Elements attribute_elements = self.prepare_elements(attribute['Galaxy']) # Markings if 'Tag' in attribute: attribute_markings = self.resolve_markings(attribute['Tag'], with_default=False) if len(attribute_markings) == 0: attribute_markings = event_markings else: attribute_markings = event_markings ### Create the indicator observable_type = resolved_attribute['type'] observable_value = resolved_attribute['value'] pattern_type = 'stix' if observable_type in PATTERNTYPES: pattern_type = observable_type elif observable_type not in OPENCTISTIX2: return None else: if 'transform' in OPENCTISTIX2[observable_type]: if OPENCTISTIX2[observable_type]['transform']['operation'] == 'remove_string': observable_value = observable_value.replace(OPENCTISTIX2[observable_type]['transform']['value'], '') lhs = ObjectPath(OPENCTISTIX2[observable_type]['type'], OPENCTISTIX2[observable_type]['path']) observable_value = ObservationExpression(EqualityComparisonExpression(lhs, observable_value)) try: indicator = Indicator( name=resolved_attribute['value'], description=attribute['comment'], pattern=str(observable_value), valid_from=datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime('%Y-%m-%dT%H:%M:%SZ'), labels=['malicious-activity'], created_by_ref=author, object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_observable_type': resolved_attribute['type'], 'x_opencti_observable_value': resolved_attribute['value'], 'x_opencti_pattern_type': pattern_type } ) except: return None ### Create the relationships relationships = [] # Event threats for threat in (event_elements['intrusion_sets'] + event_elements['malwares'] + event_elements['tools']): relationships.append( Relationship( relationship_type='indicates', created_by_ref=author, source_ref=indicator.id, target_ref=threat.id, description=attribute['comment'], object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_weight': self.helper.connect_confidence_level } ) ) # Attribute threats for threat in (attribute_elements['intrusion_sets'] + attribute_elements['malwares'] + attribute_elements[ 'tools']): relationships.append( Relationship( relationship_type='indicates', created_by_ref=author, source_ref=indicator.id, target_ref=threat.id, description=attribute['comment'], object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_weight': self.helper.connect_confidence_level } ) ) # Event Attack Patterns for attack_pattern in event_elements['attack_patterns']: if len(event_elements['malwares']) > 0: threats = event_elements['malwares'] elif len(event_elements['intrusion_sets']) > 0: threats = event_elements['intrusion_sets'] else: threats = [] for threat in threats: relationship_uses = 
Relationship( relationship_type='uses', created_by_ref=author, source_ref=threat.id, target_ref=attack_pattern.id, description=attribute['comment'], object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_weight': self.helper.connect_confidence_level, 'x_opencti_ignore_dates': True } ) relationships.append(relationship_uses) relationship_indicates = Relationship( relationship_type='indicates', created_by_ref=author, source_ref=indicator.id, target_ref='malware--fa42a846-8d90-4e51-bc29-71d5b4802168', # Fake description=attribute['comment'], object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_weight': self.helper.connect_confidence_level, 'x_opencti_source_ref': indicator.id, 'x_opencti_target_ref': relationship_uses.id } ) relationships.append(relationship_indicates) # Attribute Attack Patterns for attack_pattern in attribute_elements['attack_patterns']: if len(attribute_elements['malwares']) > 0: threats = attribute_elements['malwares'] elif len(attribute_elements['intrusion_sets']) > 0: threats = attribute_elements['intrusion_sets'] else: threats = [] for threat in threats: relationship_uses = Relationship( relationship_type='uses', created_by_ref=author, source_ref=threat.id, target_ref=attack_pattern.id, description=attribute['comment'], object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_weight': self.helper.connect_confidence_level, 'x_opencti_ignore_dates': True } ) relationships.append(relationship_uses) relationship_indicates = Relationship( relationship_type='indicates', created_by_ref=author, source_ref=indicator.id, target_ref='malware--fa42a846-8d90-4e51-bc29-71d5b4802168', # Fake description=attribute['comment'], object_marking_refs=attribute_markings, custom_properties={ 'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime( '%Y-%m-%dT%H:%M:%SZ'), 'x_opencti_weight': self.helper.connect_confidence_level, 'x_opencti_source_ref': indicator.id, 'x_opencti_target_ref': relationship_uses.id, 'x_opencti_ignore_dates': True } ) relationships.append(relationship_indicates) return { 'indicator': indicator, 'relationships': relationships, 'attribute_elements': attribute_elements, 'markings': attribute_markings } def process_observable_relations(self, object_attributes, result_table, start_element=0): if start_element == 0: result_table = [] if len(object_attributes) == 1: return [] for x in range(start_element + 1, len(object_attributes)): result_table.append( Relationship( relationship_type='corresponds', source_ref=object_attributes[start_element]['indicator']['id'], target_ref=object_attributes[x]['indicator']['id'], description='Same file', custom_properties={ 'x_opencti_ignore_dates': True } ) ) if start_element != 
len(object_attributes): return self.process_observable_relations(object_attributes, result_table, start_element + 1) else: return result_table def prepare_elements(self, galaxies): elements = {'intrusion_sets': [], 'malwares': [], 'tools': [], 'attack_patterns': []} added_names = [] for galaxy in galaxies: # Get the linked intrusion sets if ( (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Intrusion Set') or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Threat Actor') or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Microsoft Activity Group actor') ): for galaxy_entity in galaxy['GalaxyCluster']: if ' - G' in galaxy_entity['value']: name = galaxy_entity['value'].split(' - G')[0] elif 'APT ' in galaxy_entity['value']: name = galaxy_entity['value'].replace('APT ', 'APT') else: name = galaxy_entity['value'] if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']: aliases = galaxy_entity['meta']['synonyms'] else: aliases = [name] if name not in added_names: elements['intrusion_sets'].append(IntrusionSet( name=name, labels=['intrusion-set'], description=galaxy_entity['description'], custom_properties={ 'x_opencti_aliases': aliases } )) added_names.append(name) # Get the linked malwares if ( (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Malware') or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Tool') or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Ransomware') or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Android') or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Malpedia') ): for galaxy_entity in galaxy['GalaxyCluster']: if ' - S' in galaxy_entity['value']: name = galaxy_entity['value'].split(' - S')[0] else: name = galaxy_entity['value'] if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']: aliases = galaxy_entity['meta']['synonyms'] else: aliases = [name] if name not in added_names: elements['malwares'].append(Malware( name=name, labels=['malware'], description=galaxy_entity['description'], custom_properties={ 'x_opencti_aliases': aliases } )) added_names.append(name) # Get the linked tools if ( (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Tool') ): for galaxy_entity in galaxy['GalaxyCluster']: if ' - S' in galaxy_entity['value']: name = galaxy_entity['value'].split(' - S')[0] else: name = galaxy_entity['value'] if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']: aliases = galaxy_entity['meta']['synonyms'] else: aliases = [name] if name not in added_names: elements['tools'].append(Tool( name=name, labels=['tool'], description=galaxy_entity['description'], custom_properties={ 'x_opencti_aliases': aliases } )) added_names.append(name) # Get the linked attack_patterns if ( (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Attack Pattern') ): for galaxy_entity in galaxy['GalaxyCluster']: if ' - T' in galaxy_entity['value']: name = galaxy_entity['value'].split(' - T')[0] else: name = galaxy_entity['value'] if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']: aliases = galaxy_entity['meta']['synonyms'] else: aliases = [name] if name not in added_names: elements['attack_patterns'].append(AttackPattern( name=name, labels=['attack-pattern'], description=galaxy_entity['description'], custom_properties={ 'x_opencti_external_id': galaxy_entity['meta']['external_id'][0], 'x_opencti_aliases': aliases, } )) added_names.append(name) return elements def resolve_type(self, type, value): types = { 'md5': ['file-md5'], 'sha1': ['file-sha1'], 
            'sha256': ['file-sha256'],
            'filename': ['file-name'],
            'pdb': ['pdb-path'],
            'filename|md5': ['file-name', 'file-md5'],
            'filename|sha1': ['file-name', 'file-sha1'],
            'filename|sha256': ['file-name', 'file-sha256'],
            'ip-src': ['ipv4-addr'],
            'ip-dst': ['ipv4-addr'],
            'hostname': ['domain'],
            'domain': ['domain'],
            'domain|ip': ['domain', 'ipv4-addr'],
            'url': ['url'],
            'windows-service-name': ['windows-service-name'],
            'windows-service-displayname': ['windows-service-display-name'],
            'windows-scheduled-task': ['windows-scheduled-task']
        }
        if type in types:
            resolved_types = types[type]
            # Composite MISP types (e.g. 'filename|sha256') resolve to two observables
            if len(resolved_types) == 2:
                values = value.split('|')
                if resolved_types[0] == 'ipv4-addr':
                    type_0 = self.detect_ip_version(values[0])
                else:
                    type_0 = resolved_types[0]
                if resolved_types[1] == 'ipv4-addr':
                    type_1 = self.detect_ip_version(values[1])
                else:
                    type_1 = resolved_types[1]
                return [{'type': type_0, 'value': values[0]}, {'type': type_1, 'value': values[1]}]
            else:
                if resolved_types[0] == 'ipv4-addr':
                    type_0 = self.detect_ip_version(value)
                else:
                    type_0 = resolved_types[0]
                return [{'type': type_0, 'value': value}]

    def detect_ip_version(self, value):
        # A colon only appears in IPv6 addresses; the previous length-based
        # check (len(value) > 16) misclassified short IPv6 addresses such as
        # '::1', since any IPv6 address of 16 characters or fewer was treated
        # as IPv4.
        if ':' in value:
            return 'ipv6-addr'
        else:
            return 'ipv4-addr'

    def resolve_markings(self, tags, with_default=True):
        markings = []
        for tag in tags:
            if tag['name'] == 'tlp:white':
                markings.append(TLP_WHITE)
            if tag['name'] == 'tlp:green':
                markings.append(TLP_GREEN)
            if tag['name'] == 'tlp:amber':
                markings.append(TLP_AMBER)
            if tag['name'] == 'tlp:red':
                markings.append(TLP_RED)
        if len(markings) == 0 and with_default:
            markings.append(TLP_WHITE)
        return markings
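# --- Illustrative sketch (not part of the connector) -----------------------
# resolve_type above splits composite MISP attribute types ('filename|sha256',
# 'domain|ip', ...) into two observables by splitting the value on '|'. The
# same idea as a standalone function, using the ':' test for the IP version;
# the mapping here is a fabricated subset:
def split_composite(misp_type, value, mapping):
    resolved = mapping[misp_type]
    parts = value.split("|") if len(resolved) == 2 else [value]
    result = []
    for kind, part in zip(resolved, parts):
        if kind == "ipv4-addr" and ":" in part:
            kind = "ipv6-addr"  # colons only occur in IPv6 addresses
        result.append({"type": kind, "value": part})
    return result


MAPPING = {"domain|ip": ["domain", "ipv4-addr"], "sha256": ["file-sha256"]}
print(split_composite("domain|ip", "c2.example.com|2001:db8::1", MAPPING))
# [{'type': 'domain', 'value': 'c2.example.com'},
#  {'type': 'ipv6-addr', 'value': '2001:db8::1'}]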
class Valhalla: """OpenCTI valhalla main class""" _DEMO_API_KEY = "1111111111111111111111111111111111111111111111111111111111111111" _STATE_LAST_RUN = "last_run" _VALHALLA_LAST_VERSION = "valhalla_last_version" def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + "/../config.yml" config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}) # Extra config self.confidence_level = get_config_variable( "CONNECTOR_CONFIDENCE_LEVEL", ["connector", "confidence_level"], config, isNumber=True, ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) self.API_KEY = get_config_variable("VALHALLA_API_KEY", ["valhalla", "api_key"], config) self.INTERVAL_SEC = get_config_variable( "VALHALLA_INTERVAL_SEC", ["valhalla", "interval_sec"], config, isNumber=True, ) self.helper = OpenCTIConnectorHelper(config) self.helper.log_info(f"loaded valhalla config: {config}") # If we run without API key we can assume all data is TLP:WHITE else we # default to TLP:AMBER to be safe. if self.API_KEY == "" or self.API_KEY is None: self.default_marking = self.helper.api.marking_definition.read( id=TLP_WHITE["id"]) self.valhalla_client = ValhallaAPI() else: self.default_marking = self.helper.api.marking_definition.read( id=TLP_AMBER["id"]) self.valhalla_client = ValhallaAPI(api_key=self.API_KEY) self.knowledge_importer = KnowledgeImporter( self.helper, self.confidence_level, self.update_existing_data, self.default_marking, self.valhalla_client, ) def run(self): self.helper.log_info("starting valhalla connector...") while True: try: status_data = self.valhalla_client.get_status() api_status = Status.parse_obj(status_data) self.helper.log_info(f"current valhalla status: {api_status}") current_time = int(datetime.utcnow().timestamp()) current_state = self._load_state() self.helper.log_info(f"loaded state: {current_state}") last_run = self._get_state_value(current_state, self._STATE_LAST_RUN) last_valhalla_version = self._get_state_value( current_state, self._VALHALLA_LAST_VERSION) if self._is_scheduled( last_run, current_time) and self._check_version( last_valhalla_version, api_status.version): self.helper.log_info("running importers") knowledge_importer_state = self._run_knowledge_importer( current_state) self.helper.log_info("done with running importers") new_state = current_state.copy() new_state.update(knowledge_importer_state) new_state[self._STATE_LAST_RUN] = int( datetime.utcnow().timestamp()) new_state[self._VALHALLA_LAST_VERSION] = api_status.version self.helper.log_info(f"storing new state: {new_state}") self.helper.set_state(new_state) self.helper.log_info( f"state stored, next run in: {self._get_interval()} seconds" ) else: new_interval = self._get_interval() - (current_time - last_run) self.helper.log_info( f"connector will not run, next run in: {new_interval} seconds" ) # After a successful run pause at least 60sec time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) exit(0) def _run_knowledge_importer( self, current_state: Mapping[str, Any]) -> Mapping[str, Any]: return self.knowledge_importer.run(current_state) def _get_interval(self) -> int: return int(self.INTERVAL_SEC) def _load_state(self) -> Dict[str, Any]: current_state = self.helper.get_state() if not current_state: return {} return current_state 
    @staticmethod
    def _get_state_value(state: Optional[Mapping[str, Any]],
                         key: str,
                         default: Optional[Any] = None) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= self._get_interval()

    def _check_version(self, last_version: Optional[int], current_version: int) -> bool:
        if last_version is None:
            return True
        return current_version > last_version
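# --- Illustrative sketch (not part of the connector) -----------------------
# The run() gate above fires only when BOTH conditions hold: the interval has
# elapsed (or there is no last_run) AND the upstream version advanced (or none
# is stored). Note that if the gate ever fails while last_run is still None
# (a stored version with no stored last_run), the `current_time - last_run`
# fallback above would raise a TypeError. A pure-function sketch of the gate:
from typing import Optional


def should_run(last_run: Optional[int], now: int, interval: int,
               last_version: Optional[int], current_version: int) -> bool:
    scheduled = last_run is None or (now - last_run) >= interval
    newer = last_version is None or current_version > last_version
    return scheduled and newer


assert should_run(None, 1000, 3600, None, 2)    # first run ever
assert not should_run(900, 1000, 3600, 1, 2)    # interval not elapsed
assert not should_run(0, 10000, 3600, 2, 2)     # no new upstream version
assert should_run(0, 10000, 3600, 1, 2)         # due and version advanced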
class Malpedia: """OpenCTI Malpedia main class""" _STATE_LAST_RUN = "state_last_run" _MALPEDIA_LAST_VERSION = "malpedia_last_version" def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/../config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.SafeLoader) if os.path.isfile(config_file_path) else {} ) # Extra config self.confidence_level = get_config_variable( "CONNECTOR_CONFIDENCE_LEVEL", ["connector", "confidence_level"], config, isNumber=True, ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) self.AUTH_KEY = get_config_variable( "MALPEDIA_AUTH_KEY", ["malpedia", "auth_key"], config ) self.INTERVAL_SEC = get_config_variable( "MALPEDIA_INTERVAL_SEC", ["malpedia", "interval_sec"], config ) self.import_intrusion_sets = get_config_variable( "MALPEDIA_IMPORT_INTRUSION_SETS", ["malpedia", "import_intrusion_sets"], config, ) self.import_yara = get_config_variable( "MALPEDIA_IMPORT_YARA", ["malpedia", "import_yara"], config ) self.create_indicators = get_config_variable( "MALPEDIA_CREATE_INDICATORS", ["malpedia", "create_indicators"], config ) self.create_observables = get_config_variable( "MALPEDIA_CREATE_OBSERVABLES", ["malpedia", "create_observables"], config ) self.helper = OpenCTIConnectorHelper(config) self.helper.log_info(f"loaded malpedia config: {config}") # Create Malpedia client and importers self.client = MalpediaClient(self.AUTH_KEY) # If we run without API key we can assume all data is TLP:WHITE else we # default to TLP:AMBER to be safe. if self.client.unauthenticated: self.default_marking = self.helper.api.marking_definition.read( id=TLP_WHITE["id"] ) else: self.default_marking = self.helper.api.marking_definition.read( id=TLP_AMBER["id"] ) self.knowledge_importer = KnowledgeImporter( self.helper, self.client, self.confidence_level, self.update_existing_data, self.import_intrusion_sets, self.import_yara, self.create_indicators, self.create_observables, self.default_marking, ) def _load_state(self) -> Dict[str, Any]: current_state = self.helper.get_state() if not current_state: return {} return current_state @staticmethod def _get_state_value( state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None ) -> Any: if state is not None: return state.get(key, default) return default def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool: if last_run is None: return True time_diff = current_time - last_run return time_diff >= int(self.INTERVAL_SEC) def _check_version(self, last_version: Optional[int], current_version: int) -> bool: if last_version is None: return True return current_version > last_version @staticmethod def _current_unix_timestamp() -> int: return int(datetime.utcnow().timestamp()) def _get_interval(self): return int(self.INTERVAL_SEC) def run(self): self.helper.log_info("starting Malpedia connector...") while True: try: current_malpedia_version = self.client.current_version() self.helper.log_info( f"current Malpedia version: {current_malpedia_version}" ) timestamp = self._current_unix_timestamp() current_state = self._load_state() self.helper.log_info(f"loaded state: {current_state}") last_run = self._get_state_value(current_state, self._STATE_LAST_RUN) last_malpedia_version = self._get_state_value( current_state, self._MALPEDIA_LAST_VERSION ) # Only run the connector if: # 1. It is scheduled to run per interval # 2. 
The global Malpedia version from the API is newer than our # last stored version. if self._is_scheduled(last_run, timestamp) and self._check_version( last_malpedia_version, current_malpedia_version ): self.helper.log_info("running importers") knowledge_importer_state = self._run_knowledge_importer( current_state ) self.helper.log_info("done with running importers") new_state = current_state.copy() new_state.update(knowledge_importer_state) new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp() new_state[self._MALPEDIA_LAST_VERSION] = current_malpedia_version self.helper.log_info(f"storing new state: {new_state}") self.helper.set_state(new_state) self.helper.log_info( f"state stored, next run in: {self._get_interval()} seconds" ) else: new_interval = self._get_interval() - (timestamp - last_run) self.helper.log_info( f"connector will not run, next run in: {new_interval} seconds" ) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) exit(0) def _run_knowledge_importer( self, current_state: Mapping[str, Any] ) -> Mapping[str, Any]: return self.knowledge_importer.run(current_state)
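# --- Illustrative sketch (not part of the connector) -----------------------
# Like Valhalla, Malpedia merges the importer's state into the existing state
# dict rather than overwriting it (current_state.copy() + update + the two
# bookkeeping keys). The same step with plain dicts; "cursor" and
# "families_imported" are fabricated example keys, the bookkeeping key names
# match _STATE_LAST_RUN and _MALPEDIA_LAST_VERSION above:
def merge_state(current_state, importer_state, last_run, version):
    new_state = current_state.copy()
    new_state.update(importer_state)
    new_state["state_last_run"] = last_run
    new_state["malpedia_last_version"] = version
    return new_state


print(merge_state({"cursor": "abc"}, {"families_imported": 42}, 1700000000, 7))
# {'cursor': 'abc', 'families_imported': 42, 'state_last_run': 1700000000,
#  'malpedia_last_version': 7}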
class URLhaus: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + "/config.yml" config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}) self.helper = OpenCTIConnectorHelper(config) # Extra config self.urlhaus_csv_url = get_config_variable("URLHAUS_CSV_URL", ["urlhaus", "csv_url"], config) self.urlhaus_import_offline = get_config_variable( "URLHAUS_IMPORT_OFFLINE", ["urlhaus", "import_offline"], config, False, True) self.urlhaus_interval = get_config_variable("URLHAUS_INTERVAL", ["urlhaus", "interval"], config, True) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) self.identity = self.helper.api.identity.create( type="Organization", name="Abuse.ch", description= "abuse.ch is operated by a random swiss guy fighting malware for non-profit, running a couple of projects helping internet service providers and network operators protecting their infrastructure from malware.", ) def get_interval(self): return int(self.urlhaus_interval) * 60 * 60 * 24 def next_run(self, seconds): return def run(self): self.helper.log_info("Fetching URLhaus dataset...") while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = current_state["last_run"] self.helper.log_info("Connector last run: " + datetime.utcfromtimestamp(last_run). strftime("%Y-%m-%d %H:%M:%S")) else: last_run = None self.helper.log_info("Connector has never run") # If the last_run is more than interval-1 day if last_run is None or ((timestamp - last_run) > ( (int(self.urlhaus_interval) - 1) * 60 * 60 * 24)): self.helper.log_info("Connector will run!") now = datetime.utcfromtimestamp(timestamp) friendly_name = "URLhaus run @ " + now.strftime( "%Y-%m-%d %H:%M:%S") work_id = self.helper.api.work.initiate_work( self.helper.connect_id, friendly_name) try: response = urllib.request.urlopen( self.urlhaus_csv_url, context=ssl.create_default_context( cafile=certifi.where()), ) image = response.read() with open( os.path.dirname(os.path.abspath(__file__)) + "/data.csv", "wb", ) as file: file.write(image) fp = open( os.path.dirname(os.path.abspath(__file__)) + "/data.csv", "r", ) rdr = csv.reader(filter(lambda row: row[0] != "#", fp)) bundle_objects = [] for row in rdr: if row[3] == "online" or self.urlhaus_import_offline: external_reference = ExternalReference( source_name="Abuse.ch URLhaus", url=row[6], description="URLhaus repository URL", ) stix_observable = SimpleObservable( id=OpenCTIStix2Utils. generate_random_stix_id( "x-opencti-simple-observable"), key="Url.value", value=row[2], description=row[4], x_opencti_score=80, object_marking_refs=[TLP_WHITE], labels=row[5].split(","), created_by_ref=self. 
identity["standard_id"], x_opencti_create_indicator=True, external_references=[external_reference], ) bundle_objects.append(stix_observable) fp.close() bundle = Bundle(objects=bundle_objects).serialize() self.helper.send_stix2_bundle( bundle, entities_types=self.helper.connect_scope, update=self.update_existing_data, work_id=work_id, ) if os.path.exists( os.path.dirname(os.path.abspath(__file__)) + "/data.csv"): os.remove( os.path.dirname(os.path.abspath(__file__)) + "/data.csv") except Exception as e: self.helper.log_error(str(e)) # Store the current timestamp as a last run message = "Connector successfully run, storing last_run as " + str( timestamp) self.helper.log_info(message) self.helper.set_state({"last_run": timestamp}) self.helper.api.work.to_processed(work_id, message) self.helper.log_info( "Last_run stored, next run in: " + str(round(self.get_interval() / 60 / 60 / 24, 2)) + " days") time.sleep(60) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( "Connector will not run, next run in: " + str(round(new_interval / 60 / 60 / 24, 2)) + " days") time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
class FireEye: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + "/config.yml" config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}) self.helper = OpenCTIConnectorHelper(config) # Extra config self.fireeye_api_url = get_config_variable("FIREEYE_API_URL", ["fireeye", "api_url"], config) self.fireeye_api_v3_public = get_config_variable( "FIREEYE_API_V3_PUBLIC", ["fireeye", "api_v3_public"], config) self.fireeye_api_v3_secret = get_config_variable( "FIREEYE_API_V3_SECRET", ["fireeye", "api_v3_secret"], config) self.fireeye_collections = get_config_variable( "FIREEYE_COLLECTIONS", ["fireeye", "collections"], config).split(",") self.fireeye_import_start_date = get_config_variable( "FIREEYE_IMPORT_START_DATE", ["fireeye", "import_start_date"], config, ) self.fireeye_interval = get_config_variable("FIREEYE_INTERVAL", ["fireeye", "interval"], config, True) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) self.added_after = parse(self.fireeye_import_start_date).timestamp() self.identity = self.helper.api.identity.create( type="Organization", name="FireEye, Inc.", description= "FireEye is a publicly traded cybersecurity company headquartered in Milpitas, California. It has been involved in the detection and prevention of major cyber attacks. It provides hardware, software, and services to investigate cybersecurity attacks, protect against malicious software, and analyze IT security risks. FireEye was founded in 2004.", ) self.marking = self.helper.api.marking_definition.create( definition_type="COMMERCIAL", definition="FIREEYE", x_opencti_order=99, x_opencti_color="#a01526", ) # Init variables self.auth_token = None self._get_token() def get_interval(self): return int(self.fireeye_interval) * 60 def _get_token(self): r = requests.post( self.fireeye_api_url + "/token", auth=HTTPBasicAuth(self.fireeye_api_v3_public, self.fireeye_api_v3_secret), data={"grant_type": "client_credentials"}, ) if r.status_code != 200: raise ValueError("FireEye Authentication failed") data = r.json() self.auth_token = data.get("access_token") def _search(self, stix_id, retry=False): time.sleep(3) self.helper.log_info("Searching for " + stix_id) headers = { "authorization": "Bearer " + self.auth_token, "accept": "application/vnd.oasis.stix+json; version=2.1", "x-app-name": "opencti-connector-4.3.0", } body = """ { "queries": [ { "type": "ENTITY_TYPE", "query": "id = 'ENTITY_ID'" } ], "include_connected_objects": false } """ entity_type = stix_id.split("--")[0] if entity_type not in searchable_types: return None body = body.replace("ENTITY_TYPE", entity_type).replace("ENTITY_ID", stix_id) r = requests.post(self.fireeye_api_url + "/collections/search", data=body, headers=headers) if r.status_code == 200: return r elif (r.status_code == 401 or r.status_code == 403) and not retry: self._get_token() return self._search(stix_id, True) elif r.status_code == 204 or r.status_code == 205: return None elif r.status_code == 401 or r.status_code == 403: raise ValueError("Query failed, permission denied") else: print(r) raise ValueError("An unknown error occurred") def _query(self, url, retry=False): headers = { "authorization": "Bearer " + self.auth_token, "accept": "application/vnd.oasis.stix+json; version=2.1", "x-app-name": "opencti-connector-4.3.0", } r = requests.get(url, headers=headers) if r.status_code == 200: 
return r elif (r.status_code == 401 or r.status_code == 403) and not retry: self._get_token() return self._query(url, True) elif r.status_code == 401 or r.status_code == 403: raise ValueError("Query failed, permission denied") else: raise ValueError("An unknown error occurred") def _send_entity(self, bundle, work_id): if "objects" in bundle and len(bundle) > 0: final_objects = [] for stix_object in bundle["objects"]: if stix_object["type"] == "threat-actor": stix_object["type"] = "intrusion-set" stix_object["id"] = stix_object["id"].replace( "threat-actor", "intrusion-set") if "created_by_ref" not in stix_object: stix_object["created_by_ref"] = self.identity[ "standard_id"] if stix_object["type"] != "marking-definition": stix_object["object_marking_refs"] = [ "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82" ] stix_object["object_marking_refs"].append( self.marking["standard_id"]) final_objects.append(stix_object) final_bundle = {"type": "bundle", "objects": final_objects} self.helper.send_stix2_bundle( json.dumps(final_bundle), update=self.update_existing_data, work_id=work_id, ) def _import_collection(self, collection, last_id_modified_timestamp=None, last_id=None, work_id=None): have_next_page = True url = None last_object = None while have_next_page: if url is None: if last_id_modified_timestamp is not None: url = (self.fireeye_api_url + "/collections/" + collection + "/objects" + "?added_after=" + str(self.added_after) + "&length=100" + "&last_id_modified_timestamp=" + str(last_id_modified_timestamp)) else: url = (self.fireeye_api_url + "/collections/" + collection + "/objects" + "?added_after=" + str(self.added_after) + "&length=100") result = self._query(url) parsed_result = json.loads(result.text) if "objects" in parsed_result and len(parsed_result) > 0: last_object = parsed_result["objects"][-1] object_ids = [ stix_object["id"] for stix_object in parsed_result["objects"] ] if last_object["id"] != last_id: final_objects = [] for stix_object in parsed_result["objects"]: if stix_object["type"] == "threat-actor": stix_object["type"] = "intrusion-set" stix_object["id"] = stix_object["id"].replace( "threat-actor", "intrusion-set") if stix_object["type"] == "relationship": # If the source_ref is not in the current bundle if stix_object["source_ref"] not in object_ids: # Search entity in OpenCTI opencti_entity = ( self.helper.api.stix_domain_object.read( id=stix_object["source_ref"])) # If the entity is not found if opencti_entity is None: # Search the entity in FireEye fireeye_entity = self._search( stix_object["source_ref"]) # If the entity is found if fireeye_entity is not None: fireeye_entity_decoded = json.loads( fireeye_entity.text) # Send the entity before this bundle self._send_entity( fireeye_entity_decoded, work_id) stix_object["source_ref"] = stix_object[ "source_ref"].replace("threat-actor", "intrusion-set") # Search if the entity is not in bundle if stix_object["target_ref"] not in object_ids: opencti_entity = ( self.helper.api.stix_domain_object.read( id=stix_object["target_ref"])) if opencti_entity is None: fireeye_entity = self._search( stix_object["target_ref"]) if fireeye_entity is not None: fireeye_entity_decoded = json.loads( fireeye_entity.text) self._send_entity( fireeye_entity_decoded, work_id) stix_object["target_ref"] = stix_object[ "target_ref"].replace("threat-actor", "intrusion-set") if ("object_refs" in stix_object and len(stix_object["object_refs"]) > 0): for object_ref in stix_object["object_refs"]: if object_ref not in object_ids: opencti_entity = 
(self.helper.api. stix_domain_object.read( id=object_ref)) if opencti_entity is None: fireeye_entity = self._search( object_ref) if fireeye_entity is not None: fireeye_entity_decoded = json.loads( fireeye_entity.text) self._send_entity( fireeye_entity_decoded, work_id) if "created_by_ref" not in stix_object: stix_object["created_by_ref"] = self.identity[ "standard_id"] if stix_object["type"] != "marking-definition": stix_object["object_marking_refs"] = [ "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82" ] stix_object["object_marking_refs"].append( self.marking["standard_id"]) final_objects.append(stix_object) final_bundle = {"type": "bundle", "objects": final_objects} self.helper.send_stix2_bundle( json.dumps(final_bundle), update=self.update_existing_data, work_id=work_id, ) headers = result.headers if "Link" in headers: have_next_page = True link = headers["Link"].split(";") url = link[0][1:-1] last_id_modified_timestamp = parse_qs( urlparse( url).query)["last_id_modified_timestamp"][0] else: have_next_page = False else: have_next_page = False return { "last_id_modified_timestamp": last_id_modified_timestamp, "last_id": last_object["id"] if "id" in last_object else None, } def run(self): while True: try: self.helper.log_info("Synchronizing with FireEye API...") timestamp = int(time.time()) now = datetime.datetime.utcfromtimestamp(timestamp) friendly_name = "FireEye run @ " + now.strftime( "%Y-%m-%d %H:%M:%S") work_id = self.helper.api.work.initiate_work( self.helper.connect_id, friendly_name) current_state = self.helper.get_state() if (current_state is None or "last_id_modified_timestamp" not in current_state): self.helper.set_state({ "last_id_modified_timestamp": { "indicators": None, "reports": None, }, "last_id": { "indicators": None, "reports": None, }, }) current_state = self.helper.get_state() last_id_modified_timestamp = current_state[ "last_id_modified_timestamp"] last_id = current_state["last_id"] if "indicators" in self.fireeye_collections: self.helper.log_info( "Get indicators created after " + str(last_id_modified_timestamp["indicators"])) indicators_last = self._import_collection( "indicators", last_id_modified_timestamp["indicators"], last_id["indicators"], work_id, ) current_state = self.helper.get_state() self.helper.set_state({ "last_id_modified_timestamp": { "indicators": indicators_last["last_id_modified_timestamp"], "reports": current_state["last_id_modified_timestamp"] ["reports"], }, "last_id": { "indicators": indicators_last["last_id"], "reports": current_state["last_id"]["reports"], }, }) if "reports" in self.fireeye_collections: self.helper.log_info( "Get reports created after " + str(last_id_modified_timestamp["reports"])) reports_last = self._import_collection( "reports", last_id_modified_timestamp["reports"], last_id["reports"], work_id, ) current_state = self.helper.get_state() self.helper.set_state({ "last_id_modified_timestamp": { "indicators": current_state["last_id_modified_timestamp"] ["indicators"], "reports": reports_last["last_id_modified_timestamp"], }, "last_id": { "indicators": current_state["last_id"]["indicators"], "reports": reports_last["last_id"], }, }) message = "End of synchronization" self.helper.api.work.to_processed(work_id, message) self.helper.log_info(message) time.sleep(self.get_interval()) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
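# --- Illustrative sketch (not part of the connector) -----------------------
# _search and _query above retry exactly once on a 401/403 by refreshing the
# bearer token, then give up with "permission denied". The same pattern as a
# small generic helper (function and parameter names are illustrative):
import requests


def get_with_refresh(url, token_holder, refresh_token, retried=False):
    headers = {"authorization": "Bearer " + token_holder["token"]}
    r = requests.get(url, headers=headers)
    if r.status_code in (401, 403):
        if retried:
            raise ValueError("Query failed, permission denied")
        token_holder["token"] = refresh_token()  # fetch a fresh token once
        return get_with_refresh(url, token_holder, refresh_token, retried=True)
    r.raise_for_status()
    return r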
class OpenCTI: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + '/config.yml' config = yaml.load(open(config_file_path), Loader=yaml.FullLoader ) if os.path.isfile(config_file_path) else {} self.helper = OpenCTIConnectorHelper(config) # Extra config self.opencti_sectors_file_url = get_config_variable( 'CONFIG_SECTORS_FILE_URL', ['config', 'sectors_file_url'], config) self.opencti_geography_file_url = get_config_variable( 'CONFIG_GEOGRAPHY_FILE_URL', ['config', 'geography_file_url'], config) self.opencti_interval = get_config_variable('CONFIG_INTERVAL', ['config', 'interval'], config, True) self.update_existing_data = get_config_variable( 'CONNECTOR_UPDATE_EXISTING_DATA', ['connector', 'update_existing_data'], config) def get_interval(self): return int(self.opencti_interval) * 60 * 60 * 24 def run(self): self.helper.log_info('Fetching OpenCTI datasets...') while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and 'last_run' in current_state: last_run = current_state['last_run'] self.helper.log_info('Connector last run: ' + datetime.utcfromtimestamp(last_run). strftime('%Y-%m-%d %H:%M:%S')) else: last_run = None self.helper.log_info('Connector has never run') # If the last_run is more than interval-1 day if last_run is None or ((timestamp - last_run) > ( (int(self.opencti_interval) - 1) * 60 * 60 * 24)): sectors_data = urllib.request.urlopen( self.opencti_sectors_file_url).read() self.helper.send_stix2_bundle(sectors_data.decode('utf-8'), self.helper.connect_scope, self.update_existing_data) geography_data = urllib.request.urlopen( self.opencti_geography_file_url).read() self.helper.send_stix2_bundle( geography_data.decode('utf-8'), self.helper.connect_scope, self.update_existing_data) # Store the current timestamp as a last run self.helper.log_info( 'Connector successfully run, storing last_run as ' + str(timestamp)) self.helper.set_state({'last_run': timestamp}) self.helper.log_info( 'Last_run stored, next run in: ' + str(round(self.get_interval() / 60 / 60 / 24, 2)) + ' days') time.sleep(60) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( 'Connector will not run, next run in: ' + str(round(new_interval / 60 / 60 / 24, 2)) + ' days') time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info('Connector stop') exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
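# The run() loop above gates each import on "last_run is more than interval-1
# day ago". A minimal sketch of that gate as a pure function; the one-day slack
# mirrors the connector's own comparison and is a convention of this code, not
# an API requirement.
def should_run(last_run, now, interval_days):
    """True when the connector has never run or the (interval - 1 day) window elapsed."""
    if last_run is None:
        return True
    return (now - last_run) > (interval_days - 1) * 60 * 60 * 24

assert should_run(None, 1_000_000, 7)                      # never ran
assert not should_run(1_000_000, 1_000_000 + 3_600, 7)     # ran an hour ago
assert should_run(1_000_000, 1_000_000 + 7 * 86_400, 7)    # a full week later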
class Misp: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.helper = OpenCTIConnectorHelper(config) # Extra config self.misp_url = get_config_variable("MISP_URL", ["misp", "url"], config) self.misp_key = get_config_variable("MISP_KEY", ["misp", "key"], config) self.misp_ssl_verify = get_config_variable( "MISP_SSL_VERIFY", ["misp", "ssl_verify"], config ) self.misp_create_report = get_config_variable( "MISP_CREATE_REPORTS", ["misp", "create_reports"], config ) self.misp_report_class = ( get_config_variable("MISP_REPORT_CLASS", ["misp", "report_class"], config) or "MISP Event" ) self.misp_import_from_date = get_config_variable( "MISP_IMPORT_FROM_DATE", ["misp", "import_from_date"], config ) self.misp_import_tags = get_config_variable( "MISP_IMPORT_TAGS", ["misp", "import_tags"], config ) self.misp_interval = get_config_variable( "MISP_INTERVAL", ["misp", "interval"], config, True ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) # Initialize MISP self.misp = ExpandedPyMISP( url=self.misp_url, key=self.misp_key, ssl=self.misp_ssl_verify, debug=False ) def get_interval(self): return int(self.misp_interval) * 60 def run(self): while True: timestamp = int(time.time()) # Get the last_run datetime current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = datetime.utcfromtimestamp( current_state["last_run"] ).strftime("%Y-%m-%d %H:%M:%S") self.helper.log_info("Connector last run: " + last_run) else: last_run = None self.helper.log_info("Connector has never run") # If import with tags complex_query_tag = None if self.misp_import_tags is not None: or_parameters = [] for tag in self.misp_import_tags.split(","): or_parameters.append(tag.strip()) complex_query_tag = self.misp.build_complex_query( or_parameters=or_parameters ) # If import from a specific date import_from_date = None if self.misp_import_from_date is not None: import_from_date = parse(self.misp_import_from_date).strftime( "%Y-%m-%d %H:%M:%S" ) # Prepare the query kwargs = dict() if complex_query_tag is not None: kwargs["tags"] = complex_query_tag if last_run is not None: kwargs["timestamp"] = last_run elif import_from_date is not None: kwargs["date_from"] = import_from_date # Query with pagination of 50 current_page = 1 while True: kwargs["limit"] = 50 kwargs["page"] = current_page self.helper.log_info( "Fetching MISP events with args: " + json.dumps(kwargs) ) events = [] try: events = self.misp.search("events", **kwargs) except Exception as e: self.helper.log_error(str(e))
# Retry the query once before moving on
try: events = self.misp.search("events", **kwargs) except Exception as e: self.helper.log_error(str(e)) self.helper.log_info("MISP returned " + str(len(events)) + " events.") # Break if no more result if len(events) == 0: break try: self.process_events(events) except Exception as e: self.helper.log_error(str(e)) current_page += 1 self.helper.set_state({"last_run": timestamp}) time.sleep(self.get_interval()) def process_events(self, events): for event in events: self.helper.log_info("Processing event " + event["Event"]["uuid"]) ### Default variables added_markings = [] added_entities = [] added_object_refs = [] ### Pre-process # Author author = Identity( name=event["Event"]["Orgc"]["name"], identity_class="organization"
) # Elements event_elements = self.prepare_elements(event["Event"]["Galaxy"], author) # Markings if "Tag" in event["Event"]: event_markings = self.resolve_markings(event["Event"]["Tag"]) else: event_markings = [TLP_WHITE] # Tags event_tags = [] if "Tag" in event["Event"]: event_tags = self.resolve_tags(event["Event"]["Tag"]) # ExternalReference event_external_reference = ExternalReference( source_name=self.helper.connect_name, external_id=event["Event"]["uuid"], url=self.misp_url + "/events/view/" + event["Event"]["uuid"], ) ### Get indicators event_external_references = [event_external_reference] indicators = [] # Get attributes for attribute in event["Event"]["Attribute"]: indicator = self.process_attribute( author, event_elements, event_markings, [], attribute ) if attribute["type"] == "link": event_external_references.append( ExternalReference( source_name=attribute["category"], external_id=attribute["uuid"], url=attribute["value"], ) ) if indicator is not None: indicators.append(indicator) # Get attributes of objects objects_relationships = [] for object in event["Event"]["Object"]: attribute_external_references = [] for attribute in object["Attribute"]: if attribute["type"] == "link": attribute_external_references.append( ExternalReference( source_name=attribute["category"], external_id=attribute["uuid"], url=attribute["value"], ) ) object_attributes = [] for attribute in object["Attribute"]: indicator = self.process_attribute( author, event_elements, event_markings, attribute_external_references, attribute, ) if indicator is not None: indicators.append(indicator) if ( object["meta-category"] == "file" and indicator["indicator"].x_opencti_observable_type in FILETYPES ): object_attributes.append(indicator) objects_relationships.extend( self.process_observable_relations(object_attributes, []) ) ### Prepare the bundle bundle_objects = [author] object_refs = [] # Add event markings for event_marking in event_markings: if event_marking["id"] not in added_markings: bundle_objects.append(event_marking) added_markings.append(event_marking["id"]) # Add event elements all_event_elements = ( event_elements["intrusion_sets"] + event_elements["malwares"] + event_elements["tools"] + event_elements["attack_patterns"] ) for event_element in all_event_elements: if event_element["name"] not in added_object_refs: object_refs.append(event_element) added_object_refs.append(event_element["name"]) if event_element["name"] not in added_entities: bundle_objects.append(event_element) added_entities.append(event_element["name"]) # Add indicators for indicator in indicators: if indicator["indicator"]["id"] not in added_object_refs: object_refs.append(indicator["indicator"]) added_object_refs.append(indicator["indicator"]["id"]) if indicator["indicator"]["id"] not in added_entities: bundle_objects.append(indicator["indicator"]) added_entities.append(indicator["indicator"]["id"]) # Add attribute markings for attribute_marking in indicator["markings"]: if attribute_marking["id"] not in added_markings: bundle_objects.append(attribute_marking) added_markings.append(attribute_marking["id"]) # Add attribute elements all_attribute_elements = ( indicator["attribute_elements"]["intrusion_sets"] + indicator["attribute_elements"]["malwares"] + indicator["attribute_elements"]["tools"] + indicator["attribute_elements"]["attack_patterns"] ) for attribute_element in all_attribute_elements: if attribute_element["name"] not in added_object_refs: object_refs.append(attribute_element) 
added_object_refs.append(attribute_element["name"]) if attribute_element["name"] not in added_entities: bundle_objects.append(attribute_element) added_entities.append(attribute_element["name"]) # Add attribute relationships for relationship in indicator["relationships"]: object_refs.append(relationship) bundle_objects.append(relationship) # Add object_relationships for object_relationship in objects_relationships: bundle_objects.append(object_relationship) ### Create the report if needed if self.misp_create_report and len(object_refs) > 0: report = Report( name=event["Event"]["info"], description=event["Event"]["info"], published=parse(event["Event"]["date"]), created_by_ref=author, object_marking_refs=event_markings, labels=["threat-report"], object_refs=object_refs, external_references=event_external_references, custom_properties={ "x_opencti_report_class": self.misp_report_class, "x_opencti_object_status": 2, "x_opencti_tags": event_tags, }, ) bundle_objects.append(report) bundle = Bundle(objects=bundle_objects).serialize() self.helper.log_info("Sending event STIX2 bundle") self.helper.send_stix2_bundle( bundle, None, self.update_existing_data, False ) def process_attribute( self, author, event_elements, event_markings, attribute_external_references, attribute, ): try: resolved_attributes = self.resolve_type( attribute["type"], attribute["value"] ) if resolved_attributes is None: return None for resolved_attribute in resolved_attributes: ### Pre-process # Elements attribute_elements = self.prepare_elements(attribute["Galaxy"], author) # Markings & Tags attribute_tags = [] if "Tag" in attribute: attribute_markings = self.resolve_markings( attribute["Tag"], with_default=False ) attribute_tags = self.resolve_tags(attribute["Tag"]) if len(attribute_markings) == 0: attribute_markings = event_markings else: attribute_markings = event_markings ### Create the indicator observable_type = resolved_attribute["type"] observable_value = resolved_attribute["value"] name = resolved_attribute["value"] pattern_type = "stix" # observable type is yara for instance if observable_type in PATTERNTYPES: pattern_type = observable_type observable_type = "Unknown" genuine_pattern = ( "[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']" ) pattern = observable_value name = ( attribute["comment"] if len(attribute["comment"]) > 0 else observable_type ) # observable type is not in stix 2 elif observable_type not in OPENCTISTIX2: return None # observable type is in stix else: if "transform" in OPENCTISTIX2[observable_type]: if ( OPENCTISTIX2[observable_type]["transform"]["operation"] == "remove_string" ): observable_value = observable_value.replace( OPENCTISTIX2[observable_type]["transform"]["value"], "" ) lhs = ObjectPath( OPENCTISTIX2[observable_type]["type"], OPENCTISTIX2[observable_type]["path"], ) genuine_pattern = str( ObservationExpression( EqualityComparisonExpression(lhs, observable_value) ) ) pattern = genuine_pattern indicator = Indicator( name=name, description=attribute["comment"], pattern=genuine_pattern, valid_from=datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), labels=["malicious-activity"], created_by_ref=author, object_marking_refs=attribute_markings, external_references=attribute_external_references, custom_properties={ "x_opencti_indicator_pattern": pattern, "x_opencti_observable_type": observable_type, "x_opencti_observable_value": observable_value, "x_opencti_pattern_type": pattern_type, "x_opencti_tags": attribute_tags, }, ) ### Create the relationships
relationships = [] # Event threats for threat in ( event_elements["intrusion_sets"] + event_elements["malwares"] + event_elements["tools"] ): relationships.append( Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref=threat.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, }, ) ) # Attribute threats for threat in ( attribute_elements["intrusion_sets"] + attribute_elements["malwares"] + attribute_elements["tools"] ): relationships.append( Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref=threat.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, }, ) ) # Event Attack Patterns for attack_pattern in event_elements["attack_patterns"]: if len(event_elements["malwares"]) > 0: threats = event_elements["malwares"] elif len(event_elements["intrusion_sets"]) > 0: threats = event_elements["intrusion_sets"] else: threats = [] for threat in threats: relationship_uses = Relationship( relationship_type="uses", created_by_ref=author, source_ref=threat.id, target_ref=attack_pattern.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_ignore_dates": True, }, ) relationships.append(relationship_uses) relationship_indicates = Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref="malware--fa42a846-8d90-4e51-bc29-71d5b4802168", # Fake description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_source_ref": indicator.id, "x_opencti_target_ref": relationship_uses.id, }, ) relationships.append(relationship_indicates) # Attribute Attack Patterns for attack_pattern in attribute_elements["attack_patterns"]: if len(attribute_elements["malwares"]) > 0: threats = attribute_elements["malwares"] elif len(attribute_elements["intrusion_sets"]) > 0: threats = attribute_elements["intrusion_sets"] else: threats = [] for threat in threats: relationship_uses = Relationship( relationship_type="uses", created_by_ref=author, source_ref=threat.id, target_ref=attack_pattern.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"])
).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_ignore_dates": True, }, ) relationships.append(relationship_uses) relationship_indicates = Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref="malware--fa42a846-8d90-4e51-bc29-71d5b4802168", # Fake description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_source_ref": indicator.id, "x_opencti_target_ref": relationship_uses.id, "x_opencti_ignore_dates": True, }, ) relationships.append(relationship_indicates) return { "indicator": indicator, "relationships": relationships, "attribute_elements": attribute_elements, "markings": attribute_markings, } except: return None def process_observable_relations( self, object_attributes, result_table, start_element=0 ): if start_element == 0: result_table = [] if len(object_attributes) == 1: return [] for x in range(start_element + 1, len(object_attributes)): result_table.append( Relationship( relationship_type="corresponds", source_ref=object_attributes[start_element]["indicator"]["id"], target_ref=object_attributes[x]["indicator"]["id"], description="Same file", custom_properties={"x_opencti_ignore_dates": True}, ) ) if start_element != len(object_attributes): return self.process_observable_relations( object_attributes, result_table, start_element + 1 ) else: return result_table def prepare_elements(self, galaxies, author): elements = { "intrusion_sets": [], "malwares": [], "tools": [], "attack_patterns": [], } added_names = [] for galaxy in galaxies: # Get the linked intrusion sets if ( ( galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Intrusion Set" ) or (galaxy["namespace"] == "misp" and galaxy["name"] == "Threat Actor") or ( galaxy["namespace"] == "misp" and galaxy["name"] == "Microsoft Activity Group actor" ) ): for galaxy_entity in galaxy["GalaxyCluster"]: if " - G" in galaxy_entity["value"]: name = galaxy_entity["value"].split(" - G")[0] elif "APT " in galaxy_entity["value"]: name = galaxy_entity["value"].replace("APT ", "APT") else: name = galaxy_entity["value"] if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]: aliases = galaxy_entity["meta"]["synonyms"] else: aliases = [name] if name not in added_names: elements["intrusion_sets"].append( IntrusionSet( name=name, labels=["intrusion-set"], description=galaxy_entity["description"], created_by_ref=author, custom_properties={"x_opencti_aliases": aliases}, ) ) added_names.append(name) # Get the linked malwares if ( (galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Malware") or (galaxy["namespace"] == "misp" and galaxy["name"] == "Tool") or (galaxy["namespace"] == "misp" and galaxy["name"] == "Ransomware") or (galaxy["namespace"] == "misp" and galaxy["name"] == "Android") or (galaxy["namespace"] == "misp" and galaxy["name"] == "Malpedia") ): for galaxy_entity in galaxy["GalaxyCluster"]: if " - S" in galaxy_entity["value"]: name = galaxy_entity["value"].split(" - S")[0] else: name = galaxy_entity["value"] if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]: 
aliases = galaxy_entity["meta"]["synonyms"] else: aliases = [name] if name not in added_names: elements["malwares"].append( Malware( name=name, labels=["malware"], description=galaxy_entity["description"], created_by_ref=author, custom_properties={"x_opencti_aliases": aliases}, ) ) added_names.append(name) # Get the linked tools if galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Tool": for galaxy_entity in galaxy["GalaxyCluster"]: if " - S" in galaxy_entity["value"]: name = galaxy_entity["value"].split(" - S")[0] else: name = galaxy_entity["value"] if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]: aliases = galaxy_entity["meta"]["synonyms"] else: aliases = [name] if name not in added_names: elements["tools"].append( Tool( name=name, labels=["tool"], description=galaxy_entity["description"], created_by_ref=author, custom_properties={"x_opencti_aliases": aliases}, ) ) added_names.append(name) # Get the linked attack_patterns if ( galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Attack Pattern" ): for galaxy_entity in galaxy["GalaxyCluster"]: if " - T" in galaxy_entity["value"]: name = galaxy_entity["value"].split(" - T")[0] else: name = galaxy_entity["value"] if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]: aliases = galaxy_entity["meta"]["synonyms"] else: aliases = [name] if name not in added_names: elements["attack_patterns"].append( AttackPattern( name=name, labels=["attack-pattern"], description=galaxy_entity["description"], created_by_ref=author, custom_properties={ "x_opencti_external_id": galaxy_entity["meta"][ "external_id" ][0], "x_opencti_aliases": aliases, }, ) ) added_names.append(name) return elements def resolve_type(self, type, value): types = { "yara": ["yara"], "md5": ["file-md5"], "sha1": ["file-sha1"], "sha256": ["file-sha256"], "filename": ["file-name"], "pdb": ["pdb-path"], "filename|md5": ["file-name", "file-md5"], "filename|sha1": ["file-name", "file-sha1"], "filename|sha256": ["file-name", "file-sha256"], "ip-src": ["ipv4-addr"], "ip-dst": ["ipv4-addr"], "hostname": ["domain"], "domain": ["domain"], "domain|ip": ["domain", "ipv4-addr"], "url": ["url"], "windows-service-name": ["windows-service-name"], "windows-service-displayname": ["windows-service-display-name"], "windows-scheduled-task": ["windows-scheduled-task"], } if type in types: resolved_types = types[type] if len(resolved_types) == 2: values = value.split("|") if resolved_types[0] == "ipv4-addr": type_0 = self.detect_ip_version(values[0]) else: type_0 = resolved_types[0] if resolved_types[1] == "ipv4-addr": type_1 = self.detect_ip_version(values[1]) else: type_1 = resolved_types[1] return [ {"type": type_0, "value": values[0]}, {"type": type_1, "value": values[1]}, ] else: if resolved_types[0] == "ipv4-addr": type_0 = self.detect_ip_version(value) else: type_0 = resolved_types[0] return [{"type": type_0, "value": value}] def detect_ip_version(self, value): if len(value) > 16: return "ipv6-addr" else: return "ipv4-addr" def resolve_markings(self, tags, with_default=True): markings = [] for tag in tags: if tag["name"] == "tlp:white": markings.append(TLP_WHITE) if tag["name"] == "tlp:green": markings.append(TLP_GREEN) if tag["name"] == "tlp:amber": markings.append(TLP_AMBER) if tag["name"] == "tlp:red": markings.append(TLP_RED) if len(markings) == 0 and with_default: markings.append(TLP_WHITE) return markings def resolve_tags(self, tags): opencti_tags = [] for tag in tags: if ( tag["name"] != "tlp:white" and tag["name"] != "tlp:green" and tag["name"] 
!= "tlp:amber" and tag["name"] != "tlp:red" and not tag["name"].startswith("misp-galaxy:mitre-threat-actor") and not tag["name"].startswith("misp-galaxy:mitre-intrusion-set") and not tag["name"].startswith("misp-galaxy:mitre-malware") and not tag["name"].startswith("misp-galaxy:mitre-attack-pattern") and not tag["name"].startswith("misp-galaxy:mitre-tool") and not tag["name"].startswith("misp-galaxy:tool") and not tag["name"].startswith("misp-galaxy:ransomware") and not tag["name"].startswith("misp-galaxy:malpedia") ): tag_value = tag["name"] if '="' in tag["name"]: tag_value_split = tag["name"].split('="') tag_value = tag_value_split[1][:-1].strip() elif ":" in tag["name"]: tag_value_split = tag["name"].split(":") tag_value = tag_value_split[1].strip() if tag_value.isdigit(): if ":" in tag["name"]: tag_value_split = tag["name"].split(":") tag_value = tag_value_split[1].strip() else: tag_value = tag["name"] opencti_tags.append( {"tag_type": "MISP", "value": tag_value, "color": "#008ac8"} ) return opencti_tags
class Malpedia: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.interval = 1 # 1 Day interval between each scraping self.helper = OpenCTIConnectorHelper(config) # Extra config self.confidence_level = get_config_variable( "CONNECTOR_CONFIDENCE_LEVEL", ["connector", "confidence_level"], config, ) self.MALPEDIA_API = get_config_variable( "MALPEDIA_API", ["malpedia", "MALPEDIA_API"], config ) self.AUTH_KEY = get_config_variable( "AUTH_KEY", ["malpedia", "AUTH_KEY"], config ) def get_interval(self): return int(self.interval) * 60 * 60 * 24 def next_run(self, seconds): return def run(self): self.helper.log_info("Fetching Malpedia datasets...") while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = current_state["last_run"] self.helper.log_info( "Connector last run: " + datetime.utcfromtimestamp(last_run).strftime( "%Y-%m-%d %H:%M:%S" ) ) else: last_run = None self.helper.log_info("Connector has never run") # If the last_run is more than interval-1 day if last_run is None or ( (timestamp - last_run) > ((int(self.interval) - 1) * 60 * 60 * 24) ): self.helper.log_info("Connector will run!") ## CORE ## api_call = { "API_CHECK_APIKEY": "check/apikey", "API_GET_VERSION": "get/version", "API_GET_FAMILIES": "get/families", "API_LIST_ACTORS": "list/actors", "API_GET_FAMILY": "get/family/", "API_LIST_FAMILIES": "list/families", "API_GET_YARA": "get/yara/", } # API Key check r = requests.get( self.MALPEDIA_API + api_call["API_CHECK_APIKEY"], headers={"Authorization": "apitoken " + self.AUTH_KEY}, ) response_json = r.json() if "Valid token" in response_json["detail"]: print("--- Authentication successful.") else: print("--- Authentication failed.") # API Version check r = requests.get(self.MALPEDIA_API + api_call["API_GET_VERSION"]) response_json = r.json() print( "--- Malpedia version: " + str(response_json["version"]) + " (" + response_json["date"] + ")" ) ###[TODO] Version check: use self.helper.set_state # if malpedia_latest_check is None: # global malpedia_latest_check = response_json["version"] # else: # if response_json["version"] > malpedia_latest_check: # continue here # else: # print("----- Version " + str(response_json["version"]) + " already imported.") ### MAIN GET ### ### get list of families r = requests.get( self.MALPEDIA_API + api_call["API_LIST_FAMILIES"], headers={"Authorization": "apitoken " + self.AUTH_KEY}, ) list_of_families_json = r.json() ### get families r = requests.get( self.MALPEDIA_API + api_call["API_GET_FAMILIES"], headers={"Authorization": "apitoken " + self.AUTH_KEY}, ) families_json = r.json() ### get list of actors r = requests.get( self.MALPEDIA_API + api_call["API_LIST_ACTORS"], headers={"Authorization": "apitoken " + self.AUTH_KEY}, ) # list_actors_json = r.json() ### [TODO] there is no get/actors endpoint, so one call per actor in the list will be needed ### WORK ### # Link to malpedia website, to add in everything we create external_reference_malpedia = self.helper.api.external_reference.create( source_name="Malpedia (" + str(response_json["version"]) + " (" + response_json["date"] + ")", url="https://malpedia.caad.fkie.fraunhofer.de", ) malpedia_organization = self.helper.api.identity.create( type="Organization",
name="Malpedia", description="Malpedia is a free service offered by Fraunhofer FKIE.", ) # for family in families: # print(json.dumps(list_of_families_json, indent=4, sort_keys=True)) for name in list_of_families_json: # we create the malware(family) malware = self.helper.api.malware.create( name=families_json[name]["common_name"], description=families_json[name]["description"], # TODO add the aliases contained in families_json[name]["alt_name"] ) print( "------- " + families_json[name]["common_name"] + " created." ) # we add main external reference to malpedia website self.helper.api.stix_entity.add_external_reference( id=malware["id"], external_reference_id=external_reference_malpedia["id"], ) print("------------ Malpedia reference created") # we could also add each URL referenced in the malpedia entity # for ref in families_json[name]["urls"]: # ref_exist = opencti_api_client.intrusion_set.read( # filters=[{"key": "URL", "values": [ref]}] # if not ref_exist: # external_reference = opencti_api_client.external_reference.create( # source_name="Malpedia's sources", url=ref # ) # ) # we add yara rules associated with the malware r = requests.get( self.MALPEDIA_API + api_call["API_GET_YARA"] + name, headers={"Authorization": "apitoken " + self.AUTH_KEY}, ) list_yara = r.json() for yara in list_yara: # yara contains tlp level for name_rule, rule in list_yara[yara].items(): print("----------- Begin Yara : " + name_rule) # extract yara date extract = re.search( r"([0-9]{4}-[0-9]{2}-[0-9]{2})", rule ) if extract is None: date = response_json["date"] else: date = extract.group(1) # extract tlp # tlp = rule.split("TLP:")[1].split('"')[0] print("date ::::: " + date) print("name ::::: " + name_rule) print("rule ::::: " + rule) # add yara indicator = self.helper.api.indicator.create( name=name_rule, description="Yara from Malpedia", pattern_type="yara", indicator_pattern=rule, main_observable_type="File-SHA256", valid_from=date, ) print("----------- Yara : " + name_rule + " created.") print("----------------- Creating relationship: ") print(indicator["id"]) print(malware["id"]) print(date) print(external_reference_malpedia["id"]) print(families_json[name]["common_name"]) self.helper.api.stix_relation.create( fromType="Indicator", fromId=indicator["id"], toType="Malware", toId=malware["id"], relationship_type="indicates", first_seen=date, last_seen=date, description="Yara rules for " + families_json[name]["common_name"] + ".", weight=self.confidence_level, role_played="Unknown", createdByRef=malpedia_organization, ignore_dates=True, update=True, ) # Store the current timestamp as a last run self.helper.log_info( "Connector successfully run, storing last_run as " + str(timestamp) ) self.helper.set_state({"last_run": timestamp}) self.helper.log_info( "Last_run stored, next run in: " + str(round(self.get_interval() / 60 / 60 / 24, 2)) + " days" ) time.sleep(60) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( "Connector will not run, next run in: " + str(round(new_interval / 60 / 60 / 24, 2)) + " days" ) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
class TheHive: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname( os.path.abspath(__file__)) + "/config.yml" config = (yaml.load(open(config_file_path), Loader=yaml.SafeLoader) if os.path.isfile(config_file_path) else {}) self.helper = OpenCTIConnectorHelper(config) # Extra config self.thehive_url = get_config_variable("THEHIVE_URL", ["thehive", "url"], config) self.thehive_api_key = get_config_variable("THEHIVE_API_KEY", ["thehive", "api_key"], config) self.thehive_check_ssl = get_config_variable("THEHIVE_CHECK_SSL", ["thehive", "check_ssl"], config, False, True) self.thehive_organization_name = get_config_variable( "THEHIVE_ORGANIZATION_NAME", ["thehive", "organization_name"], config) self.thehive_import_from_date = get_config_variable( "THEHIVE_IMPORT_FROM_DATE", ["thehive", "import_from_date"], config, False, datetime.utcfromtimestamp(int( time.time())).strftime("%Y-%m-%d %H:%M:%S"), ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) self.identity = self.helper.api.identity.create( type="Organization", name=self.thehive_organization_name, description=self.thehive_organization_name, ) self.thehive_api = TheHiveApi(self.thehive_url, self.thehive_api_key, cert=self.thehive_check_ssl) def generate_case_bundle(self, case): markings = [] if case["tlp"] == 0: markings.append(TLP_WHITE) if case["tlp"] == 1: markings.append(TLP_GREEN) if case["tlp"] == 2: markings.append(TLP_AMBER) if case["tlp"] == 3: markings.append(TLP_RED) if len(markings) == 0: markings.append(TLP_WHITE) bundle_objects = [] incident = StixXOpenCTIIncident( id=OpenCTIStix2Utils.generate_random_stix_id("x-opencti-incident"), name=case["title"], description=case["description"], first_seen=datetime.utcfromtimestamp( int(case["createdAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"), last_seen=datetime.utcfromtimestamp( int(case["updatedAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"), object_marking_refs=markings, labels=case["tags"] if "tags" in case else [], created_by_ref=self.identity["standard_id"], ) bundle_objects.append(incident) # Get observables observables = self.thehive_api.get_case_observables( case_id=case["id"]).json() for observable in observables: if observable["dataType"] == "hash": if len(observable["data"]) == 32: data_type = "file_md5" elif len(observable["data"]) == 40: data_type = "file_sha1" elif len(observable["data"]) == 64: data_type = "file_sha256" else: data_type = "unknown" else: data_type = observable["dataType"] observable_key = OBSERVABLES_MAPPING.get(data_type) if observable_key is not None: stix_observable = SimpleObservable( id=OpenCTIStix2Utils.generate_random_stix_id( "x-opencti-simple-observable"), key=observable_key, value=observable["data"], description=observable["message"], x_opencti_score=80 if observable["ioc"] else 50, object_marking_refs=markings, labels=observable["tags"] if "tags" in observable else [], created_by_ref=self.identity["standard_id"], x_opencti_create_indicator=observable["ioc"], ) stix_observable_relation = Relationship( id=OpenCTIStix2Utils.generate_random_stix_id( "relationship"), relationship_type="related-to", created_by_ref=self.identity["standard_id"], source_ref=stix_observable.id, target_ref=incident.id, object_marking_refs=markings, ) bundle_objects.append(stix_observable) bundle_objects.append(stix_observable_relation) if observable["sighted"]: fake_indicator_id = ( "indicator--c1034564-a9fb-429b-a1c1-c80116cc8e1e")
stix_sighting = Sighting( id=OpenCTIStix2Utils.generate_random_stix_id( "sighting"), first_seen=datetime.utcfromtimestamp( int(observable["startDate"] / 1000)).strftime("%Y-%m-%dT%H:%M:%SZ"), last_seen=datetime.utcfromtimestamp( int(observable["startDate"] / 1000 + 3600)).strftime("%Y-%m-%dT%H:%M:%SZ"), where_sighted_refs=[self.identity["standard_id"]], sighting_of_ref=fake_indicator_id, custom_properties={ "x_opencti_sighting_of_ref": stix_observable.id }, ) bundle_objects.append(stix_sighting) bundle = Bundle(objects=bundle_objects).serialize() return bundle def run(self): self.helper.log_info("Starting TheHive Connector...") while True: try: # Get the current timestamp and check timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_case_date" in current_state: last_case_date = current_state["last_case_date"] self.helper.log_info( "Connector last_case_date: " + datetime.utcfromtimestamp(last_case_date).strftime( "%Y-%m-%d %H:%M:%S")) else: last_case_date = parse( self.thehive_import_from_date).timestamp() self.helper.log_info("Connector has no last_case_date") self.helper.log_info("Get cases since last run (" + datetime.utcfromtimestamp(last_case_date). strftime("%Y-%m-%d %H:%M:%S") + ")") query = Or( Gt("updatedAt", int(last_case_date * 1000)), Child("case_task", Gt("createdAt", int(last_case_date * 1000))), Child("case_artifact", Gt("createdAt", int(last_case_date * 1000))), ) cases = self.thehive_api.find_cases(query=query, sort="updatedAt", range="0-100").json() now = datetime.utcfromtimestamp(timestamp) friendly_name = "TheHive run @ " + now.strftime( "%Y-%m-%d %H:%M:%S") work_id = self.helper.api.work.initiate_work( self.helper.connect_id, friendly_name) try: for case in cases: stix_bundle = self.generate_case_bundle(case) self.helper.send_stix2_bundle( stix_bundle, update=self.update_existing_data, work_id=work_id, ) except Exception as e: self.helper.log_error(str(e)) # Store the current timestamp as a last run message = "Connector successfully run, storing last_run as " + str( timestamp) self.helper.log_info(message) self.helper.api.work.to_processed(work_id, message) current_state = self.helper.get_state() if current_state is None: current_state = {"last_case_date": timestamp} else: current_state["last_case_date"] = timestamp self.helper.set_state(current_state) time.sleep(60) except (KeyboardInterrupt, SystemExit): self.helper.log_info("Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
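# generate_case_bundle() above dispatches TheHive "hash" observables on string
# length with an if/elif chain. A sketch of the same mapping as a table lookup;
# the data_type labels are the ones the connector itself uses.
HASH_TYPE_BY_LENGTH = {32: "file_md5", 40: "file_sha1", 64: "file_sha256"}

def hash_data_type(value: str) -> str:
    return HASH_TYPE_BY_LENGTH.get(len(value), "unknown")

assert hash_data_type("d41d8cd98f00b204e9800998ecf8427e") == "file_md5"           # 32 chars
assert hash_data_type("da39a3ee5e6b4b0d3255bfef95601890afd80709") == "file_sha1"  # 40 chars
assert hash_data_type("not-a-hash") == "unknown"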
class Talosip: def __init__(self): config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.talosip_interval = get_config_variable( "TALOSIP_INTERVAL", ["talosip", "interval"], config, True ) self.talosip_url = get_config_variable( "TALOSIP_URL", ["talosip", "url"], config ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) self.delete_old_data = get_config_variable( "DELETE_OLD_DATA", ["connector", "delete_old_data"], config ) self.helper = OpenCTIConnectorHelper(config) # get tag self.talos_tag = self.helper.api.tag.create( tag_type="Event", value="TalosIntelligence", color="#fc036b" ) self.ipv4_tag = self.helper.api.tag.create( tag_type="Event", value="ipv4-blacklist", color="#1c100b" ) # create identity self.helper.log_info("Creating an Identity...") self.entity_identity = self.helper.api.identity.create( name="Cisco Talos", type="Organization", description="Talos Intelligence IP Blacklist", ) # create marking definition self.tlp_white_marking_definition = self.helper.api.marking_definition.read( filters={"key": "definition", "values": ["TLP:WHITE"]} ) # report published time self.published_report = None self.being_added = [] self.being_deleted = [] def delete_old_entity(self): if len(self.being_deleted) > 0: self.helper.log_info("Deleting old entities") for ip in self.being_deleted: # listing being deleted object_result = self.helper.api.stix_observable.read( filters=[{"key": "observable_value", "values": [ip]}], ) # deleting observable self.helper.api.stix_observable.delete(id=object_result["id"]) # deleting indicators for indicator_id in object_result["indicatorsIds"]: self.helper.api.stix_domain_entity.delete(id=indicator_id) # deleting external references for external_ref_id in object_result["externalReferencesIds"]: self.helper.api.stix_domain_entity.delete(id=external_ref_id) else: self.helper.log_info("Nothing to delete") def _get_published_report(self): published_time = ( os.path.dirname(os.path.abspath(__file__)) + "/published_time.txt" ) # Set and store the published time in a file. If the file exists, read the published time from it to avoid creating a new report
if os.path.isfile(published_time): self.helper.log_info("Getting published time from file") read = open(published_time, "r") published = read.read() else: self.helper.log_info("Setting new time") published = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") write = open(published_time, "w") write.write(published) return published def check_diff(self, newfile, oldfile): # fall back to an empty list when there is no previous file try: old_iplist = open(oldfile, "r") except FileNotFoundError: old_iplist = [] new_iplist = open(newfile, "r") parsed_old_list = [] parsed_new_list = [] for ip in old_iplist: ip = ip.strip("\n") parsed_old_list.append(ip) for ip in new_iplist: ip = ip.strip("\n") parsed_new_list.append(ip) self.being_added = [ip for ip in parsed_new_list if ip not in parsed_old_list] self.being_deleted = [ip for ip in parsed_old_list if ip not in parsed_new_list] self.helper.log_info( "{}/{} IOCs that are new will be added.".format( len(self.being_added), len(parsed_new_list) ) ) self.helper.log_info( "{} IOCs that are no longer in the list can be deleted.".format( len(self.being_deleted) ) ) def get_interval(self): return int(self.talosip_interval) * 60 * 60 * 24 def _create_observable(self, ip): # creating observable created_observable = self.helper.api.stix_observable.create( type="IPv4-Addr", observable_value=ip, createdByRef=self.entity_identity["id"], description="from talosip", markingDefinitions=self.tlp_white_marking_definition["id"], createIndicator=False, update=self.update_existing_data, ) # adding tags to the created observable self.helper.api.stix_entity.add_tag( id=created_observable["id"], tag_id=self.talos_tag["id"] ) self.helper.api.stix_entity.add_tag( id=created_observable["id"], tag_id=self.ipv4_tag["id"] ) # create external references # adding external references
return created_observable def _create_indicator(self, ip, observable_id): # create indicator created_indicator = self.helper.api.indicator.create( name=ip, indicator_pattern="[ipv4-addr:value = '" + ip + "']", markingDefinitions=self.tlp_white_marking_definition["id"], update=self.update_existing_data, main_observable_type="ipv4-addr", description="from talosip", ) # add tags self.helper.api.stix_entity.add_tag( id=created_indicator["id"], tag_id=self.ipv4_tag["id"] ) self.helper.api.stix_entity.add_tag( id=created_indicator["id"], tag_id=self.talos_tag["id"] ) # link to observable self.helper.log_info("Adding observable...") self.helper.api.indicator.add_stix_observable( id=created_indicator["id"], stix_observable_id=observable_id ) return created_indicator def _process_file(self): created_observable_id = [] created_indicator_id = [] new_black_list_file = ( os.path.dirname(os.path.abspath(__file__)) + "/ip_blacklist.txt" ) old_black_list_file = ( os.path.dirname(os.path.abspath(__file__)) + "/old_ip_blacklist.txt" ) # always fetch new file if os.path.isfile(new_black_list_file): self.helper.log_info( "[196] IP blacklist file exists, renaming it to the old file" ) # renaming file.... shutil.move(new_black_list_file, old_black_list_file) self.helper.log_info("[200] File name changed.") self.helper.log_info("Downloading file from {}".format(self.talosip_url)) wget.download(self.talosip_url, out=new_black_list_file) # processing message... self.helper.log_info("[205] File downloaded. Processing data...")
self.check_diff(new_black_list_file, old_black_list_file) for ip in self.being_added: created_observable = self._create_observable(ip) created_indicator = self._create_indicator(ip, created_observable["id"]) created_observable_id.append(created_observable["id"]) created_indicator_id.append(created_indicator["id"]) # create a report # create external reference self.helper.log_info("Creating external reference...") _report_external_reference = self.helper.api.external_reference.create( source_name="Talos Intelligence", url="https://talosintelligence.com/", ) self.helper.log_info("Creating report...") # create report created_report = self.helper.api.report.create( name="Talos Intelligence IP Blacklist", published=self._get_published_report(), markingDefinitions=self.tlp_white_marking_definition["id"], description="This report represents the blacklist provided by Cisco Talos", report_class="Threat Report", createdByRef=self.entity_identity["id"], external_reference_id=_report_external_reference["id"], update=self.update_existing_data, modified=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), ) # add tag to report self.helper.api.stix_entity.add_tag( id=created_report["id"], tag_id=self.talos_tag["id"] ) # add observables to report from id list self.helper.log_info("Adding observables to report...") for observable_id in created_observable_id: self.helper.api.report.add_stix_observable( id=created_report["id"], stix_observable_id=observable_id ) # add indicators to report from id list self.helper.log_info("Adding indicators to report...") for indicator_id in created_indicator_id: self.helper.api.report.add_stix_entity( id=created_report["id"], entity_id=indicator_id ) self.helper.log_info( "Delete old data is set to {}".format(self.delete_old_data) ) if self.delete_old_data: self.delete_old_entity() def start(self): self.helper.log_info("[256] Fetching Talos IP database...") while True: try: timestamp = int(time.time()) current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = current_state["last_run"] self.helper.log_info( "[258] Connector last run: " + datetime.utcfromtimestamp(last_run).strftime( "%Y-%m-%d %H:%M:%S" ) ) else: last_run = None self.helper.log_info("[265] Connector has never run") if last_run is None or ( (timestamp - last_run) > ((int(self.talosip_interval)) * 60 * 60 * 24) ): self.helper.log_info("[270] Connector will run!") self._process_file() self.helper.log_info( "[273] Connector successfully run, storing last_run as " + str(timestamp) ) self.helper.set_state({"last_run": timestamp}) self.helper.log_info( "[278] Last_run stored, next run in: " + str(round(self.get_interval() / 60 / 60 / 24, 2)) + " days" ) time.sleep(60) else: new_interval = self.get_interval() - (timestamp - last_run) self.helper.log_info( "[286] Connector will not run, next run in: " + str(round(new_interval / 60 / 60 / 24, 2)) + " days" ) time.sleep(3600) except (KeyboardInterrupt, SystemExit): self.helper.log_info("[292] Connector stop") exit(0) except Exception as e: self.helper.log_error(str(e)) time.sleep(60)
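# check_diff() above compares the old and new blacklists with two list
# comprehensions, which is O(n*m) on large lists. A sketch of the same diff
# using set difference, which is linear; the sample addresses are invented.
def diff_ip_lists(old_ips, new_ips):
    old_set, new_set = set(old_ips), set(new_ips)
    return sorted(new_set - old_set), sorted(old_set - new_set)

added, deleted = diff_ip_lists(["1.1.1.1", "2.2.2.2"], ["2.2.2.2", "3.3.3.3"])
assert added == ["3.3.3.3"] and deleted == ["1.1.1.1"]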