class InternalImportConnector:
    """Test-fixture connector: consumes an internal-import message and pushes
    a SimpleObservable plus a Report referencing it back to OpenCTI."""

    def __init__(self, config_file_path: str, api_client: OpenCTIApiClient, data: Dict):
        """Expose the fixture's OpenCTI settings via the environment and build
        the connector helper.

        :param config_file_path: optional YAML connector configuration file
        :param api_client: configured OpenCTI API client (test fixture)
        :param data: observable key/value used by _process_message
        """
        # set OPENCTI settings from fixture
        os.environ["OPENCTI_URL"] = api_client.api_url
        os.environ["OPENCTI_TOKEN"] = api_client.api_token
        os.environ["OPENCTI_SSL_VERIFY"] = str(api_client.ssl_verify)
        # Fix: use a context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        self.data = data

    def _process_message(self, data: Dict) -> str:
        """Handle one import message: build an observable from the fixture
        data, wrap it in the referenced report, and send the bundle."""
        file_fetch = data["file_fetch"]
        file_uri = self.helper.opencti_url + file_fetch
        # Downloading and saving file to connector
        self.helper.log_info("Importing the file " + file_uri)
        observable = SimpleObservable(
            id=OpenCTIStix2Utils.generate_random_stix_id(
                "x-opencti-simple-observable"),
            key=self.data["simple_observable_key"],
            value=self.data["simple_observable_value"],
        )
        bundle_objects = [observable]
        entity_id = data.get("entity_id", None)
        # Re-read the platform report, then re-emit it referencing the observable
        report = self.helper.api.report.read(id=entity_id)
        report = Report(
            id=report["standard_id"],
            name=report["name"],
            description=report["description"],
            published=self.helper.api.stix2.format_date(report["published"]),
            report_types=report["report_types"],
            object_refs=bundle_objects,
        )
        bundle_objects.append(report)
        # create stix bundle
        bundle = Bundle(objects=bundle_objects).serialize()
        # send data
        self.helper.send_stix2_bundle(bundle=bundle)
        return "foo"

    def stop(self):
        """Stop the helper's listen loop."""
        self.helper.stop()

    def start(self):
        """Listen for messages; surface RabbitMQ connection failures."""
        try:
            self.helper.listen(self._process_message)
        except pika.exceptions.AMQPConnectionError:
            self.stop()
            raise ValueError(
                "Connector was not able to establish the connection to RabbitMQ"
            )
class Mitre:
    """Legacy connector importing the MITRE ATT&CK enterprise dataset."""

    def __init__(self):
        """Load configuration from config.yml or environment variables and
        initialize the OpenCTI connector helper."""
        # Get configuration
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        self.config = dict()
        if os.path.isfile(config_file_path):
            # Fix: context manager so the config file handle is not leaked
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
            self.config_rabbitmq = config['rabbitmq']
            self.config['name'] = config['mitre']['name']
            self.config['confidence_level'] = config['mitre']['confidence_level']
            self.config['enterprise_file_url'] = config['mitre']['enterprise_file_url']
            self.config['entities'] = config['mitre']['entities'].split(',')
            self.config['interval'] = config['mitre']['interval']
            self.config['log_level'] = config['mitre']['log_level']
        else:
            # No config file: fall back to environment variables with defaults
            self.config_rabbitmq = dict()
            self.config_rabbitmq['hostname'] = os.getenv(
                'RABBITMQ_HOSTNAME', 'localhost')
            self.config_rabbitmq['port'] = os.getenv('RABBITMQ_PORT', 5672)
            self.config_rabbitmq['username'] = os.getenv(
                'RABBITMQ_USERNAME', 'guest')
            self.config_rabbitmq['password'] = os.getenv(
                'RABBITMQ_PASSWORD', 'guest')
            self.config['name'] = os.getenv('MITRE_NAME', 'MITRE ATT&CK')
            self.config['confidence_level'] = int(
                os.getenv('MITRE_CONFIDENCE_LEVEL', 3))
            self.config['enterprise_file_url'] = os.getenv(
                'MITRE_ENTERPRISE_FILE_URL',
                'https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json'
            )
            self.config['entities'] = os.getenv(
                'MITRE_ENTITIES',
                'attack-pattern,course-of-action,intrusion-set,malware,tool'
            ).split(',')
            self.config['interval'] = os.getenv('MITRE_INTERVAL', 5)
            self.config['log_level'] = os.getenv('MITRE_LOG_LEVEL', 'info')
        # Initialize OpenCTI Connector (identifier = alphanumeric lowered name)
        connector_identifier = ''.join(
            e for e in self.config['name'] if e.isalnum())
        self.opencti_connector_helper = OpenCTIConnectorHelper(
            connector_identifier.lower(), self.config, self.config_rabbitmq,
            self.config['log_level'])

    def get_log_level(self):
        """Return the configured log level string."""
        return self.config['log_level']

    def get_interval(self):
        """Return the run interval in seconds (config value is in days)."""
        return int(self.config['interval']) * 60 * 60 * 24

    def run(self):
        """Download the enterprise dataset and forward it as a STIX2 bundle."""
        enterprise_data = urllib.request.urlopen(
            self.config['enterprise_file_url']).read()
        self.opencti_connector_helper.send_stix2_bundle(
            enterprise_data.decode('utf-8'), self.config['entities'])
class LastInfoSec:
    """Connector polling the LastInfoSec feed and importing STIX2 bundles."""

    def __init__(self):
        """Load config, build the connector helper and a direct API client."""
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        # Fix: context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        self.lastinfosec_url = get_config_variable("CONFIG_LIS_URL",
                                                   ["lastinfosec", "api_url"],
                                                   config)
        self.lastinfosec_apikey = get_config_variable(
            "CONFIG_LIS_APIKEY", ["lastinfosec", "api_key"], config)
        self.opencti_url = get_config_variable("OPENCTI_URL",
                                               ["opencti", "url"], config)
        self.opencti_id = get_config_variable("OPENCTI_TOKEN",
                                              ["opencti", "token"], config)
        self.update_existing_data = True
        self.api = OpenCTIApiClient(self.opencti_url, self.opencti_id)

    def run(self):
        """Poll loop: fetch the feed, send each entry as a STIX2 bundle,
        record last_run state, and sleep between iterations."""
        self.helper.log_info("Fetching lastinfosec datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                now = datetime.datetime.utcfromtimestamp(timestamp)
                # Fix: work name previously said "MITRE run" (copy-paste bug)
                friendly_name = "LastInfoSec run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                lastinfosec_data = requests.get(
                    self.lastinfosec_url + self.lastinfosec_apikey).json()
                if "message" in lastinfosec_data.keys():
                    for data in lastinfosec_data["message"]:
                        sdata = json.dumps(data)
                        self.helper.send_stix2_bundle(sdata, work_id=work_id)
                    # Store the current timestamp as a last run
                    message = (
                        "Connector successfully run, storing last_run as {0}".
                        format(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(3500)
                else:
                    # Feed returned no data: still record the run, retry sooner
                    message = (
                        "Connector successfully run, storing last_run as {0}".
                        format(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(300)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error("run:" + str(e))
                time.sleep(60)
class OpenCTI:
    """Legacy connector importing the OpenCTI reference sectors dataset."""

    def __init__(self):
        """Load configuration from config.yml or environment variables and
        initialize the OpenCTI connector helper."""
        # Get configuration
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        self.config = dict()
        if os.path.isfile(config_file_path):
            # Fix: context manager so the config file handle is not leaked
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
            self.config_rabbitmq = config['rabbitmq']
            self.config['name'] = config['opencti']['name']
            self.config['confidence_level'] = config['opencti']['confidence_level']
            self.config['sectors_file_url'] = config['opencti']['sectors_file_url']
            self.config['entities'] = config['opencti']['entities'].split(',')
            self.config['interval'] = config['opencti']['interval']
            self.config['log_level'] = config['opencti']['log_level']
        else:
            # No config file: fall back to environment variables with defaults
            self.config_rabbitmq = dict()
            self.config_rabbitmq['hostname'] = os.getenv(
                'RABBITMQ_HOSTNAME', 'localhost')
            self.config_rabbitmq['port'] = os.getenv('RABBITMQ_PORT', 5672)
            self.config_rabbitmq['username'] = os.getenv(
                'RABBITMQ_USERNAME', 'guest')
            self.config_rabbitmq['password'] = os.getenv(
                'RABBITMQ_PASSWORD', 'guest')
            self.config['name'] = os.getenv('OPENCTI_NAME', 'OpenCTI')
            self.config['confidence_level'] = int(
                os.getenv('OPENCTI_CONFIDENCE_LEVEL', 5))
            self.config['sectors_file_url'] = os.getenv(
                'OPENCTI_SECTORS_FILE_URL',
                'https://raw.githubusercontent.com/OpenCTI-Platform/datasets/master/data/sectors.json'
            )
            self.config['entities'] = os.getenv(
                'OPENCTI_ENTITIES', 'sector,region,country,city').split(',')
            self.config['interval'] = os.getenv('OPENCTI_INTERVAL', 1)
            self.config['log_level'] = os.getenv('OPENCTI_LOG_LEVEL', 'info')
        # Initialize OpenCTI Connector (identifier = alphanumeric lowered name)
        connector_identifier = ''.join(
            e for e in self.config['name'] if e.isalnum())
        self.opencti_connector_helper = OpenCTIConnectorHelper(
            connector_identifier.lower(), self.config, self.config_rabbitmq,
            self.config['log_level'])

    def get_log_level(self):
        """Return the configured log level string."""
        return self.config['log_level']

    def get_interval(self):
        """Return the run interval in seconds (config value is in days)."""
        return int(self.config['interval']) * 60 * 60 * 24

    def run(self):
        """Download the sectors dataset and forward it as a STIX2 bundle."""
        sectors_data = urllib.request.urlopen(
            self.config['sectors_file_url']).read()
        self.opencti_connector_helper.send_stix2_bundle(
            sectors_data.decode('utf-8'), self.config['entities'])
class SynchronizerConnector:
    """Stream connector replaying create/update/delete events from a remote
    OpenCTI instance into the local platform."""

    def __init__(self):
        """Load config, build the helper, and read the remote-stream settings."""
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        # Fix: context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.remote_opencti_url = get_config_variable(
            "REMOTE_OPENCTI_URL", ["remote_opencti", "url"], config)
        self.remote_opencti_ssl_verify = get_config_variable(
            "REMOTE_OPENCTI_SSL_VERIFY", ["remote_opencti", "ssl_verify"],
            config)
        self.remote_opencti_token = get_config_variable(
            "REMOTE_OPENCTI_TOKEN", ["remote_opencti", "token"], config)
        self.remote_opencti_events = get_config_variable(
            "REMOTE_OPENCTI_EVENTS", ["remote_opencti", "events"],
            config).split(",")
        self.remote_opencti_start_timestamp = get_config_variable(
            "REMOTE_OPENCTI_START_TIMESTAMP",
            ["remote_opencti", "start_timestamp"],
            config,
        )

    def _process_message(self, msg):
        """Replicate one stream event locally, filtered by the configured
        event types (create/update/delete)."""
        data = json.loads(msg.data)
        try:
            if "create" in self.remote_opencti_events and msg.event == "create":
                bundle = json.dumps({"objects": [data["data"]]})
                self.helper.send_stix2_bundle(bundle,
                                              event_version=data["version"])
            elif "update" in self.remote_opencti_events and msg.event == "update":
                bundle = json.dumps({"objects": [data["data"]]})
                self.helper.send_stix2_bundle(bundle,
                                              event_version=data["version"])
            elif "delete" in self.remote_opencti_events and msg.event == "delete":
                # Route the delete to the right API endpoint by entity type
                if data["data"]["type"] == "relationship":
                    self.helper.api.stix_core_relationship.delete(
                        id=data["data"]["id"])
                elif StixCyberObservableTypes.has_value(data["data"]["type"]):
                    self.helper.api.stix_cyber_observable.delete(
                        id=data["data"]["id"])
                else:
                    self.helper.api.stix_domain_object.delete(
                        id=data["data"]["id"])
        except Exception as e:
            # Fix: was a bare `except: pass` that hid every failure.
            # Stay best-effort (keep consuming the stream) but log the error.
            self.helper.log_error("Error processing stream event: " + str(e))

    def start(self):
        """Connect to the remote live stream and process events forever."""
        self.helper.listen_stream(
            self._process_message,
            self.remote_opencti_url,
            self.remote_opencti_token,
            self.remote_opencti_ssl_verify,
            self.remote_opencti_start_timestamp,
        )
class ImportFileStix:
    """Internal-import connector: fetches an uploaded file and sends it as
    STIX2 bundles, elevating STIX 1 XML payloads first."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        # Fix: context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        """Fetch the referenced file and forward it; XML payloads are
        converted from STIX 1 to STIX 2 via stix2-elevator."""
        file_fetch = data["file_fetch"]
        file_uri = self.helper.opencti_url + file_fetch
        self.helper.log_info("Importing the file " + file_uri)
        file_content = self.helper.api.fetch_opencti_file(file_uri)
        if data["file_mime"] == "text/xml":
            # STIX 1.x payload: elevate to STIX 2 before sending
            initialize_options()
            file_content = elevate(file_content)
        bundles_sent = self.helper.send_stix2_bundle(file_content)
        return "Sent " + str(len(bundles_sent)) + " stix bundle(s) for worker import"

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
class ImportFileStix:
    """Internal-import connector: fetches an uploaded STIX file from the
    platform and forwards it as bundles for worker import."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        # Fix: context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        """Fetch the referenced file and send it as STIX2 bundles.

        Returns a one-element list with a human-readable summary.
        """
        file_path = data['file_path']
        update = data['update']
        file_uri = self.helper.opencti_url + file_path
        self.helper.log_info('Importing the file ' + file_uri)
        file_content = self.helper.api.fetch_opencti_file(file_uri)
        bundles_sent = self.helper.send_stix2_bundle(file_content, None, update)
        return [
            'Sent ' + str(len(bundles_sent)) + ' stix bundle(s) for worker import'
        ]

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
class ImportFileStix:
    """Internal-import connector that can impersonate the user token carried
    in the message while sending the fetched STIX file."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        # Fix: context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        """Fetch the referenced file and forward it as STIX2 bundles,
        temporarily switching to the message's token when present.

        Returns a one-element list with a human-readable summary.
        """
        old_token = self.helper.api.get_token()
        # Idiomatic single lookup instead of `in` check + subscript
        token = data.get("token")
        file_path = data["file_path"]
        update = data["update"]
        file_uri = self.helper.opencti_url + file_path
        self.helper.log_info("Importing the file " + file_uri)
        file_content = self.helper.api.fetch_opencti_file(file_uri)
        if token:
            self.helper.api.set_token(token)
        try:
            bundles_sent = self.helper.send_stix2_bundle(file_content, None, update)
        finally:
            # Fix: restore the original token even if the send raises,
            # otherwise the helper keeps using the impersonated token.
            self.helper.api.set_token(old_token)
        return [
            "Sent " + str(len(bundles_sent)) + " stix bundle(s) for worker import"
        ]

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
class Misp:
    """MISP import connector.

    Polls a MISP instance, converts events (attributes, objects, galaxies,
    tags) into STIX2 entities and relationships, and sends the resulting
    bundles to OpenCTI.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + '/config.yml'
        # NOTE(review): open() without a context manager leaks the file handle
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.misp_url = get_config_variable('MISP_URL', ['misp', 'url'], config)
        self.misp_key = get_config_variable('MISP_KEY', ['misp', 'key'], config)
        self.misp_ssl_verify = get_config_variable('MISP_SSL_VERIFY', ['misp', 'ssl_verify'], config)
        self.misp_create_report = get_config_variable('MISP_CREATE_REPORTS', ['misp', 'create_reports'], config)
        self.misp_report_class = get_config_variable(
            'MISP_REPORT_CLASS', ['misp', 'report_class'], config
        ) or 'MISP Event'
        self.misp_import_from_date = get_config_variable('MISP_IMPORT_FROM_DATE', ['misp', 'import_from_date'], config)
        self.misp_import_tags = get_config_variable('MISP_IMPORT_TAGS', ['misp', 'import_tags'], config)
        self.misp_interval = get_config_variable('MISP_INTERVAL', ['misp', 'interval'], config, True)
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA', ['connector', 'update_existing_data'], config
        )
        # Initialize MISP
        self.misp = ExpandedPyMISP(url=self.misp_url, key=self.misp_key, ssl=self.misp_ssl_verify, debug=False)

    def get_interval(self):
        """Return the polling interval in seconds (config value is in minutes)."""
        return int(self.misp_interval) * 60

    def run(self):
        """Main loop: query MISP (paginated by 100), process each page of
        events, persist last_run, and sleep for the configured interval."""
        while True:
            timestamp = int(time.time())
            # Get the last_run datetime
            current_state = self.helper.get_state()
            if current_state is not None and 'last_run' in current_state:
                last_run = datetime.utcfromtimestamp(current_state['last_run']).strftime('%Y-%m-%d %H:%M:%S')
                self.helper.log_info(
                    'Connector last run: ' + last_run)
            else:
                last_run = None
                self.helper.log_info('Connector has never run')
            # If import with tags
            complex_query_tag = None
            if self.misp_import_tags is not None:
                or_parameters = []
                for tag in self.misp_import_tags.split(','):
                    or_parameters.append(tag.strip())
                complex_query_tag = self.misp.build_complex_query(or_parameters=or_parameters)
            # If import from a specific date
            import_from_date = None
            if self.misp_import_from_date is not None:
                import_from_date = parse(self.misp_import_from_date).strftime('%Y-%m-%d %H:%M:%S')
            # Prepare the query: last_run takes precedence over date_from
            kwargs = dict()
            if complex_query_tag is not None:
                kwargs['tags'] = complex_query_tag
            if last_run is not None:
                kwargs['timestamp'] = last_run
            elif import_from_date is not None:
                kwargs['date_from'] = import_from_date
            # Query with pagination of 100
            current_page = 1
            while True:
                kwargs['limit'] = 100
                kwargs['page'] = current_page
                self.helper.log_info('Fetching MISP events with args: ' + json.dumps(kwargs))
                events = self.misp.search('events', **kwargs)
                self.helper.log_info('MISP returned ' + str(len(events)) + ' events.')
                # Break if no more result
                if len(events) == 0:
                    break
                self.process_events(events)
                current_page += 1
            # Set the last_run timestamp
            self.helper.set_state({'last_run': timestamp})
            time.sleep(self.get_interval())

    def process_events(self, events):
        """Convert a list of MISP events into STIX2 bundles and send them.

        De-duplicates markings, entities and object_refs per event, and
        optionally wraps everything in a Report.
        """
        for event in events:
            ### Default variables
            added_markings = []
            added_entities = []
            added_object_refs = []

            ### Pre-process
            # Author
            author = Identity(name=event['Event']['Orgc']['name'], identity_class='organization')
            # Elements
            event_elements = self.prepare_elements(event['Event']['Galaxy'])
            # Markings
            if 'Tag' in event['Event']:
                event_markings = self.resolve_markings(event['Event']['Tag'])
            else:
                event_markings = [TLP_WHITE]
            # ExternalReference back to the source MISP event
            event_external_reference = ExternalReference(
                source_name=self.helper.connect_name,
                external_id=event['Event']['uuid'],
                url=self.misp_url + '/events/view/' + event['Event']['uuid'])

            ### Get indicators
            indicators = []
            # Get attributes
            for attribute in event['Event']['Attribute']:
                indicator = self.process_attribute(author, event_elements, event_markings, attribute)
                if indicator is not None:
                    indicators.append(indicator)
            # Get attributes of objects
            objects_relationships = []
            for object in event['Event']['Object']:
                object_attributes = []
                for attribute in object['Attribute']:
                    indicator = self.process_attribute(author, event_elements, event_markings, attribute)
                    if indicator is not None:
                        indicators.append(indicator)
                        # Collect file-type attributes to link them together
                        if object['meta-category'] == 'file' and indicator[
                                'indicator'].x_opencti_observable_type in FILETYPES:
                            object_attributes.append(indicator)
                objects_relationships.extend(self.process_observable_relations(object_attributes, []))

            ### Prepare the bundle
            bundle_objects = [author]
            object_refs = []
            # Add event markings
            for event_marking in event_markings:
                if event_marking['id'] not in added_markings:
                    bundle_objects.append(event_marking)
                    added_markings.append(event_marking['id'])
            # Add event elements (galaxy-derived threats), de-duplicated by name
            all_event_elements = \
                event_elements['intrusion_sets'] + \
                event_elements['malwares'] + \
                event_elements['tools'] + \
                event_elements['attack_patterns']
            for event_element in all_event_elements:
                if event_element['name'] not in added_object_refs:
                    object_refs.append(event_element)
                    added_object_refs.append(event_element['name'])
                if event_element['name'] not in added_entities:
                    bundle_objects.append(event_element)
                    added_entities.append(event_element['name'])
            # Add indicators, de-duplicated by STIX id
            for indicator in indicators:
                if indicator['indicator']['id'] not in added_object_refs:
                    object_refs.append(indicator['indicator'])
                    added_object_refs.append(indicator['indicator']['id'])
                if indicator['indicator']['id'] not in added_entities:
                    bundle_objects.append(indicator['indicator'])
                    added_entities.append(indicator['indicator']['id'])
                # Add attribute markings
                for attribute_marking in indicator['markings']:
                    if attribute_marking['id'] not in added_markings:
                        bundle_objects.append(attribute_marking)
                        added_markings.append(attribute_marking['id'])
                # Add attribute elements
                all_attribute_elements = \
                    indicator['attribute_elements']['intrusion_sets'] + \
                    indicator['attribute_elements']['malwares'] + \
                    indicator['attribute_elements']['tools'] + \
                    indicator['attribute_elements']['attack_patterns']
                for attribute_element in all_attribute_elements:
                    if attribute_element['name'] not in added_object_refs:
                        object_refs.append(attribute_element)
                        added_object_refs.append(attribute_element['name'])
                    if attribute_element['name'] not in added_entities:
                        bundle_objects.append(attribute_element)
                        added_entities.append(attribute_element['name'])
                # Add attribute relationships
                for relationship in indicator['relationships']:
                    object_refs.append(relationship)
                    bundle_objects.append(relationship)
            # Add object_relationships ("corresponds" links between file parts)
            for object_relationship in objects_relationships:
                bundle_objects.append(object_relationship)

            ### Create the report if needed
            if self.misp_create_report and len(object_refs) > 0:
                report = Report(
                    name=event['Event']['info'],
                    description=event['Event']['info'],
                    published=parse(event['Event']['date']),
                    created_by_ref=author,
                    object_marking_refs=event_markings,
                    labels=['threat-report'],
                    object_refs=object_refs,
                    external_references=[event_external_reference],
                    custom_properties={
                        'x_opencti_report_class': self.misp_report_class,
                        'x_opencti_object_status': 2
                    }
                )
                bundle_objects.append(report)
            bundle = Bundle(objects=bundle_objects).serialize()
            self.helper.send_stix2_bundle(bundle, None, self.update_existing_data, False)

    def process_attribute(self, author, event_elements, event_markings, attribute):
        """Convert one MISP attribute into an Indicator plus fabricated
        relationships to the event/attribute galaxy threats.

        Returns None when the attribute type is unsupported, otherwise a dict
        with keys 'indicator', 'relationships', 'attribute_elements',
        'markings'.
        """
        resolved_attributes = self.resolve_type(attribute['type'], attribute['value'])
        if resolved_attributes is None:
            return None
        # NOTE(review): this loop returns on its first iteration, so composite
        # attributes (e.g. filename|md5) only produce an indicator for the
        # first resolved part — confirm whether that is intended.
        for resolved_attribute in resolved_attributes:
            ### Pre-process
            # Elements
            attribute_elements = self.prepare_elements(attribute['Galaxy'])
            # Markings: fall back to the event markings when the attribute has none
            if 'Tag' in attribute:
                attribute_markings = self.resolve_markings(attribute['Tag'], with_default=False)
                if len(attribute_markings) == 0:
                    attribute_markings = event_markings
            else:
                attribute_markings = event_markings

            ### Create the indicator
            observable_type = resolved_attribute['type']
            observable_value = resolved_attribute['value']
            pattern_type = 'stix'
            if observable_type in PATTERNTYPES:
                # The attribute value is already a pattern of this type
                pattern_type = observable_type
            elif observable_type not in OPENCTISTIX2:
                return None
            else:
                # Build a STIX pattern from the OPENCTISTIX2 mapping
                if 'transform' in OPENCTISTIX2[observable_type]:
                    if OPENCTISTIX2[observable_type]['transform']['operation'] == 'remove_string':
                        observable_value = observable_value.replace(OPENCTISTIX2[observable_type]['transform']['value'], '')
                lhs = ObjectPath(OPENCTISTIX2[observable_type]['type'], OPENCTISTIX2[observable_type]['path'])
                observable_value = ObservationExpression(EqualityComparisonExpression(lhs, observable_value))
            try:
                indicator = Indicator(
                    name=resolved_attribute['value'],
                    description=attribute['comment'],
                    pattern=str(observable_value),
                    valid_from=datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime('%Y-%m-%dT%H:%M:%SZ'),
                    labels=['malicious-activity'],
                    created_by_ref=author,
                    object_marking_refs=attribute_markings,
                    custom_properties={
                        'x_opencti_observable_type': resolved_attribute['type'],
                        'x_opencti_observable_value': resolved_attribute['value'],
                        'x_opencti_pattern_type': pattern_type
                    }
                )
            except:
                # NOTE(review): bare except silently drops attributes whose
                # Indicator construction fails — consider logging here.
                return None
            ### Create the relationships
            relationships = []
            # Event threats: indicator "indicates" each event-level threat
            for threat in (event_elements['intrusion_sets'] + event_elements['malwares'] + event_elements['tools']):
                relationships.append(
                    Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref=threat.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level
                        }
                    )
                )
            # Attribute threats: same, for attribute-level galaxies
            for threat in (attribute_elements['intrusion_sets'] + attribute_elements['malwares'] + attribute_elements[
                    'tools']):
                relationships.append(
                    Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref=threat.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level
                        }
                    )
                )
            # Event Attack Patterns: threat "uses" pattern, plus an
            # "indicates" pointing at the uses-relationship via custom refs
            for attack_pattern in event_elements['attack_patterns']:
                if len(event_elements['malwares']) > 0:
                    threats = event_elements['malwares']
                elif len(event_elements['intrusion_sets']) > 0:
                    threats = event_elements['intrusion_sets']
                else:
                    threats = []
                for threat in threats:
                    relationship_uses = Relationship(
                        relationship_type='uses',
                        created_by_ref=author,
                        source_ref=threat.id,
                        target_ref=attack_pattern.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_ignore_dates': True
                        }
                    )
                    relationships.append(relationship_uses)
                    # target_ref is a placeholder; real refs carried in custom props
                    relationship_indicates = Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref='malware--fa42a846-8d90-4e51-bc29-71d5b4802168',  # Fake
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_source_ref': indicator.id,
                            'x_opencti_target_ref': relationship_uses.id
                        }
                    )
                    relationships.append(relationship_indicates)
            # Attribute Attack Patterns: same shape, attribute-level galaxies
            for attack_pattern in attribute_elements['attack_patterns']:
                if len(attribute_elements['malwares']) > 0:
                    threats = attribute_elements['malwares']
                elif len(attribute_elements['intrusion_sets']) > 0:
                    threats = attribute_elements['intrusion_sets']
                else:
                    threats = []
                for threat in threats:
                    relationship_uses = Relationship(
                        relationship_type='uses',
                        created_by_ref=author,
                        source_ref=threat.id,
                        target_ref=attack_pattern.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_ignore_dates': True
                        }
                    )
                    relationships.append(relationship_uses)
                    relationship_indicates = Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref='malware--fa42a846-8d90-4e51-bc29-71d5b4802168',  # Fake
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_source_ref': indicator.id,
                            'x_opencti_target_ref': relationship_uses.id,
                            'x_opencti_ignore_dates': True
                        }
                    )
                    relationships.append(relationship_indicates)
            return {
                'indicator': indicator,
                'relationships': relationships,
                'attribute_elements': attribute_elements,
                'markings': attribute_markings
            }

    def process_observable_relations(self, object_attributes, result_table, start_element=0):
        """Recursively build pairwise 'corresponds' relationships between all
        file attributes of one MISP object ("same file" links)."""
        if start_element == 0:
            result_table = []
        if len(object_attributes) == 1:
            return []
        # Link the current element to every later element
        for x in range(start_element + 1, len(object_attributes)):
            result_table.append(
                Relationship(
                    relationship_type='corresponds',
                    source_ref=object_attributes[start_element]['indicator']['id'],
                    target_ref=object_attributes[x]['indicator']['id'],
                    description='Same file',
                    custom_properties={
                        'x_opencti_ignore_dates': True
                    }
                )
            )
        # Recurse until start_element reaches the end of the list
        if start_element != len(object_attributes):
            return self.process_observable_relations(object_attributes, result_table, start_element + 1)
        else:
            return result_table

    def prepare_elements(self, galaxies):
        """Map MISP galaxies to intrusion sets, malwares, tools and attack
        patterns, de-duplicated by (cleaned) name across all galaxies."""
        elements = {'intrusion_sets': [], 'malwares': [], 'tools': [], 'attack_patterns': []}
        added_names = []
        for galaxy in galaxies:
            # Get the linked intrusion sets
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Intrusion Set')
                    or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Threat Actor')
                    or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Microsoft Activity Group actor')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    # Strip the MITRE " - Gxxxx" suffix / normalize "APT " names
                    if ' - G' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - G')[0]
                    elif 'APT ' in galaxy_entity['value']:
                        name = galaxy_entity['value'].replace('APT ', 'APT')
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['intrusion_sets'].append(IntrusionSet(
                            name=name,
                            labels=['intrusion-set'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_aliases': aliases
                            }
                        ))
                        added_names.append(name)
            # Get the linked malwares
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Malware')
                    or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Tool')
                    or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Ransomware')
                    or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Android')
                    or (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Malpedia')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    # Strip the MITRE " - Sxxxx" suffix
                    if ' - S' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - S')[0]
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['malwares'].append(Malware(
                            name=name,
                            labels=['malware'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_aliases': aliases
                            }
                        ))
                        added_names.append(name)
            # Get the linked tools
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Tool')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    if ' - S' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - S')[0]
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['tools'].append(Tool(
                            name=name,
                            labels=['tool'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_aliases': aliases
                            }
                        ))
                        added_names.append(name)
            # Get the linked attack_patterns
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Attack Pattern')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    # Strip the MITRE " - Txxxx" suffix
                    if ' - T' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - T')[0]
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['attack_patterns'].append(AttackPattern(
                            name=name,
                            labels=['attack-pattern'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_external_id': galaxy_entity['meta']['external_id'][0],
                                'x_opencti_aliases': aliases,
                            }
                        ))
                        added_names.append(name)
        return elements

    def resolve_type(self, type, value):
        """Map a MISP attribute type/value to one or two OpenCTI observable
        type/value pairs; composite types (a|b) are split on '|'.

        Returns None (implicitly) for unsupported MISP types.
        """
        types = {
            'md5': ['file-md5'],
            'sha1': ['file-sha1'],
            'sha256': ['file-sha256'],
            'filename': ['file-name'],
            'pdb': ['pdb-path'],
            'filename|md5': ['file-name', 'file-md5'],
            'filename|sha1': ['file-name', 'file-sha1'],
            'filename|sha256': ['file-name', 'file-sha256'],
            'ip-src': ['ipv4-addr'],
            'ip-dst': ['ipv4-addr'],
            'hostname': ['domain'],
            'domain': ['domain'],
            'domain|ip': ['domain', 'ipv4-addr'],
            'url': ['url'],
            'windows-service-name': ['windows-service-name'],
            'windows-service-displayname': ['windows-service-display-name'],
            'windows-scheduled-task': ['windows-scheduled-task']
        }
        if type in types:
            resolved_types = types[type]
            if len(resolved_types) == 2:
                values = value.split('|')
                # ipv4-addr entries may actually be IPv6: detect per value
                if resolved_types[0] == 'ipv4-addr':
                    type_0 = self.detect_ip_version(values[0])
                else:
                    type_0 = resolved_types[0]
                if resolved_types[1] == 'ipv4-addr':
                    type_1 = self.detect_ip_version(values[1])
                else:
                    type_1 = resolved_types[1]
                return [{'type': type_0, 'value': values[0]}, {'type': type_1, 'value': values[1]}]
            else:
                if resolved_types[0] == 'ipv4-addr':
                    type_0 = self.detect_ip_version(value)
                else:
                    type_0 = resolved_types[0]
                return [{'type': type_0, 'value': value}]

    def detect_ip_version(self, value):
        """Heuristic: addresses longer than 16 chars are treated as IPv6."""
        if len(value) > 16:
            return 'ipv6-addr'
        else:
            return 'ipv4-addr'

    def resolve_markings(self, tags, with_default=True):
        """Translate MISP tlp:* tags into STIX TLP markings; optionally fall
        back to TLP_WHITE when no TLP tag is present."""
        markings = []
        for tag in tags:
            if tag['name'] == 'tlp:white':
                markings.append(TLP_WHITE)
            if tag['name'] == 'tlp:green':
                markings.append(TLP_GREEN)
            if tag['name'] == 'tlp:amber':
                markings.append(TLP_AMBER)
            if tag['name'] == 'tlp:red':
                markings.append(TLP_RED)
        if len(markings) == 0 and with_default:
            markings.append(TLP_WHITE)
        return markings
class ExternalImportConnector:
    """Test-fixture external-import connector: builds SDOs from the supplied
    data and sends them as one STIX2 bundle within a tracked work unit."""

    def __init__(self, config_file_path: str, api_client: OpenCTIApiClient, data: Dict):
        """Expose the fixture's OpenCTI settings via the environment and build
        the connector helper.

        :param config_file_path: optional YAML connector configuration file
        :param api_client: configured OpenCTI API client (test fixture)
        :param data: list of dicts with 'class', 'id', 'name', 'description'
        """
        # set OPENCTI settings from fixture
        os.environ["OPENCTI_URL"] = api_client.api_url
        os.environ["OPENCTI_TOKEN"] = api_client.api_token
        os.environ["OPENCTI_SSL_VERIFY"] = str(api_client.ssl_verify)
        os.environ["OPENCTI_JSON_LOGGING"] = "true"
        # Fix: use a context manager so the config file handle is not leaked
        if os.path.isfile(config_file_path):
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        self.interval = get_config_variable("INTERVAL", ["test", "interval"],
                                            config, True)
        self.data = data

    def get_interval(self):
        """Return the configured interval as an integer."""
        return int(self.interval)

    def run(self):
        """Build SDOs from self.data, bundle them, and send within a work."""
        now = datetime.utcfromtimestamp(time.time())
        now_time = now.strftime("%Y-%m-%d %H:%M:%S")
        friendly_name = f"{self.helper.connect_name} run @ {now_time}"
        work_id = self.helper.api.work.initiate_work(self.helper.connect_id,
                                                     friendly_name)
        bundle_objects = []
        for elem in self.data:
            # Each elem carries its own SDO class plus constructor fields
            sdo = elem["class"](
                id=elem["id"],
                name=elem["name"],
                description=elem["description"],
            )
            bundle_objects.append(sdo)
        # create stix bundle
        bundle = Bundle(objects=bundle_objects).serialize()
        # send data
        self.helper.send_stix2_bundle(
            bundle=bundle,
            entities_types=self.helper.connect_scope,
            update=True,
            work_id=work_id,
        )
        message = "Connector successfully run, storing last_run as " + str(
            now_time)
        self.helper.api.work.to_processed(work_id, message)
        return "Foo"

    def stop(self):
        """Stop the helper's listen loop."""
        self.helper.stop()

    def start(self):
        """Run once; surface RabbitMQ connection failures."""
        try:
            self.run()
        except pika.exceptions.AMQPConnectionError:
            self.stop()
            raise ValueError(
                "Connector was not able to establish the connection to RabbitMQ"
            )
class Mitre:
    """Connector importing the MITRE ATT&CK STIX datasets into OpenCTI.

    Periodically downloads the enterprise, pre-attack and mobile-attack
    bundles and forwards them to the OpenCTI platform.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        if os.path.isfile(config_file_path):
            # Context manager closes the config file (bare open() leaked the
            # handle in the previous version).
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.mitre_enterprise_file_url = get_config_variable(
            "MITRE_ENTERPRISE_FILE_URL", ["mitre", "enterprise_file_url"],
            config)
        self.mitre_pre_attack_file_url = get_config_variable(
            "MITRE_PRE_ATTACK_FILE_URL", ["mitre", "pre_attack_file_url"],
            config)
        self.mitre_mobile_attack_file_url = get_config_variable(
            "MITRE_MOBILE_ATTACK_FILE_URL",
            ["mitre", "mobile_attack_file_url"], config)
        self.mitre_interval = get_config_variable("MITRE_INTERVAL",
                                                  ["mitre", "interval"],
                                                  config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        """Return the run interval in seconds (configured value is in days)."""
        return int(self.mitre_interval) * 60 * 60 * 24

    def next_run(self, seconds):
        # Stub kept for interface compatibility; intentionally a no-op.
        return

    def _fetch_and_send(self, url):
        """Download one STIX bundle from ``url`` and import it.

        Failures are logged, not raised, so one unreachable dataset does not
        abort the import of the others.
        """
        try:
            # Context manager closes the HTTP response (the previous version
            # never closed it).
            with urllib.request.urlopen(url) as response:
                bundle_data = response.read().decode("utf-8")
            self.helper.send_stix2_bundle(
                bundle_data,
                self.helper.connect_scope,
                self.update_existing_data,
            )
        except Exception as e:
            self.helper.log_error(str(e))

    def run(self):
        """Main loop: import all datasets whenever the interval has elapsed."""
        self.helper.log_info("Fetching MITRE datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: " +
                        datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                        (int(self.mitre_interval) - 1) * 60 * 60 * 24)):
                    self.helper.log_info("Connector will run!")
                    self._fetch_and_send(self.mitre_enterprise_file_url)
                    self._fetch_and_send(self.mitre_pre_attack_file_url)
                    self._fetch_and_send(self.mitre_mobile_attack_file_url)
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as " +
                        str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                # Deliberate catch-all: the daemon loop must survive
                # transient failures.
                self.helper.log_error(str(e))
                time.sleep(60)
class Cve:
    """Connector importing NVD CVE feeds into OpenCTI.

    Downloads the gzipped NVD JSON feed, converts it to STIX 2 and imports
    the resulting bundle; optionally back-fills yearly history feeds.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        if os.path.isfile(config_file_path):
            # Context manager closes the config file (bare open() leaked the
            # handle in the previous version).
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_import_history = get_config_variable(
            "CVE_IMPORT_HISTORY", ["cve", "import_history"], config, False)
        self.cve_nvd_data_feed = get_config_variable("CVE_NVD_DATA_FEED",
                                                     ["cve", "nvd_data_feed"],
                                                     config)
        self.cve_history_data_feed = get_config_variable(
            "CVE_HISTORY_DATA_FEED", ["cve", "history_data_feed"], config)
        self.cve_interval = get_config_variable("CVE_INTERVAL",
                                                ["cve", "interval"], config,
                                                True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        """Return the run interval in seconds (configured value is in days)."""
        return int(self.cve_interval) * 60 * 60 * 24

    def delete_files(self):
        """Remove the temporary download/conversion artifacts, if present."""
        if os.path.exists("data.json"):
            os.remove("data.json")
        if os.path.exists("data.json.gz"):
            os.remove("data.json.gz")
        if os.path.exists("data-stix2.json"):
            os.remove("data-stix2.json")

    def convert_and_send(self, url, work_id):
        """Download one NVD feed, convert it to STIX 2 and import it.

        Any failure is logged (not raised) and the temporary files are
        cleaned up in both the success and failure paths.
        """
        try:
            # Downloading json.gz file
            self.helper.log_info("Requesting the file " + url)
            # Context manager closes the HTTP response (the previous version
            # never closed it).
            with urllib.request.urlopen(
                    url,
                    context=ssl.create_default_context(
                        cafile=certifi.where())) as response:
                image = response.read()
            # NOTE(review): the archive is written next to this script but
            # read back / deleted relative to the current working directory —
            # these only match when the connector runs from its own
            # directory. TODO confirm intended behavior.
            with open(
                    os.path.dirname(os.path.abspath(__file__)) +
                    "/data.json.gz", "wb") as file:
                file.write(image)
            # Unzipping the file
            self.helper.log_info("Unzipping the file")
            with gzip.open("data.json.gz", "rb") as f_in:
                with open("data.json", "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            # Converting the file to stix2
            self.helper.log_info("Converting the file")
            convert("data.json", "data-stix2.json")
            with open("data-stix2.json") as stix_json:
                contents = stix_json.read()
            self.helper.send_stix2_bundle(
                contents,
                entities_types=self.helper.connect_scope,
                update=self.update_existing_data,
                work_id=work_id,
            )
            # Remove files
            self.delete_files()
        except Exception as e:
            self.delete_files()
            self.helper.log_error(str(e))
            time.sleep(60)

    def run(self):
        """Main loop: import the feed whenever the interval has elapsed."""
        self.helper.log_info("Fetching CVE knowledge...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: " +
                        datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                        (int(self.cve_interval) - 1) * 60 * 60 * 24)):
                    timestamp = int(time.time())
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "CVE run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name)
                    self.convert_and_send(self.cve_nvd_data_feed, work_id)
                    # If import history and never run
                    if last_run is None and self.cve_import_history:
                        now = datetime.now()
                        # NVD yearly feeds start in 2002.
                        years = list(range(2002, now.year + 1))
                        for year in years:
                            self.convert_and_send(
                                f"{self.cve_history_data_feed}nvdcve-1.1-{year}.json.gz",
                                work_id,
                            )
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as " +
                        str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    message = (
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                # Deliberate catch-all: the daemon loop must survive
                # transient failures.
                self.helper.log_error(str(e))
                time.sleep(60)
class ReportImporter:
    """Internal-import connector that parses uploaded report documents and
    imports extracted observables/entities into OpenCTI."""

    def __init__(self) -> None:
        # Instantiate the connector helper from config
        base_path = os.path.dirname(os.path.abspath(__file__))
        config_file_path = base_path + "/../config.yml"
        if os.path.isfile(config_file_path):
            # Context manager closes the config file (bare open() leaked the
            # handle in the previous version).
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        self.create_indicator = get_config_variable(
            "IMPORT_DOCUMENT_CREATE_INDICATOR",
            ["import_document", "create_indicator"],
            config,
        )

        # Load Entity and Observable configs.
        # Each file is checked separately so the raised error names the file
        # that is actually missing (previously a missing entity config was
        # misreported as a missing observable config).
        observable_config_file = base_path + "/config/observable_config.ini"
        entity_config_file = base_path + "/config/entity_config.ini"
        if os.path.isfile(observable_config_file):
            self.observable_config = self._parse_config(
                observable_config_file, Observable)
        else:
            raise FileNotFoundError(f"{observable_config_file} was not found")
        if os.path.isfile(entity_config_file):
            self.entity_config = self._parse_config(entity_config_file,
                                                    EntityConfig)
        else:
            raise FileNotFoundError(f"{entity_config_file} was not found")

    def _process_message(self, data: Dict) -> str:
        """Handle one import message: download, parse and import a report.

        Returns a human-readable status string for the platform.
        """
        self.helper.log_info("Processing new message")
        file_name = self._download_import_file(data)
        entity_id = data.get("entity_id", None)
        bypass_validation = data.get("bypass_validation", False)
        entity = (self.helper.api.stix_domain_object.read(
            id=entity_id) if entity_id is not None else None)
        if self.helper.get_only_contextual() and entity is None:
            return "Connector is only contextual and entity is not defined. Nothing was imported"
        # Retrieve entity set from OpenCTI
        entity_indicators = self._collect_stix_objects(self.entity_config)
        # Parse report
        parser = ReportParser(self.helper, entity_indicators,
                              self.observable_config)
        parsed = parser.run_parser(file_name, data["file_mime"])
        # The downloaded file is only needed for parsing.
        os.remove(file_name)
        if not parsed:
            return "No information extracted from report"
        # Process parsing results
        self.helper.log_debug("Results: {}".format(parsed))
        observables, entities = self._process_parsing_results(parsed, entity)
        # Send results to OpenCTI
        observable_cnt = self._process_parsed_objects(entity, observables,
                                                      entities,
                                                      bypass_validation,
                                                      file_name)
        entity_cnt = len(entities)
        if self.helper.get_validate_before_import() and not bypass_validation:
            return "Generated bundle sent for validation"
        else:
            return (
                f"Sent {observable_cnt} observables, 1 report update and {entity_cnt} entity connections as stix "
                f"bundle for worker import ")

    def start(self) -> None:
        """Start listening for import messages."""
        self.helper.listen(self._process_message)

    def _download_import_file(self, data: Dict) -> str:
        """Fetch the file referenced by the message and save it locally.

        Returns the local file name.
        """
        file_fetch = data["file_fetch"]
        file_uri = self.helper.opencti_url + file_fetch
        # Downloading and saving file to connector
        self.helper.log_info("Importing the file " + file_uri)
        file_name = os.path.basename(file_fetch)
        file_content = self.helper.api.fetch_opencti_file(file_uri, True)
        with open(file_name, "wb") as f:
            f.write(file_content)
        return file_name

    def _collect_stix_objects(
            self, entity_config_list: List[EntityConfig]) -> List[Entity]:
        """List platform objects for every configured entity type.

        Raises NotImplementedError when a config names an unknown API class.
        """
        base_func = self.helper.api
        entity_list = []
        for entity_config in entity_config_list:
            func_format = entity_config.stix_class
            try:
                # Resolve the API sub-client by name, e.g. api.intrusion_set.
                custom_function = getattr(base_func, func_format)
                entries = custom_function.list(
                    getAll=True,
                    filters=entity_config.filter,
                    customAttributes=entity_config.custom_attributes,
                )
                entity_list += entity_config.convert_to_entity(
                    entries, self.helper)
            except AttributeError:
                e = "Selected parser format is not supported: {}".format(
                    func_format)
                raise NotImplementedError(e)
        return entity_list

    @staticmethod
    def _parse_config(config_file: str,
                      file_class: Callable) -> List[BaseModel]:
        """Parse an ini config file into a list of ``file_class`` objects,
        one per section (the section name becomes the ``name`` field)."""
        config = MyConfigParser()
        config.read(config_file)
        config_list = []
        for section, content in config.as_dict().items():
            content["name"] = section
            config_object = file_class(**content)
            config_list.append(config_object)
        return config_list

    def _process_parsing_results(
            self, parsed: List[Dict],
            context_entity: Dict) -> (List[SimpleObservable], List[str]):
        """Split parser matches into observables and entity standard ids.

        Vulnerability and Attack-Pattern matches are resolved against the
        platform (and skipped with a log line when absent); everything else
        becomes a SimpleObservable inheriting the context entity's markings
        and author.
        """
        observables = []
        entities = []
        if context_entity is not None:
            object_markings = [
                x["standard_id"]
                for x in context_entity.get("objectMarking", [])
            ]
            # external_references = [x['standard_id'] for x in report.get('externalReferences', [])]
            # labels = [x['standard_id'] for x in report.get('objectLabel', [])]
            author = context_entity.get("createdBy")
        else:
            object_markings = []
            author = None
        if author is not None:
            author = author.get("standard_id", None)
        for match in parsed:
            if match[RESULT_FORMAT_TYPE] == OBSERVABLE_CLASS:
                if match[RESULT_FORMAT_CATEGORY] == "Vulnerability.name":
                    entity = self.helper.api.vulnerability.read(
                        filters={
                            "key": "name",
                            "values": [match[RESULT_FORMAT_MATCH]]
                        })
                    if entity is None:
                        self.helper.log_info(
                            f"Vulnerability with name '{match[RESULT_FORMAT_MATCH]}' could not be "
                            f"found. Is the CVE Connector activated?")
                        continue
                    entities.append(entity["standard_id"])
                elif match[
                        RESULT_FORMAT_CATEGORY] == "Attack-Pattern.x_mitre_id":
                    entity = self.helper.api.attack_pattern.read(
                        filters={
                            "key": "x_mitre_id",
                            "values": [match[RESULT_FORMAT_MATCH]],
                        })
                    if entity is None:
                        self.helper.log_info(
                            f"AttackPattern with MITRE ID '{match[RESULT_FORMAT_MATCH]}' could not be "
                            f"found. Is the MITRE Connector activated?")
                        continue
                    entities.append(entity["standard_id"])
                else:
                    observable = SimpleObservable(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "x-opencti-simple-observable"),
                        key=match[RESULT_FORMAT_CATEGORY],
                        value=match[RESULT_FORMAT_MATCH],
                        x_opencti_create_indicator=self.create_indicator,
                        object_marking_refs=object_markings,
                        created_by_ref=author,
                        # labels=labels,
                        # external_references=external_references
                    )
                    observables.append(observable)
            elif match[RESULT_FORMAT_TYPE] == ENTITY_CLASS:
                entities.append(match[RESULT_FORMAT_MATCH])
            else:
                self.helper.log_info("Odd data received: {}".format(match))
        return observables, entities

    def _process_parsed_objects(
        self,
        entity: Dict,
        observables: List,
        entities: List,
        bypass_validation: bool,
        file_name: str,
    ) -> int:
        """Bundle the parsed objects (wrapped in or linked to a report) and
        send them to OpenCTI.

        Returns the number of bundles sent minus the report update.
        """
        if len(observables) == 0 and len(entities) == 0:
            return 0
        if entity is not None and entity["entity_type"] == "Report":
            # Update the existing report with the new object refs.
            report = Report(
                id=entity["standard_id"],
                name=entity["name"],
                description=entity["description"],
                published=self.helper.api.stix2.format_date(entity["created"]),
                report_types=entity["report_types"],
                object_refs=observables + entities,
                allow_custom=True,
            )
            observables.append(report)
        elif entity is not None:
            # TODO, relate all object to the entity
            entity_stix_bundle = self.helper.api.stix2.export_entity(
                entity["entity_type"], entity["id"])
            observables = observables + entity_stix_bundle["objects"]
        else:
            # No context entity: wrap everything in a fresh report.
            timestamp = int(time.time())
            now = datetime.utcfromtimestamp(timestamp)
            report = Report(
                name=file_name,
                description="Automatic import",
                published=now,
                report_types=["threat-report"],
                object_refs=observables + entities,
                allow_custom=True,
            )
            observables.append(report)
        bundles_sent = []
        if len(observables) > 0:
            bundle = Bundle(objects=observables, allow_custom=True).serialize()
            bundles_sent = self.helper.send_stix2_bundle(
                bundle=bundle,
                update=True,
                bypass_validation=bypass_validation,
                file_name=file_name + ".json",
                entity_id=entity["id"] if entity is not None else None,
            )
        # len() - 1 because the report update increases the count by one
        return len(bundles_sent) - 1
class IpInfoConnector:
    """Internal-enrichment connector that geolocates IP observables via the
    ipinfo.io API and links them to city/country Location objects."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        if os.path.isfile(config_file_path):
            # Context manager closes the config file (bare open() leaked the
            # handle in the previous version).
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        self.token = get_config_variable("IPINFO_TOKEN", ["ipinfo", "token"],
                                         config)
        self.max_tlp = get_config_variable("IPINFO_MAX_TLP",
                                           ["ipinfo", "max_tlp"], config)

    def _generate_stix_bundle(self, country, city, loc, observable_id):
        """Build the serialized STIX bundle for one geolocation result.

        Creates a Country Location, a City Location, a city->country
        "located-at" relationship and an observable->city "located-at"
        relationship.
        """
        # Prefer the official country name when pycountry provides one.
        country_location = Location(
            id=OpenCTIStix2Utils.generate_random_stix_id("location"),
            name=country.name,
            country=country.official_name
            if hasattr(country, "official_name") else country.name,
            custom_properties={
                "x_opencti_location_type": "Country",
                "x_opencti_aliases": [
                    country.official_name
                    if hasattr(country, "official_name") else country.name
                ],
            },
        )
        # ipinfo returns "lat,lon" as a single string.
        loc_split = loc.split(",")
        city_location = Location(
            id=OpenCTIStix2Utils.generate_random_stix_id("location"),
            name=city,
            country=country.official_name
            if hasattr(country, "official_name") else country.name,
            latitude=loc_split[0],
            longitude=loc_split[1],
            custom_properties={"x_opencti_location_type": "City"},
        )
        city_to_country = Relationship(
            id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
            relationship_type="located-at",
            source_ref=city_location.id,
            target_ref=country_location.id,
        )
        observable_to_city = Relationship(
            id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
            relationship_type="located-at",
            source_ref=observable_id,
            target_ref=city_location.id,
            confidence=self.helper.connect_confidence_level,
        )
        return Bundle(
            objects=[
                country_location,
                city_location,
                city_to_country,
                observable_to_city,
            ],
            allow_custom=True,
        ).serialize()

    def _process_message(self, data):
        """Enrich one observable: check TLP, query ipinfo, send the bundle.

        Raises ValueError when the observable's TLP exceeds the configured
        maximum or the returned country code is unknown.
        """
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_cyber_observable.read(id=entity_id)
        # Extract TLP
        tlp = "TLP:WHITE"
        for marking_definition in observable["objectMarking"]:
            if marking_definition["definition_type"] == "TLP":
                tlp = marking_definition["definition"]
        if not OpenCTIConnectorHelper.check_max_tlp(tlp, self.max_tlp):
            raise ValueError(
                "Do not send any data, TLP of the observable is greater than MAX TLP"
            )
        # Extract IP from entity data
        observable_id = observable["standard_id"]
        observable_value = observable["value"]
        # Get the geo loc from the API
        api_url = "https://ipinfo.io/" + observable_value + "/json/?token=" + self.token
        response = requests.request(
            "GET",
            api_url,
            headers={
                "accept": "application/json",
                "content-type": "application/json"
            },
        )
        json_data = response.json()
        country = pycountry.countries.get(alpha_2=json_data["country"])
        if country is None:
            raise ValueError(
                "IpInfo was not able to find a country for this IP address")
        bundle = self._generate_stix_bundle(country, json_data["city"],
                                            json_data["loc"], observable_id)
        bundles_sent = self.helper.send_stix2_bundle(bundle)
        return "Sent " + str(
            len(bundles_sent)) + " stix bundle(s) for worker import"

    # Start the main loop
    def start(self):
        """Start listening for enrichment messages."""
        self.helper.listen(self._process_message)
class FireEye:
    """Connector importing indicators and reports from the FireEye
    Intelligence API (STIX 2.1 collections) into OpenCTI."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.fireeye_api_url = get_config_variable("FIREEYE_API_URL",
                                                   ["fireeye", "api_url"],
                                                   config)
        self.fireeye_api_v3_public = get_config_variable(
            "FIREEYE_API_V3_PUBLIC", ["fireeye", "api_v3_public"], config)
        self.fireeye_api_v3_secret = get_config_variable(
            "FIREEYE_API_V3_SECRET", ["fireeye", "api_v3_secret"], config)
        # Comma-separated list, e.g. "indicators,reports".
        self.fireeye_collections = get_config_variable(
            "FIREEYE_COLLECTIONS", ["fireeye", "collections"],
            config).split(",")
        self.fireeye_import_start_date = get_config_variable(
            "FIREEYE_IMPORT_START_DATE",
            ["fireeye", "import_start_date"],
            config,
        )
        self.fireeye_interval = get_config_variable("FIREEYE_INTERVAL",
                                                    ["fireeye", "interval"],
                                                    config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        # Unix timestamp used as the lower bound for collection queries.
        self.added_after = parse(self.fireeye_import_start_date).timestamp()
        # Author identity attached to imported objects lacking one.
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="FireEye, Inc.",
            description=
            "FireEye is a publicly traded cybersecurity company headquartered in Milpitas, California. It has been involved in the detection and prevention of major cyber attacks. It provides hardware, software, and services to investigate cybersecurity attacks, protect against malicious software, and analyze IT security risks. FireEye was founded in 2004.",
        )
        # Commercial marking appended to every imported object.
        self.marking = self.helper.api.marking_definition.create(
            definition_type="COMMERCIAL",
            definition="FIREEYE",
            x_opencti_order=99,
            x_opencti_color="#a01526",
        )
        # Init variables
        self.auth_token = None
        self._get_token()

    def get_interval(self):
        """Return the run interval in seconds (configured value is in minutes)."""
        return int(self.fireeye_interval) * 60

    def _get_token(self):
        """Obtain an OAuth client-credentials token; raises ValueError on failure."""
        r = requests.post(
            self.fireeye_api_url + "/token",
            auth=HTTPBasicAuth(self.fireeye_api_v3_public,
                               self.fireeye_api_v3_secret),
            data={"grant_type": "client_credentials"},
        )
        if r.status_code != 200:
            raise ValueError("FireEye Authentication failed")
        data = r.json()
        self.auth_token = data.get("access_token")

    def _search(self, stix_id, retry=False):
        """Look up a single STIX object by id in the FireEye collections.

        Returns the HTTP response on success, None when the type is not
        searchable or the API returns no content; refreshes the token once
        on a 401/403 before giving up.
        """
        # Throttle: crude rate limiting before every search call.
        time.sleep(3)
        self.helper.log_info("Searching for " + stix_id)
        headers = {
            "authorization": "Bearer " + self.auth_token,
            "accept": "application/vnd.oasis.stix+json; version=2.1",
            "x-app-name": "opencti-connector-4.3.0",
        }
        # Query template; placeholders are substituted below.
        body = """
        {
            "queries": [
                {
                    "type": "ENTITY_TYPE",
                    "query": "id = 'ENTITY_ID'"
                }
            ],
            "include_connected_objects": false
        }
        """
        # STIX ids are "<type>--<uuid>".
        entity_type = stix_id.split("--")[0]
        # searchable_types is a module-level allowlist defined elsewhere.
        if entity_type not in searchable_types:
            return None
        body = body.replace("ENTITY_TYPE",
                            entity_type).replace("ENTITY_ID", stix_id)
        r = requests.post(self.fireeye_api_url + "/collections/search",
                          data=body,
                          headers=headers)
        if r.status_code == 200:
            return r
        elif (r.status_code == 401 or r.status_code == 403) and not retry:
            # Token may have expired: refresh once and retry.
            self._get_token()
            return self._search(stix_id, True)
        elif r.status_code == 204 or r.status_code == 205:
            # No content for this query.
            return None
        elif r.status_code == 401 or r.status_code == 403:
            raise ValueError("Query failed, permission denied")
        else:
            # NOTE(review): stray print of the response object — consider
            # routing through self.helper.log_error instead.
            print(r)
            raise ValueError("An unknown error occurred")

    def _query(self, url, retry=False):
        """GET ``url`` with auth headers; refresh the token once on 401/403.

        Returns the HTTP response on success, raises ValueError otherwise.
        """
        headers = {
            "authorization": "Bearer " + self.auth_token,
            "accept": "application/vnd.oasis.stix+json; version=2.1",
            "x-app-name": "opencti-connector-4.3.0",
        }
        r = requests.get(url, headers=headers)
        if r.status_code == 200:
            return r
        elif (r.status_code == 401 or r.status_code == 403) and not retry:
            self._get_token()
            return self._query(url, True)
        elif r.status_code == 401 or r.status_code == 403:
            raise ValueError("Query failed, permission denied")
        else:
            raise ValueError("An unknown error occurred")

    def _send_entity(self, bundle, work_id):
        """Normalize and import a single-entity bundle fetched from FireEye.

        Rewrites threat-actor objects to intrusion-set, fills in the FireEye
        author identity and markings, then sends the bundle.
        """
        # NOTE(review): len(bundle) measures the dict's key count, not the
        # number of objects — presumably len(bundle["objects"]) was intended;
        # behavior is the same for non-empty dicts. TODO confirm.
        if "objects" in bundle and len(bundle) > 0:
            final_objects = []
            for stix_object in bundle["objects"]:
                # FireEye models actors as threat-actor; OpenCTI expects
                # intrusion-set here.
                if stix_object["type"] == "threat-actor":
                    stix_object["type"] = "intrusion-set"
                    stix_object["id"] = stix_object["id"].replace(
                        "threat-actor", "intrusion-set")
                if "created_by_ref" not in stix_object:
                    stix_object["created_by_ref"] = self.identity[
                        "standard_id"]
                if stix_object["type"] != "marking-definition":
                    # TLP:AMBER (well-known id) plus the FireEye commercial
                    # marking created in __init__.
                    stix_object["object_marking_refs"] = [
                        "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82"
                    ]
                    stix_object["object_marking_refs"].append(
                        self.marking["standard_id"])
                final_objects.append(stix_object)
            final_bundle = {"type": "bundle", "objects": final_objects}
            self.helper.send_stix2_bundle(
                json.dumps(final_bundle),
                update=self.update_existing_data,
                work_id=work_id,
            )

    def _import_collection(self,
                           collection,
                           last_id_modified_timestamp=None,
                           last_id=None,
                           work_id=None):
        """Page through one FireEye collection and import each page.

        Follows the ``Link`` response header for pagination and stops when
        a page repeats the previously seen last object id. Returns the
        pagination cursor (``last_id_modified_timestamp``/``last_id``) to
        persist as connector state.
        """
        have_next_page = True
        url = None
        last_object = None
        while have_next_page:
            if url is None:
                # First page: build the query from the stored cursor.
                if last_id_modified_timestamp is not None:
                    url = (self.fireeye_api_url + "/collections/" +
                           collection + "/objects" + "?added_after=" +
                           str(self.added_after) + "&length=100" +
                           "&last_id_modified_timestamp=" +
                           str(last_id_modified_timestamp))
                else:
                    url = (self.fireeye_api_url + "/collections/" +
                           collection + "/objects" + "?added_after=" +
                           str(self.added_after) + "&length=100")
            result = self._query(url)
            parsed_result = json.loads(result.text)
            # NOTE(review): same len()-on-dict pattern as in _send_entity;
            # also, if the response lacks "objects" this branch is skipped
            # and have_next_page is never cleared — presumably unreachable
            # because _query raises on non-200. TODO confirm.
            if "objects" in parsed_result and len(parsed_result) > 0:
                last_object = parsed_result["objects"][-1]
                object_ids = [
                    stix_object["id"]
                    for stix_object in parsed_result["objects"]
                ]
                # A repeated last id means we have caught up: stop paging.
                if last_object["id"] != last_id:
                    final_objects = []
                    for stix_object in parsed_result["objects"]:
                        if stix_object["type"] == "threat-actor":
                            stix_object["type"] = "intrusion-set"
                            stix_object["id"] = stix_object["id"].replace(
                                "threat-actor", "intrusion-set")
                        if stix_object["type"] == "relationship":
                            # If the source_ref is not in the current bundle
                            if stix_object["source_ref"] not in object_ids:
                                # Search entity in OpenCTI
                                opencti_entity = (
                                    self.helper.api.stix_domain_object.read(
                                        id=stix_object["source_ref"]))
                                # If the entity is not found
                                if opencti_entity is None:
                                    # Search the entity in FireEye
                                    fireeye_entity = self._search(
                                        stix_object["source_ref"])
                                    # If the entity is found
                                    if fireeye_entity is not None:
                                        fireeye_entity_decoded = json.loads(
                                            fireeye_entity.text)
                                        # Send the entity before this bundle
                                        self._send_entity(
                                            fireeye_entity_decoded, work_id)
                                stix_object["source_ref"] = stix_object[
                                    "source_ref"].replace(
                                        "threat-actor", "intrusion-set")
                            # Search if the entity is not in bundle
                            if stix_object["target_ref"] not in object_ids:
                                opencti_entity = (
                                    self.helper.api.stix_domain_object.read(
                                        id=stix_object["target_ref"]))
                                if opencti_entity is None:
                                    fireeye_entity = self._search(
                                        stix_object["target_ref"])
                                    if fireeye_entity is not None:
                                        fireeye_entity_decoded = json.loads(
                                            fireeye_entity.text)
                                        self._send_entity(
                                            fireeye_entity_decoded, work_id)
                                stix_object["target_ref"] = stix_object[
                                    "target_ref"].replace(
                                        "threat-actor", "intrusion-set")
                        # Resolve any object_refs (e.g. report contents) that
                        # are not part of the current page.
                        if ("object_refs" in stix_object
                                and len(stix_object["object_refs"]) > 0):
                            for object_ref in stix_object["object_refs"]:
                                if object_ref not in object_ids:
                                    opencti_entity = (
                                        self.helper.api.stix_domain_object.
                                        read(id=object_ref))
                                    if opencti_entity is None:
                                        fireeye_entity = self._search(
                                            object_ref)
                                        if fireeye_entity is not None:
                                            fireeye_entity_decoded = json.loads(
                                                fireeye_entity.text)
                                            self._send_entity(
                                                fireeye_entity_decoded,
                                                work_id)
                        if "created_by_ref" not in stix_object:
                            stix_object["created_by_ref"] = self.identity[
                                "standard_id"]
                        if stix_object["type"] != "marking-definition":
                            stix_object["object_marking_refs"] = [
                                "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82"
                            ]
                            stix_object["object_marking_refs"].append(
                                self.marking["standard_id"])
                        final_objects.append(stix_object)
                    final_bundle = {
                        "type": "bundle",
                        "objects": final_objects
                    }
                    self.helper.send_stix2_bundle(
                        json.dumps(final_bundle),
                        update=self.update_existing_data,
                        work_id=work_id,
                    )
                    headers = result.headers
                    if "Link" in headers:
                        have_next_page = True
                        # Header shape: "<url>; rel=next" — strip the angle
                        # brackets around the url.
                        link = headers["Link"].split(";")
                        url = link[0][1:-1]
                        last_id_modified_timestamp = parse_qs(
                            urlparse(
                                url).query)["last_id_modified_timestamp"][0]
                    else:
                        have_next_page = False
                else:
                    have_next_page = False
        # NOTE(review): if no page was processed, last_object is still None
        # and `"id" in last_object` raises TypeError — a guard like
        # `last_object is not None and ...` looks intended. TODO confirm.
        return {
            "last_id_modified_timestamp": last_id_modified_timestamp,
            "last_id": last_object["id"] if "id" in last_object else None,
        }

    def run(self):
        """Main loop: synchronize the configured collections, persisting the
        pagination cursor per collection in connector state."""
        while True:
            try:
                self.helper.log_info("Synchronizing with FireEye API...")
                timestamp = int(time.time())
                now = datetime.datetime.utcfromtimestamp(timestamp)
                friendly_name = "FireEye run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                current_state = self.helper.get_state()
                # First run: seed the per-collection cursor structure.
                if (current_state is None
                        or "last_id_modified_timestamp" not in current_state):
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators": None,
                            "reports": None,
                        },
                        "last_id": {
                            "indicators": None,
                            "reports": None,
                        },
                    })
                    current_state = self.helper.get_state()
                last_id_modified_timestamp = current_state[
                    "last_id_modified_timestamp"]
                last_id = current_state["last_id"]
                if "indicators" in self.fireeye_collections:
                    self.helper.log_info(
                        "Get indicators created after " +
                        str(last_id_modified_timestamp["indicators"]))
                    indicators_last = self._import_collection(
                        "indicators",
                        last_id_modified_timestamp["indicators"],
                        last_id["indicators"],
                        work_id,
                    )
                    # Re-read state so the other collection's cursor is
                    # preserved when writing back.
                    current_state = self.helper.get_state()
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators":
                            indicators_last["last_id_modified_timestamp"],
                            "reports":
                            current_state["last_id_modified_timestamp"]
                            ["reports"],
                        },
                        "last_id": {
                            "indicators": indicators_last["last_id"],
                            "reports": current_state["last_id"]["reports"],
                        },
                    })
                if "reports" in self.fireeye_collections:
                    self.helper.log_info(
                        "Get reports created after " +
                        str(last_id_modified_timestamp["reports"]))
                    reports_last = self._import_collection(
                        "reports",
                        last_id_modified_timestamp["reports"],
                        last_id["reports"],
                        work_id,
                    )
                    current_state = self.helper.get_state()
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators":
                            current_state["last_id_modified_timestamp"]
                            ["indicators"],
                            "reports":
                            reports_last["last_id_modified_timestamp"],
                        },
                        "last_id": {
                            "indicators":
                            current_state["last_id"]["indicators"],
                            "reports": reports_last["last_id"],
                        },
                    })
                message = "End of synchronization"
                self.helper.api.work.to_processed(work_id, message)
                self.helper.log_info(message)
                time.sleep(self.get_interval())
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                # Deliberate catch-all: the daemon loop must survive
                # transient failures.
                self.helper.log_error(str(e))
                time.sleep(60)
class HybridAnalysis:
    """Internal-enrichment connector that submits observables to the
    Hybrid Analysis sandbox and attaches the resulting knowledge
    (hashes, score, TTPs, domains, IPs, dropped files) back to OpenCTI.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Hybrid Analysis API credentials and tuning
        self.api_key = get_config_variable("HYBRID_ANALYSIS_TOKEN",
                                           ["hybrid_analysis", "api_key"],
                                           config)
        # Sandbox environment, defaults to 110 (Windows 7 64-bit per HA docs
        # — TODO confirm against current HA environment list)
        self.environment_id = get_config_variable(
            "HYBRID_ANALYSIS_ENVIRONMENT_ID",
            ["hybrid_analysis", "environment_id"],
            config,
            True,
            110,
        )
        self.max_tlp = get_config_variable("HYBRID_ANALYSIS_MAX_TLP",
                                           ["hybrid_analysis", "max_tlp"],
                                           config)
        self.api_url = "https://www.hybrid-analysis.com/api/v2"
        self.headers = {
            "api-key": self.api_key,
            "user-agent": "OpenCTI Hybrid Analysis Connector - Version 4.5.5",
            "accept": "application/json",
        }
        # Author identity used as created_by_ref on every generated object
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="Hybrid Analysis",
            description="Hybrid Analysis Sandbox.",
        )["standard_id"]
        self._CONNECTOR_RUN_INTERVAL_SEC = 60 * 60

    def _send_knowledge(self, observable, report):
        """Translate a Hybrid Analysis report into OpenCTI knowledge.

        Updates the observable's hashes/score in place via the API, then
        builds and sends a STIX bundle of attack patterns, contacted
        domains/IPs and dropped files related to the observable.
        Returns a human-readable status string.
        """
        bundle_objects = []
        final_observable = observable
        if observable["entity_type"] in ["StixFile", "Artifact"]:
            # Enrich the file observable with the hashes reported by HA
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"], key="hashes.MD5",
                value=report["md5"])
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"], key="hashes.SHA-1",
                value=report["sha1"])
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="hashes.SHA-256",
                value=report["sha256"],
            )
            if "name" not in final_observable or final_observable[
                    "name"] is None:
                # No name yet: record the HA submission name as an alias
                self.helper.api.stix_cyber_observable.update_field(
                    id=final_observable["id"],
                    key="x_opencti_additional_names",
                    value=report["submit_name"],
                    operation="add",
                )
            if final_observable["entity_type"] == "StixFile":
                self.helper.api.stix_cyber_observable.update_field(
                    id=final_observable["id"],
                    key="size",
                    value=str(report["size"]),
                )
            self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="x_opencti_score",
                value=str(report["threat_score"]),
            )
        # Create external reference
        external_reference = self.helper.api.external_reference.create(
            source_name="Hybrid Analysis",
            url="https://www.hybrid-analysis.com/sample/" + report["sha256"],
            description="Hybrid Analysis Report",
        )
        self.helper.api.stix_cyber_observable.add_external_reference(
            id=final_observable["id"],
            external_reference_id=external_reference["id"],
        )
        # Create tags
        for tag in report["type_short"]:
            tag_ha = self.helper.api.label.create(value=tag, color="#0059f7")
            self.helper.api.stix_cyber_observable.add_label(
                id=final_observable["id"], label_id=tag_ha["id"])
        # Attach the TTPs ("mitre_attcks" is the HA API's key spelling)
        for tactic in report["mitre_attcks"]:
            if (tactic["malicious_identifiers_count"] > 0
                    or tactic["suspicious_identifiers_count"] > 0):
                attack_pattern = AttackPattern(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "attack-pattern"),
                    created_by_ref=self.identity,
                    name=tactic["technique"],
                    custom_properties={
                        "x_mitre_id": tactic["attck_id"],
                    },
                    object_marking_refs=[TLP_WHITE],
                )
                relationship = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="uses",
                    created_by_ref=self.identity,
                    source_ref=final_observable["standard_id"],
                    target_ref=attack_pattern.id,
                    object_marking_refs=[TLP_WHITE],
                )
                bundle_objects.append(attack_pattern)
                bundle_objects.append(relationship)
        # Attach the domains
        for domain in report["domains"]:
            domain_stix = SimpleObservable(
                id=OpenCTIStix2Utils.generate_random_stix_id(
                    "x-opencti-simple-observable"),
                key="Domain-Name.value",
                value=domain,
                created_by_ref=self.identity,
                object_marking_refs=[TLP_WHITE],
            )
            relationship = Relationship(
                id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                relationship_type="communicates-with",
                created_by_ref=self.identity,
                source_ref=final_observable["standard_id"],
                target_ref=domain_stix.id,
                object_marking_refs=[TLP_WHITE],
            )
            bundle_objects.append(domain_stix)
            bundle_objects.append(relationship)
        # Attach the IP addresses
        for host in report["hosts"]:
            host_stix = SimpleObservable(
                id=OpenCTIStix2Utils.generate_random_stix_id(
                    "x-opencti-simple-observable"),
                key=self.detect_ip_version(host) + ".value",
                value=host,
                created_by_ref=self.identity,
                object_marking_refs=[TLP_WHITE],
            )
            relationship = Relationship(
                id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                relationship_type="communicates-with",
                created_by_ref=self.identity,
                source_ref=final_observable["standard_id"],
                target_ref=host_stix.id,
                object_marking_refs=[TLP_WHITE],
            )
            bundle_objects.append(host_stix)
            bundle_objects.append(relationship)
        # Attach other files (only those HA flags with a threat level)
        for file in report["extracted_files"]:
            if file["threat_level"] > 0:
                file_stix = File(
                    id=OpenCTIStix2Utils.generate_random_stix_id("file"),
                    hashes={
                        "MD5": file["md5"],
                        "SHA-1": file["sha1"],
                        "SHA-256": file["sha256"],
                    },
                    size=file["size"],
                    name=file["name"],
                    custom_properties={"x_opencti_labels": file["type_tags"]},
                    created_by_ref=self.identity,
                    object_marking_refs=[TLP_WHITE],
                )
                relationship = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="drops",
                    created_by_ref=self.identity,
                    source_ref=final_observable["standard_id"],
                    target_ref=file_stix.id,
                )
                bundle_objects.append(file_stix)
                bundle_objects.append(relationship)
        # NOTE(review): this second pass over report["mitre_attcks"] repeats
        # the TTP loop above (without TLP markings), producing duplicate
        # attack-pattern/relationship objects in the bundle — confirm intent.
        for tactic in report["mitre_attcks"]:
            if (tactic["malicious_identifiers_count"] > 0
                    or tactic["suspicious_identifiers_count"] > 0):
                attack_pattern = AttackPattern(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "attack-pattern"),
                    created_by_ref=self.identity,
                    name=tactic["technique"],
                    custom_properties={
                        "x_mitre_id": tactic["attck_id"],
                    },
                )
                relationship = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="uses",
                    created_by_ref=self.identity,
                    source_ref=final_observable["standard_id"],
                    target_ref=attack_pattern.id,
                )
                bundle_objects.append(attack_pattern)
                bundle_objects.append(relationship)
        if len(bundle_objects) > 0:
            bundle = Bundle(objects=bundle_objects).serialize()
            bundles_sent = self.helper.send_stix2_bundle(bundle)
            return ("Sent " + str(len(bundles_sent)) +
                    " stix bundle(s) for worker import")
        else:
            return "Nothing to attach"

    def _submit_url(self, observable):
        """Submit a URL observable to the sandbox, poll until the analysis
        finishes (30s intervals), then attach the resulting knowledge."""
        self.helper.log_info("Observable is a URL, triggering the sandbox...")
        values = {
            "url": observable["observable_value"],
            "environment_id": self.environment_id,
        }
        r = requests.post(
            self.api_url + "/submit/url",
            headers=self.headers,
            data=values,
        )
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        job_id = result["job_id"]
        state = "IN_QUEUE"
        self.helper.log_info("Analysis in progress...")
        # Poll the job state until it leaves the queued/running states
        while state == "IN_QUEUE" or state == "IN_PROGRESS":
            r = requests.get(
                self.api_url + "/report/" + job_id + "/state",
                headers=self.headers,
            )
            if r.status_code > 299:
                raise ValueError(r.text)
            result = r.json()
            state = result["state"]
            time.sleep(30)
        if state == "ERROR":
            raise ValueError(result["error"])
        r = requests.get(
            self.api_url + "/report/" + job_id + "/summary",
            headers=self.headers,
        )
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        self.helper.log_info("Analysis done, attaching knowledge...")
        return self._send_knowledge(observable, result)

    def _trigger_sandbox(self, observable):
        """Download the observable's attached file from OpenCTI, upload it to
        the sandbox, poll until done, then attach the knowledge."""
        self.helper.log_info("File not found in HA, triggering the sandbox...")
        file_name = observable["importFiles"][0]["name"]
        file_uri = observable["importFiles"][0]["id"]
        # NOTE(review): prefixes the HA api_url, not the OpenCTI URL —
        # confirm this is the intended base for fetch_opencti_file.
        file_content = self.helper.api.fetch_opencti_file(
            self.api_url + file_uri, True)
        # Write the file
        f = open(file_name, "wb")
        f.write(file_content)
        f.close()
        files = {"file": open(file_name, "rb")}
        values = {"environment_id": self.environment_id}
        r = requests.post(
            self.api_url + "/submit/file",
            headers=self.headers,
            files=files,
            data=values,
        )
        # Temporary local copy is removed once the upload request returns
        os.remove(file_name)
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        job_id = result["job_id"]
        state = "IN_QUEUE"
        self.helper.log_info("Analysis in progress...")
        # Poll the job state until it leaves the queued/running states
        while state == "IN_QUEUE" or state == "IN_PROGRESS":
            r = requests.get(
                self.api_url + "/report/" + job_id + "/state",
                headers=self.headers,
            )
            if r.status_code > 299:
                raise ValueError(r.text)
            result = r.json()
            state = result["state"]
            time.sleep(30)
        if state == "ERROR":
            raise ValueError(result["error"])
        r = requests.get(
            self.api_url + "/report/" + job_id + "/summary",
            headers=self.headers,
        )
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        self.helper.log_info("Analysis done, attaching knowledge...")
        return self._send_knowledge(observable, result)

    def _process_observable(self, observable):
        """Dispatch an observable: reuse an existing HA report by hash,
        submit URLs/domains to the sandbox, or upload an attached file.
        Returns a status string."""
        self.helper.log_info("Processing the observable " +
                             observable["observable_value"])
        # If File or Artifact
        result = []
        if observable["entity_type"] in ["StixFile", "Artifact"]:
            # First, check if the file is already present in HA
            values = {"hash": observable["observable_value"]}
            r = requests.post(
                self.api_url + "/search/hash",
                headers=self.headers,
                data=values,
            )
            if r.status_code > 299:
                raise ValueError(r.text)
            result = r.json()
        if len(result) > 0:
            # One report is found
            self.helper.log_info("Already found in HA, attaching knowledge...")
            return self._send_knowledge(observable, result[0])
        # If URL
        if observable["entity_type"] in [
                "Url", "Domain-Name", "X-OpenCTI-Hostname"
        ]:
            return self._submit_url(observable)
        # If no file
        if "importFiles" not in observable or len(
                observable["importFiles"]) == 0:
            return "Observable not found and no file to upload in the sandbox"
        return self._trigger_sandbox(observable)

    def _process_message(self, data):
        """Listener callback: load the observable, enforce the max TLP
        policy, then enrich it. Raises ValueError on missing observable
        or TLP violation."""
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_cyber_observable.read(id=entity_id)
        if observable is None:
            raise ValueError(
                "Observable not found "
                "(may be linked to data seggregation, check your group and permissions)"
            )
        # Extract TLP (defaults to white when the observable has no marking)
        tlp = "TLP:WHITE"
        for marking_definition in observable["objectMarking"]:
            if marking_definition["definition_type"] == "TLP":
                tlp = marking_definition["definition"]
        if not OpenCTIConnectorHelper.check_max_tlp(tlp, self.max_tlp):
            raise ValueError(
                "Do not send any data, TLP of the observable is greater than MAX TLP"
            )
        return self._process_observable(observable)

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)

    def detect_ip_version(self, value):
        """Heuristic IPv4/IPv6 discrimination by string length (a dotted
        IPv4 quad is at most 15 characters)."""
        if len(value) > 16:
            return "IPv6-Addr"
        else:
            return "IPv4-Addr"
class CyberThreatCoalition:
    """External-import connector for the COVID-19 Cyber Threat Coalition
    vetted blacklists: periodically fetches the domain/ip/url/hash lists,
    converts each entry to a STIX indicator and ships one global report.
    """

    # Map of internal observable types to STIX pattern templates
    _OPENCTI_TYPE = {
        "domain": "[domain-name:value = '{}']",
        "ipv4-addr": "[ipv4-addr:value = '{}']",
        "file-sha256": "[file:hashes.SHA256 = '{}']",
        "file-sha1": "[file:hashes.SHA1 = '{}']",
        "file-md5": "[file:hashes.MD5 = '{}']",
        "url": "[url:value = '{}']",
    }

    # Key under which the last-run timestamp is persisted in connector state
    _STATE_LAST_RUN = "last_run"

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cyber_threat_coalition_interval = get_config_variable(
            "CYBER_THREAT_COALITION_INTERVAL",
            ["cyber-threat-coalition", "interval_sec"],
            config,
            True,
        )
        self.cyber_threat_coalition_base_url = get_config_variable(
            "CYBER_THREAT_COALITION_BASE_URL",
            ["cyber-threat-coalition", "base_url"],
            config,
            False,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self) -> int:
        """Return the configured run interval in seconds."""
        return int(self.cyber_threat_coalition_interval)

    @staticmethod
    def get_hash_type(hash_value):
        """Classify a hex digest by length as md5/sha1/sha256; returns
        None (implicitly) for anything that matches none of them."""
        if re.match(r"^[0-9a-fA-F]{32}$", hash_value):
            return "file-md5"
        elif re.match(r"^[0-9a-fA-F]{40}$", hash_value):
            return "file-sha1"
        elif re.match(r"^[0-9a-fA-F]{64}$", hash_value):
            return "file-sha256"

    def fetch_and_send(self):
        """Fetch all four blacklist collections, convert each line to a
        STIX indicator, and send one bundle topped by a global report."""
        bundle_objects = list()
        # create an identity for the coalition team
        organization = stix2.Identity(
            name="Cyber Threat Coalition Team",
            identity_class="organization",
            description="Team of Experts collecting and sharing pandemic related "
            "cyber threat intelligence during the COVID-19 crisis time",
        )
        # add organization in bundle
        bundle_objects.append(organization)
        report_object_refs = list()
        for collection in ["domain", "ip", "url", "hash"]:
            # fetch blacklist
            url = self.cyber_threat_coalition_base_url + "/" + str(collection) + ".txt"
            response = requests.get(url=url)
            if response.status_code != 200:
                # NOTE(review): the {0}/{1} placeholders are passed as extra
                # Exception args and never interpolated into the message —
                # confirm whether .format() was intended here.
                raise Exception(
                    "Unable to fetch {0} blacklist, server returned status: {1}",
                    collection,
                    response.status_code,
                )
            opencti_type = None
            pattern_type = "stix"
            tags = [{"tag_type": "Event", "value": "COVID-19", "color": "#fc036b"}]
            # parse content, skipping blank and '#'-comment lines
            for data in response.iter_lines(decode_unicode=True):
                if data and not data.startswith("#"):
                    if collection == "domain":
                        opencti_type = "domain"
                    elif collection == "ip":
                        opencti_type = "ipv4-addr"
                    elif collection == "url":
                        opencti_type = "url"
                        data = urllib.parse.quote(data, "/:")
                    elif collection == "hash":
                        # may yield None for unrecognized digests; the
                        # KeyError below is then caught and the IOC skipped
                        opencti_type = self.get_hash_type(data)
                    try:
                        indicator = stix2.Indicator(
                            name=data,
                            pattern=self._OPENCTI_TYPE[opencti_type].format(data),
                            labels=["malicious-activity"],
                            created_by_ref=organization,
                            object_marking_refs=[stix2.TLP_WHITE],
                            custom_properties={
                                CustomProperties.OBSERVABLE_TYPE: opencti_type,
                                CustomProperties.OBSERVABLE_VALUE: data,
                                CustomProperties.PATTERN_TYPE: pattern_type,
                                CustomProperties.TAG_TYPE: tags,
                            },
                        )
                    except Exception as ex:
                        self.helper.log_error(
                            "an exception occurred while converting data to STIX indicator "
                            "for data.value: {} , skipping IOC, exception: {}".format(
                                data, ex
                            )
                        )
                        continue
                    # add indicator in bundle and report_refs
                    bundle_objects.append(indicator)
                    report_object_refs.append(indicator["id"])

        # create a global threat report (fixed id so re-runs update it)
        report_uuid = "report--552b3ae6-8522-409d-8b72-a739bc1926aa"
        report_external_reference = stix2.ExternalReference(
            source_name="Cyber Threat Coalition",
            url="https://www.cyberthreatcoalition.org",
            external_id="COVID19-CTC",
        )
        stix_report = stix2.Report(
            id=report_uuid,
            name="COVID-19 Cyber Threat Coalition (CTC) BlackList",
            type="report",
            description="This report represents the whole COVID-19 CTC blacklist.",
            published=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
            created_by_ref=organization,
            object_marking_refs=[stix2.TLP_WHITE],
            labels=["threat-report"],
            external_references=[report_external_reference],
            object_refs=report_object_refs,
            custom_properties={CustomProperties.TAG_TYPE: tags,},
        )
        # add report in bundle
        bundle_objects.append(stix_report)
        # create stix bundle
        bundle = stix2.Bundle(objects=bundle_objects)
        # send data
        self.helper.send_stix2_bundle(
            bundle=bundle.serialize(), update=self.update_existing_data
        )

    def _load_state(self) -> Dict[str, Any]:
        """Return the persisted connector state, or {} when unset."""
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        """True when a run is due: never ran, or the interval has elapsed."""
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= self.get_interval()

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        """Safe state lookup that tolerates a None state mapping."""
        if state is not None:
            return state.get(key, default)
        return default

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        """Main loop: wake every 60s, run fetch_and_send when scheduled,
        persist the new last_run timestamp; errors are logged and retried."""
        self.helper.log_info("Fetching Cyber Threat Coalition vetted blacklists...")
        while True:
            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()
                self.helper.log_info(f"Loaded state: {current_state}")
                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    # fetch data and send as stix bundle
                    self.fetch_and_send()
                    new_state = current_state.copy()
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()
                    self.helper.log_info(f"Storing new state: {new_state}")
                    self.helper.set_state(new_state)
                    self.helper.log_info(
                        f"State stored, next run in: {self.get_interval()} seconds"
                    )
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        f"Connector will not run, next run in: {new_interval} seconds"
                    )
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as ex:
                self.helper.log_error(str(ex))
                time.sleep(60)
class OpenCTI:
    """Pushes externally fetched threat reports (AlienVault OTX-style
    pulses) into OpenCTI as STIX2 bundles, mapping report industries onto
    the OpenCTI sector taxonomy."""

    # Sector name (upper-cased) -> OpenCTI sector id, filled by _get_octi_sectors
    octi_sectors = {}

    def __init__(self, config, dryrun):
        self.config = config['OpenCTI']
        self.dryrun = dryrun
        self._get_octi_sectors()
        self.connector_config = {
            'name': self.config['connector_name'],
            'confidence_level': 3,
            'entities': 'report, intrusion-set, identity',
            'interval': 0,
            'log_level': 'info'
        }
        confyml_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        confyml = yaml.load(
            open(confyml_path),
            Loader=yaml.FullLoader) if os.path.isfile(confyml_path) else {}
        self.opencti_connector_helper = OpenCTIConnectorHelper(confyml)

    def _get_octi_sectors(self):
        """Download the sector reference bundle and index ids by
        upper-cased name; exits the process on HTTP failure."""
        res = requests.get(self.config['sectors_url'])
        if res.status_code != 200:
            printer.error("Error getting sectors")
            sys.exit(1)
        for sector in res.json()['objects']:
            if 'name' in sector:
                self.octi_sectors[sector['name'].upper()] = sector['id']

    def resolve_type(self, type):
        """Map a feed indicator type to the internal observable type;
        returns None (implicitly) for unknown types. (`type` shadows the
        builtin.)"""
        types = {
            'filehash-md5': 'file-md5',
            'filehash-sha1': 'file-sha1',
            'filehash-sha256': 'file-sha256',
            'filepath': 'file-name',
            'ipv4': 'ipv4-addr',
            'ipv6': 'ipv6-addr',
            'hostname': 'domain',
            'domain': 'domain',
            'url': 'url'
        }
        if type in types:
            return types[type]

    def process_reports(self, reports):
        """Convert each source report into a STIX2 bundle (sectors,
        author, adversary, countries, indicators, report) and send it
        unless dryrun. Reports without industries are skipped."""
        if reports is None:
            printer.error("No results")
            return
        for report in reports:
            name = report["name"]
            # `id` shadows the builtin; kept for parity with the feed field
            id = report["id"]
            stix2_objects = []
            stix2_object_refs = []
            # The feed is inconsistent about the TLP key casing
            if 'tlp' in report:
                tlp_id = REF_TLPS[report['tlp'].upper()]
            elif 'TLP' in report:
                tlp_id = REF_TLPS[report['TLP'].upper()]
            else:
                tlp_id = REF_TLPS['WHITE']
            sectors = report['industries']
            if sectors:
                unmatched_sectors = []
                added_sector = False
                for sector in [html.unescape(x.upper()) for x in sectors]:
                    sector_name = None
                    sector_id = None
                    if sector in SECTOR_MAPPINGS:
                        # sector_ids.append(self.octi_sectors[SECTOR_MAPPINGS[sector]])
                        sector_name = SECTOR_MAPPINGS[sector]
                        try:
                            sector_id = self.octi_sectors[
                                SECTOR_MAPPINGS[sector]]
                        except Exception as e:
                            printer.error(e)
                            continue
                    else:
                        # No explicit mapping: fall back to fuzzy matching
                        printer.debug(f"Looking for sector {sector}")
                        match = difflib.get_close_matches(
                            sector, self.octi_sectors.keys(), 1)
                        if not len(match):
                            printer.error(
                                f"Unable to determine a matching sector for {sector}"
                            )
                            unmatched_sectors.append(sector)
                            continue
                        # sector_ids.append(self.octi_sectors[match[0]])
                        sector_name = match[0]
                        sector_id = self.octi_sectors[match[0]]
                    if sector_name is not None:
                        s = stix2.Identity(id=sector_id,
                                           name=sector_name,
                                           identity_class='class',
                                           custom_properties={
                                               'x_opencti_identity_type':
                                               'sector'
                                           })
                        printer.debug(f"Adding sector {sector_name}")
                        stix2_objects.append(s)
                        stix2_object_refs.append(s)
                        added_sector = True
                if not added_sector:
                    printer.warn("Adding 'UNKNOWN' placeholder sector")
                    s = stix2.Identity(id=self.octi_sectors["UNKNOWN"],
                                       name="Unknown",
                                       identity_class='class',
                                       custom_properties={
                                           'x_opencti_identity_type': 'sector'
                                       })
                    stix2_objects.append(s)
                    stix2_object_refs.append(s)
                description = report['description']
                if len(unmatched_sectors):
                    # Surface unmapped sectors for manual review in the report body
                    description = description + "\n\n###\nUnable to find a match for the following sectors, " \
                                                "please review manually:\n - " + '\n - '.join(unmatched_sectors)
                printer.info(f"Generating STIX2 for {name} ({id})")
                author = stix2.Identity(name=report['author_name'],
                                        identity_class='organization')
                stix2_objects.append(author)
                adversary = None
                if report['adversary']:
                    printer.debug("Adding adversary {}".format(
                        report['adversary']))
                    adversary = stix2.IntrusionSet(name=report['adversary'])
                    stix2_object_refs.append(adversary)
                    stix2_objects.append(adversary)
                if report['targeted_countries']:
                    for country in report['targeted_countries']:
                        printer.debug(f"Adding country {country}")
                        c = stix2.Identity(name=country,
                                           identity_class='organization',
                                           custom_properties={
                                               'x_opencti_identity_type':
                                               'country'
                                           })
                        stix2_objects.append(c)
                        stix2_object_refs.append(c)
                external_refs = []
                for eref in report['references']:
                    external_refs.append(
                        stix2.ExternalReference(source_name=tldextract.extract(
                            eref).registered_domain,
                                                url=eref))
                indicators = report["indicators"]
                if indicators:
                    for indicator in indicators:
                        resolved_type = self.resolve_type(
                            indicator["type"].lower())
                        if resolved_type != None and indicator["is_active"]:
                            observable_type = resolved_type
                            observable_value = indicator["indicator"]
                            pattern_type = 'stix'
                            try:
                                # Build a STIX observation expression unless the
                                # type carries its own pattern dialect
                                if observable_type in PATTERNTYPES:
                                    pattern_type = observable_type
                                elif observable_type not in OPENCTISTIX2:
                                    printer.info("Not in stix2 dict")
                                else:
                                    if 'transform' in OPENCTISTIX2[
                                            observable_type]:
                                        if OPENCTISTIX2[observable_type][
                                                'transform'][
                                                    'operation'] == 'remove_string':
                                            observable_value = observable_value.replace(
                                                OPENCTISTIX2[observable_type]
                                                ['transform']['value'], '')
                                    lhs = stix2.ObjectPath(
                                        OPENCTISTIX2[observable_type]['type'],
                                        OPENCTISTIX2[observable_type]['path'])
                                    observable_value = stix2.ObservationExpression(
                                        stix2.EqualityComparisonExpression(
                                            lhs, observable_value))
                            except Exception as e:
                                printer.error(e)
                                printer.info(
                                    "Could not determine suitable pattern")
                            try:
                                indicator_obj = stix2.Indicator(
                                    name=indicator["indicator"],
                                    description=indicator["description"],
                                    pattern=str(observable_value),
                                    valid_from=indicator["created"],
                                    labels=['malicious-activity'],
                                    created_by_ref=author,
                                    object_marking_refs=[tlp_id],
                                    custom_properties={
                                        'x_opencti_observable_type':
                                        resolved_type,
                                        'x_opencti_observable_value':
                                        indicator["indicator"],
                                        'x_opencti_pattern_type':
                                        pattern_type
                                    })
                                stix2_object_refs.append(indicator_obj)
                                stix2_objects.append(indicator_obj)
                            except Exception as e:
                                printer.error(e)
                                printer.info("Couldn't fetch indicator")
                else:
                    printer.error("No indicators")
                # NOTE(review): rebinds the loop variable `report` to the
                # STIX Report — safe only because the source dict is not
                # used after this point in the iteration.
                report = stix2.Report(name=name,
                                      description=description,
                                      created_by_ref=author,
                                      labels=['threat-report'],
                                      published=report['created'],
                                      created=report['created'],
                                      modified=report['modified'],
                                      object_refs=stix2_object_refs,
                                      object_marking_refs=[tlp_id],
                                      external_references=external_refs)
                stix2_objects.append(report)
                bundle = stix2.Bundle(stix2_objects).serialize()
                if not self.dryrun:
                    self.opencti_connector_helper.send_stix2_bundle(
                        bundle, None, True, False)
                    printer.info("Sending to OpenCTI")
                #printer.debug(str(bundle))
            else:
                printer.debug(f"No sectors, disregarding '{name}'")
class OpenCTI:
    """External-import connector for the OpenCTI reference datasets.

    Downloads the sectors and geography STIX bundles from their
    configured URLs and imports them on a day-based interval.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        if os.path.isfile(config_file_path):
            # Fix: close the config file deterministically (the original
            # passed a bare open() into yaml.load and leaked the handle).
            with open(config_file_path) as config_file:
                config = yaml.load(config_file, Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config: dataset URLs, interval (days) and update policy
        self.opencti_sectors_file_url = get_config_variable(
            'CONFIG_SECTORS_FILE_URL', ['config', 'sectors_file_url'], config)
        self.opencti_geography_file_url = get_config_variable(
            'CONFIG_GEOGRAPHY_FILE_URL', ['config', 'geography_file_url'],
            config)
        self.opencti_interval = get_config_variable('CONFIG_INTERVAL',
                                                    ['config', 'interval'],
                                                    config, True)
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA',
            ['connector', 'update_existing_data'], config)

    def get_interval(self):
        """Return the configured interval converted from days to seconds."""
        return int(self.opencti_interval) * 60 * 60 * 24

    def run(self):
        """Main loop: wake every 60s; once the interval has elapsed,
        download and import both dataset bundles, then persist last_run.

        KeyboardInterrupt/SystemExit stop the connector; any other error
        is logged and retried on the next wake-up.
        """
        self.helper.log_info('Fetching OpenCTI datasets...')
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and 'last_run' in current_state:
                    last_run = current_state['last_run']
                    self.helper.log_info(
                        'Connector last run: ' +
                        datetime.utcfromtimestamp(last_run).strftime(
                            '%Y-%m-%d %H:%M:%S'))
                else:
                    last_run = None
                    self.helper.log_info('Connector has never run')
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                        (int(self.opencti_interval) - 1) * 60 * 60 * 24)):
                    # Fix: use the responses as context managers so the
                    # HTTP connections are closed promptly.
                    with urllib.request.urlopen(
                            self.opencti_sectors_file_url) as response:
                        sectors_data = response.read()
                    self.helper.send_stix2_bundle(sectors_data.decode('utf-8'),
                                                  self.helper.connect_scope,
                                                  self.update_existing_data)
                    with urllib.request.urlopen(
                            self.opencti_geography_file_url) as response:
                        geography_data = response.read()
                    self.helper.send_stix2_bundle(
                        geography_data.decode('utf-8'),
                        self.helper.connect_scope, self.update_existing_data)
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        'Connector successfully run, storing last_run as ' +
                        str(timestamp))
                    self.helper.set_state({'last_run': timestamp})
                    self.helper.log_info(
                        'Last_run stored, next run in: ' +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        ' days')
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        'Connector will not run, next run in: ' +
                        str(round(new_interval / 60 / 60 / 24, 2)) + ' days')
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info('Connector stop')
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class URLhaus:
    """External-import connector for the Abuse.ch URLhaus feed: downloads
    the CSV dump on a day-based interval and imports each (online) URL as
    a scored observable with an indicator."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.urlhaus_csv_url = get_config_variable("URLHAUS_CSV_URL",
                                                   ["urlhaus", "csv_url"],
                                                   config)
        # When true, also import URLs whose feed status is not "online"
        self.urlhaus_import_offline = get_config_variable(
            "URLHAUS_IMPORT_OFFLINE", ["urlhaus", "import_offline"], config,
            False, True)
        self.urlhaus_interval = get_config_variable("URLHAUS_INTERVAL",
                                                    ["urlhaus", "interval"],
                                                    config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        # Author identity for all imported observables
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="Abuse.ch",
            description=
            "abuse.ch is operated by a random swiss guy fighting malware for non-profit, running a couple of projects helping internet service providers and network operators protecting their infrastructure from malware.",
        )

    def get_interval(self):
        """Return the configured interval converted from days to seconds."""
        return int(self.urlhaus_interval) * 60 * 60 * 24

    # NOTE(review): no-op stub, never referenced in this class — confirm
    # whether it can be removed or was meant to schedule the next run.
    def next_run(self, seconds):
        return

    def run(self):
        """Main loop: wake every 60s; when the interval has elapsed,
        download the CSV to a temp file beside the module, convert rows to
        SimpleObservables and send them as one bundle tied to a work id."""
        self.helper.log_info("Fetching URLhaus dataset...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: " +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime("%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                        (int(self.urlhaus_interval) - 1) * 60 * 60 * 24)):
                    self.helper.log_info("Connector will run!")
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "URLhaus run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name)
                    # NOTE(review): this inner try logs download/parse errors
                    # but the run is still marked processed and last_run is
                    # stored below — confirm that partial runs should count.
                    try:
                        response = urllib.request.urlopen(
                            self.urlhaus_csv_url,
                            context=ssl.create_default_context(
                                cafile=certifi.where()),
                        )
                        image = response.read()
                        with open(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.csv",
                                "wb",
                        ) as file:
                            file.write(image)
                        # Re-open for reading, skipping '#' comment lines
                        fp = open(
                            os.path.dirname(os.path.abspath(__file__)) +
                            "/data.csv",
                            "r",
                        )
                        rdr = csv.reader(filter(lambda row: row[0] != "#", fp))
                        bundle_objects = []
                        # CSV columns (per feed layout): [2]=url, [3]=status,
                        # [4]=threat, [5]=tags, [6]=urlhaus link — TODO
                        # confirm against the current URLhaus CSV header
                        for row in rdr:
                            if row[3] == "online" or self.urlhaus_import_offline:
                                external_reference = ExternalReference(
                                    source_name="Abuse.ch URLhaus",
                                    url=row[6],
                                    description="URLhaus repository URL",
                                )
                                stix_observable = SimpleObservable(
                                    id=OpenCTIStix2Utils.
                                    generate_random_stix_id(
                                        "x-opencti-simple-observable"),
                                    key="Url.value",
                                    value=row[2],
                                    description=row[4],
                                    x_opencti_score=80,
                                    object_marking_refs=[TLP_WHITE],
                                    labels=row[5].split(","),
                                    created_by_ref=self.
                                    identity["standard_id"],
                                    x_opencti_create_indicator=True,
                                    external_references=[external_reference],
                                )
                                bundle_objects.append(stix_observable)
                        fp.close()
                        bundle = Bundle(objects=bundle_objects).serialize()
                        self.helper.send_stix2_bundle(
                            bundle,
                            entities_types=self.helper.connect_scope,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                        # Clean up the temporary CSV
                        if os.path.exists(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.csv"):
                            os.remove(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.csv")
                    except Exception as e:
                        self.helper.log_error(str(e))
                    # Store the current timestamp as a last run
                    message = "Connector successfully run, storing last_run as " + str(
                        timestamp)
                    self.helper.log_info(message)
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class Cybercrimetracker:
    """OpenCTI connector importing C2 entries from CYBERCRIME-TRACKER.NET.

    Polls the site's RSS feed on a fixed interval and converts each entry
    into a STIX2 bundle: one Malware per entry plus optional Indicator,
    Url/IPv4/Domain observables and "indicates"/"based-on" relationships.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = "{}/config.yml".format(
            os.path.dirname(os.path.abspath(__file__))
        )
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Connector Config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        # CYBERCRIME-TRACKER.NET Config
        # NOTE(review): the env var name below carries a historical typo
        # ("CYBERCRIMET_RACKER_..."); kept as-is so existing deployments
        # that set it keep working.
        self.feed_url = get_config_variable(
            "CYBERCRIMET_RACKER_FEED_URL", ["cybercrime-tracker", "feed_url"], config
        )
        self.connector_tlp = get_config_variable(
            "CYBERCRIME_TRACKER_TLP", ["cybercrime-tracker", "tlp"], config
        )
        self.create_indicators = get_config_variable(
            "CYBERCRIME_TRACKER_CREATE_INDICATORS",
            ["cybercrime-tracker", "create_indicators"],
            config,
        )
        self.create_observables = get_config_variable(
            "CYBERCRIME_TRACKER_CREATE_OBSERVABLES",
            ["cybercrime-tracker", "create_observables"],
            config,
        )
        # Interval between runs, in seconds (isNumber coerces to int).
        self.interval = get_config_variable(
            "CYBERCRIMETRACKER_INTERVAL",
            ["cybercrime-tracker", "interval"],
            config,
            isNumber=True,
        )

    @staticmethod
    def _time_to_datetime(input_date: time) -> datetime.datetime:
        """Convert a ``time.struct_time`` (feedparser dates) to an aware UTC datetime."""
        return datetime.datetime(
            input_date.tm_year,
            input_date.tm_mon,
            input_date.tm_mday,
            input_date.tm_hour,
            input_date.tm_min,
            input_date.tm_sec,
            tzinfo=datetime.timezone.utc,
        )

    def parse_feed_entry(self, entry):
        """
        Parses an entry from the feed and returns a dict with:

        date: date in iso format
        type: name of the malware associated with the C2 server
        url: the url of the C2
        ip: the IP address of the C2
        ext_link: An external link to CYBERCRIME-TRACKER.NET with details

        Returns False when the entry summary does not match the expected
        format (the caller must check before subscripting the result).

        Note: CYBERCRIME-TRACKER.NET does not provide the protocol in the url
        as such we always assume 'http'.
        """
        parsed_entry = {}
        # Two alternatives: entries with an explicit IP and entries without.
        pattern = (
            r"(?:\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type}"
            + r"\s+-%{GREEDYDATA}:\s+%{IP:ip}|"
            + r"\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type})"
        )
        entry_summary = Grok(pattern).match(entry["summary"])
        if entry_summary:
            parsed_entry["date"] = self._time_to_datetime(entry["published_parsed"])
            parsed_entry["type"] = entry_summary["type"]
            parsed_entry["ext_link"] = entry["link"]
            parsed_entry["url"] = "http://{}".format(quote(entry["title"]))
            hostname = urlparse(parsed_entry["url"]).hostname
            # Fall back to the URL's hostname when the summary has no IP.
            if entry_summary["ip"] is None:
                parsed_entry["ip"] = hostname
            else:
                parsed_entry["ip"] = entry_summary["ip"]
                parsed_entry["domain"] = hostname
            self.helper.log_info("Parsed entry: {}".format(entry["title"]))
            return parsed_entry
        else:
            self.helper.log_error("Could not parse: {}".format(entry["title"]))
            return False

    def gen_indicator_pattern(self, parsed_entry):
        """Build the STIX observation pattern for a parsed feed entry.

        Includes the domain clause only when the entry resolved one.
        """
        if "domain" in parsed_entry.keys():
            indicator_pattern = (
                "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"])
                + "AND [url:value='{}'] ".format(parsed_entry["url"])
                + "AND [domain-name:value='{}']".format(parsed_entry["domain"])
            )
        else:
            indicator_pattern = "[ipv4-addr:value='{}'] ".format(
                parsed_entry["ip"]
            ) + "AND [url:value='{}']".format(parsed_entry["url"])
        return indicator_pattern

    def run(self):
        """Main loop: poll the feed every ``self.interval`` seconds and import it."""
        self.helper.log_info("Fetching data CYBERCRIME-TRACKER.NET...")
        tlp = self.helper.api.marking_definition.read(
            filters=[
                {"key": "definition", "values": "TLP:{}".format(self.connector_tlp)}
            ]
        )
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: {}".format(
                            datetime.datetime.utcfromtimestamp(last_run).strftime(
                                "%Y-%m-%d %H:%M:%S"
                            )
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # Run if it is the first time or we are past the interval
                if last_run is None or ((timestamp - last_run) > self.interval):
                    self.helper.log_info("Connector will run!")
                    now = datetime.datetime.utcfromtimestamp(timestamp)
                    # FIX: the work name previously said "MITRE run @ ..."
                    # (copy-paste from the MITRE connector).
                    friendly_name = "CYBERCRIME-TRACKER run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S"
                    )
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name
                    )
                    # Get Feed Content
                    feed = feedparser.parse(self.feed_url)
                    self.helper.log_info(
                        "Found: {} entries.".format(len(feed["entries"]))
                    )
                    self.feed_summary = {
                        "Source": feed["feed"]["title"],
                        "Date": self._time_to_datetime(
                            feed["feed"]["published_parsed"]
                        ),
                        "Details": feed["feed"]["subtitle"],
                        "Link": feed["feed"]["link"],
                    }
                    # Create the bundle
                    bundle_objects = list()
                    organization = stix2.Identity(
                        id=OpenCTIStix2Utils.generate_random_stix_id("identity"),
                        name="CYBERCRIME-TRACKER.NET",
                        identity_class="organization",
                        description="Tracker collecting and sharing daily updates of C2 IPs/Urls. http://cybercrime-tracker.net",
                    )
                    bundle_objects.append(organization)
                    for entry in feed["entries"]:
                        parsed_entry = self.parse_feed_entry(entry)
                        # FIX: parse_feed_entry returns False on failure;
                        # previously the False was subscripted, raising
                        # TypeError and aborting the whole batch. Skip the
                        # single bad entry instead.
                        if not parsed_entry:
                            continue
                        external_reference = stix2.ExternalReference(
                            source_name="{}".format(self.feed_summary["Source"]),
                            url=parsed_entry["ext_link"],
                        )
                        indicator_pattern = self.gen_indicator_pattern(parsed_entry)
                        malware = stix2.Malware(
                            id=OpenCTIStix2Utils.generate_random_stix_id("malware"),
                            is_family=True,
                            name=parsed_entry["type"],
                            description="{} malware.".format(parsed_entry["type"]),
                        )
                        bundle_objects.append(malware)
                        indicator = None
                        if self.create_indicators:
                            indicator = stix2.Indicator(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "indicator"
                                ),
                                name=parsed_entry["url"],
                                description="C2 URL for: {}".format(
                                    parsed_entry["type"]
                                ),
                                labels=["C2 Server"],
                                pattern_type="stix",
                                pattern=indicator_pattern,
                                valid_from=parsed_entry["date"],
                                created=parsed_entry["date"],
                                modified=parsed_entry["date"],
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                external_references=[external_reference],
                                custom_properties={
                                    "x_opencti_main_observable_type": "Url"
                                },
                            )
                            bundle_objects.append(indicator)
                            relation = stix2.Relationship(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "relationship"
                                ),
                                source_ref=indicator.id,
                                target_ref=malware.id,
                                relationship_type="indicates",
                                start_time=self._time_to_datetime(
                                    entry["published_parsed"]
                                ),
                                # stop_time must be strictly after start_time;
                                # pad by 3 seconds.
                                stop_time=self._time_to_datetime(
                                    entry["published_parsed"]
                                )
                                + datetime.timedelta(0, 3),
                                description="URLs associated to: "
                                + parsed_entry["type"],
                                confidence=self.confidence_level,
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                created=parsed_entry["date"],
                                modified=parsed_entry["date"],
                                external_references=[external_reference],
                            )
                            bundle_objects.append(relation)
                        if self.create_observables:
                            observable_url = SimpleObservable(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "x-opencti-simple-observable"
                                ),
                                key="Url.value",
                                labels=["C2 Server"],
                                value=parsed_entry["url"],
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                external_references=[external_reference],
                            )
                            bundle_objects.append(observable_url)
                            observable_ip = SimpleObservable(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "x-opencti-simple-observable"
                                ),
                                key="IPv4-Addr.value",
                                labels=["C2 Server"],
                                value=parsed_entry["ip"],
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                external_references=[external_reference],
                            )
                            bundle_objects.append(observable_ip)
                            observable_domain = None
                            if "domain" in parsed_entry.keys():
                                observable_domain = SimpleObservable(
                                    id=OpenCTIStix2Utils.generate_random_stix_id(
                                        "x-opencti-simple-observable"
                                    ),
                                    key="Domain-Name.value",
                                    labels=["C2 Server"],
                                    value=parsed_entry["domain"],
                                    created_by_ref=organization.id,
                                    object_marking_refs=[tlp["standard_id"]],
                                    external_references=[external_reference],
                                )
                                bundle_objects.append(observable_domain)
                            # Link the indicator to the observables it is
                            # based on (only when both were created).
                            if indicator is not None:
                                relationship_1 = stix2.Relationship(
                                    id=OpenCTIStix2Utils.generate_random_stix_id(
                                        "relationship"
                                    ),
                                    relationship_type="based-on",
                                    created_by_ref=organization.id,
                                    source_ref=indicator.id,
                                    target_ref=observable_url.id,
                                )
                                bundle_objects.append(relationship_1)
                                relationship_2 = stix2.Relationship(
                                    id=OpenCTIStix2Utils.generate_random_stix_id(
                                        "relationship"
                                    ),
                                    relationship_type="based-on",
                                    created_by_ref=organization.id,
                                    source_ref=indicator.id,
                                    target_ref=observable_ip.id,
                                )
                                bundle_objects.append(relationship_2)
                                if observable_domain is not None:
                                    relationship_3 = stix2.Relationship(
                                        id=OpenCTIStix2Utils.generate_random_stix_id(
                                            "relationship"
                                        ),
                                        relationship_type="based-on",
                                        created_by_ref=organization.id,
                                        source_ref=indicator.id,
                                        target_ref=observable_domain.id,
                                    )
                                    bundle_objects.append(relationship_3)
                    # create stix bundle
                    bundle = stix2.Bundle(objects=bundle_objects)
                    # send data
                    self.helper.send_stix2_bundle(
                        bundle=bundle.serialize(),
                        update=self.update_existing_data,
                        work_id=work_id,
                    )
                    # Store the current timestamp as a last run
                    message = (
                        "Connector successfully run, storing last_run as: {}".format(
                            str(timestamp)
                        )
                    )
                    self.helper.log_info(message)
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(
                        "Last_run stored, next run in: {} seconds.".format(
                            str(round(self.interval, 2))
                        )
                    )
                    time.sleep(60)
                else:
                    new_interval = self.interval - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run. \
                        Next run in: {} seconds.".format(
                            str(round(new_interval, 2))
                        )
                    )
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class Mitre:
    """Connector that periodically downloads and imports the MITRE ATT&CK
    Enterprise and PRE-ATT&CK STIX bundles into OpenCTI."""

    def __init__(self):
        # Instantiate the connector helper from config
        base_dir = os.path.dirname(os.path.abspath(__file__))
        config_file_path = base_dir + '/config.yml'
        if os.path.isfile(config_file_path):
            config = yaml.load(open(config_file_path), Loader=yaml.FullLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config: environment variables take precedence over the YAML file.
        self.mitre_enterprise_file_url = (
            os.getenv('MITRE_ENTERPRISE_FILE_URL')
            or config['mitre']['enterprise_file_url']
        )
        self.mitre_pre_attack_file_url = (
            os.getenv('MITRE_PRE_ATTACK_FILE_URL')
            or config['mitre']['pre_attack_file_url']
        )
        self.mitre_interval = (
            os.getenv('MITRE_INTERVAL') or config['mitre']['interval']
        )

    def get_interval(self):
        """Polling interval in seconds (the configured value is in days)."""
        seconds_per_day = 60 * 60 * 24
        return int(self.mitre_interval) * seconds_per_day

    def _import_bundle(self, url):
        # Download one STIX bundle and forward it to the platform as-is.
        raw = urllib.request.urlopen(url).read()
        self.helper.send_stix2_bundle(raw.decode('utf-8'),
                                      self.helper.connect_scope)

    def run(self):
        """Main loop: import both datasets whenever the interval has elapsed."""
        self.helper.log_info('Fetching MITRE datasets...')
        day_seconds = 60 * 60 * 24
        while True:
            try:
                # Compare the stored last-run timestamp with the clock.
                now_ts = int(time.time())
                state = self.helper.get_state()
                previous_run = None
                if state is not None and 'last_run' in state:
                    previous_run = state['last_run']
                    stamp = datetime.utcfromtimestamp(previous_run)
                    self.helper.log_info('Connector last run: '
                                         + stamp.strftime('%Y-%m-%d %H:%M:%S'))
                else:
                    self.helper.log_info('Connector has never run')
                # Due when never run, or more than interval-1 days elapsed.
                due = previous_run is None or (
                    (now_ts - previous_run)
                    > (int(self.mitre_interval) - 1) * day_seconds
                )
                if due:
                    self.helper.log_info('Connector will run!')
                    self._import_bundle(self.mitre_enterprise_file_url)
                    self._import_bundle(self.mitre_pre_attack_file_url)
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        'Connector successfully run, storing last_run as '
                        + str(now_ts))
                    self.helper.set_state({'last_run': now_ts})
                    # Sleep all interval
                    self.helper.log_info(
                        'Last_run stored, sleeping for: '
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + ' days')
                    time.sleep(self.get_interval())
                else:
                    remaining = self.get_interval() - (now_ts - previous_run)
                    self.helper.log_info(
                        'Connector will not run, sleeping for: '
                        + str(round(remaining / 60 / 60 / 24, 2))
                        + ' days')
                    # Sleep only remaining time
                    time.sleep(remaining)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info('Connector stop')
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(self.get_interval())
class Cve:
    """Connector that imports the NVD CVE JSON feed into OpenCTI.

    Downloads the gzipped NVD feed, converts it to a STIX2 bundle and sends
    it to the platform on a configurable interval (in days).
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader
                           ) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_nvd_data_feed = get_config_variable(
            'CVE_NVD_DATA_FEED', ['cve', 'nvd_data_feed'], config)
        self.cve_interval = get_config_variable(
            'CVE_INTERVAL', ['cve', 'interval'], config, True)
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA',
            ['connector', 'update_existing_data'], config)

    def get_interval(self):
        """Polling interval in seconds (the configured value is in days)."""
        return int(self.cve_interval) * 60 * 60 * 24

    def run(self):
        """Main loop: download, convert and import the feed when due."""
        self.helper.log_info('Fetching CVE knowledge...')
        # FIX: anchor every work file next to this script. Previously the
        # archive was downloaded to the script directory but re-opened and
        # removed with CWD-relative paths, which failed whenever the
        # process working directory differed from the script directory.
        base_dir = os.path.dirname(os.path.abspath(__file__))
        gz_path = os.path.join(base_dir, 'data.json.gz')
        json_path = os.path.join(base_dir, 'data.json')
        stix_path = os.path.join(base_dir, 'data-stix2.json')
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and 'last_run' in current_state:
                    last_run = current_state['last_run']
                    self.helper.log_info(
                        'Connector last run: '
                        + datetime.utcfromtimestamp(last_run).strftime(
                            '%Y-%m-%d %H:%M:%S'))
                else:
                    last_run = None
                    self.helper.log_info('Connector has never run')
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                        (int(self.cve_interval) - 1) * 60 * 60 * 24)):
                    # Downloading json.gz file
                    self.helper.log_info('Requesting the file')
                    urllib.request.urlretrieve(self.cve_nvd_data_feed, gz_path)
                    # Unzipping the file
                    self.helper.log_info('Unzipping the file')
                    with gzip.open(gz_path, 'rb') as f_in:
                        with open(json_path, 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)
                    # Converting the file to stix2
                    self.helper.log_info('Converting the file')
                    convert(json_path, stix_path)
                    with open(stix_path) as stix_json:
                        contents = stix_json.read()
                        self.helper.send_stix2_bundle(
                            contents, self.helper.connect_scope,
                            self.update_existing_data)
                    # Remove files
                    os.remove(json_path)
                    os.remove(gz_path)
                    os.remove(stix_path)
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        'Connector successfully run, storing last_run as '
                        + str(timestamp))
                    self.helper.set_state({'last_run': timestamp})
                    self.helper.log_info(
                        'Last_run stored, next run in: '
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + ' days')
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        'Connector will not run, next run in: '
                        + str(round(new_interval / 60 / 60 / 24, 2))
                        + ' days')
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info('Connector stop')
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class ImportFilePdfObservables:
    """Internal import connector: extracts IOCs from an uploaded PDF and
    pushes them to OpenCTI as observables (optionally attached to a report)."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Whether an indicator should be created for each observable.
        self.create_indicator = get_config_variable(
            "PDF_OBSERVABLES_CREATE_INDICATOR",
            ["pdf_observables", "create_indicator"],
            config,
        )

    def _process_message(self, data):
        """Download the referenced PDF, parse IOCs out of it and send them
        to the platform as a STIX bundle. Returns the worker messages."""
        file_path = data["file_path"]
        file_name = os.path.basename(file_path)
        work_context = data["work_context"]
        file_uri = self.helper.opencti_url + file_path
        self.helper.log_info("Importing the file " + file_uri)
        # Get the file
        file_content = self.helper.api.fetch_opencti_file(file_uri, True)
        # Write the file to a temp location for the PDF parser.
        # FIX: use a context manager so the handle is always closed.
        path = "/tmp/" + file_name
        with open(path, "wb") as f:
            f.write(file_content)
        # Parse
        bundle = {
            "type": "bundle",
            "id": "bundle--" + str(uuid.uuid4()),
            "spec_version": "2.0",
            "objects": [],
        }
        observed_data = {
            "id": "observed-data--" + str(uuid.uuid4()),
            "type": "observed-data",
            "x_opencti_indicator_create": self.create_indicator,
            "objects": {},
        }
        i = 0
        parser = iocp.IOC_Parser(None, "pdf", True, "pdfminer", "json")
        try:
            parsed = parser.parse(path)
        finally:
            # FIX: remove the temp file even when parsing raises.
            os.remove(path)
        if parsed:
            for file in parsed:
                if file is None:
                    continue
                for page in file:
                    for match in page:
                        resolved_match = self.resolve_match(match)
                        if not resolved_match:
                            continue
                        observed_data["objects"][i] = {
                            "type": resolved_match["type"],
                            "x_opencti_observable_type": resolved_match["type"],
                            "x_opencti_observable_value": resolved_match["value"],
                            "x_opencti_indicator_create": self.create_indicator,
                        }
                        i += 1
        else:
            self.helper.log_error("Could not parse the report!")
        # Get context
        if len(observed_data["objects"]) > 0:
            bundle["objects"].append(observed_data)
            if work_context is not None and len(work_context) > 0:
                report = self.helper.api.report.read(id=work_context)
                if report is not None:
                    # Re-emit the contextual report so the observed-data is
                    # attached to it on import.
                    report_stix = {
                        "type": "report",
                        "id": report["stix_id_key"],
                        "name": report["name"],
                        "description": report["description"],
                        "published": self.helper.api.stix2.format_date(
                            report["published"]),
                        "object_refs": [observed_data["id"]],
                    }
                    bundle["objects"].append(report_stix)
        bundles_sent = self.helper.send_stix2_bundle(
            json.dumps(bundle), None, False, False)
        return [
            "Sent " + str(len(bundles_sent)) + " stix bundle(s) for worker import"
        ]

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)

    def resolve_match(self, match):
        """Map an ioc-parser match to an OpenCTI observable descriptor.

        Returns ``{"type": ..., "value": ...}`` or False when the match
        type is not supported.
        """
        types = {
            "MD5": ["File-MD5"],
            "SHA1": ["File-SHA1"],
            "SHA256": ["File-SHA256"],
            "Filename": ["File-Name"],
            "IP": ["IPv4-Addr"],
            "Host": ["Domain"],
            "Filepath": ["File-Name"],
            "URL": ["URL"],
            "Email": ["Email-Address"],
        }
        # "type" is the ioc-parser category; renamed locally to avoid
        # shadowing the builtin.
        match_type = match["type"]
        value = match["match"]
        if match_type not in types:
            return False
        resolved = types[match_type][0]
        # IP matches may actually be IPv6; disambiguate by content.
        if resolved == "IPv4-Addr":
            resolved = self.detect_ip_version(value)
        return {"type": resolved, "value": value}

    def detect_ip_version(self, value):
        """Classify an IP literal as IPv4 or IPv6.

        FIX: a colon only ever appears in IPv6 literals; the previous
        length heuristic (len > 16) misclassified compressed forms such
        as "::1" as IPv4.
        """
        return "IPv6-Addr" if ":" in value else "IPv4-Addr"
class Misp: def __init__(self): # Instantiate the connector helper from config config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml" config = ( yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {} ) self.helper = OpenCTIConnectorHelper(config) # Extra config self.misp_url = get_config_variable("MISP_URL", ["misp", "url"], config) self.misp_key = get_config_variable("MISP_KEY", ["misp", "key"], config) self.misp_ssl_verify = get_config_variable( "MISP_SSL_VERIFY", ["misp", "ssl_verify"], config ) self.misp_create_report = get_config_variable( "MISP_CREATE_REPORTS", ["misp", "create_reports"], config ) self.misp_report_class = ( get_config_variable("MISP_REPORT_CLASS", ["misp", "report_class"], config) or "MISP Event" ) self.misp_import_from_date = get_config_variable( "MISP_IMPORT_FROM_DATE", ["misp", "import_from_date"], config ) self.misp_import_tags = get_config_variable( "MISP_IMPORT_TAGS", ["misp", "import_tags"], config ) self.misp_interval = get_config_variable( "MISP_INTERVAL", ["misp", "interval"], config, True ) self.update_existing_data = get_config_variable( "CONNECTOR_UPDATE_EXISTING_DATA", ["connector", "update_existing_data"], config, ) # Initialize MISP self.misp = ExpandedPyMISP( url=self.misp_url, key=self.misp_key, ssl=self.misp_ssl_verify, debug=False ) def get_interval(self): return int(self.misp_interval) * 60 def run(self): while True: timestamp = int(time.time()) # Get the last_run datetime current_state = self.helper.get_state() if current_state is not None and "last_run" in current_state: last_run = datetime.utcfromtimestamp( current_state["last_run"] ).strftime("%Y-%m-%d %H:%M:%S") self.helper.log_info("Connector last run: " + last_run) else: last_run = None self.helper.log_info("Connector has never run") # If import with tags complex_query_tag = None if self.misp_import_tags is not None: or_parameters = [] for tag in self.misp_import_tags.split(","): 
or_parameters.append(tag.strip()) complex_query_tag = self.misp.build_complex_query( or_parameters=or_parameters ) # If import from a specific date import_from_date = None if self.misp_import_from_date is not None: import_from_date = parse(self.misp_import_from_date).strftime( "%Y-%m-%d %H:%M:%S" ) # Prepare the query kwargs = dict() if complex_query_tag is not None: kwargs["tags"] = complex_query_tag if last_run is not None: kwargs["timestamp"] = last_run elif import_from_date is not None: kwargs["date_from"] = import_from_date # Query with pagination of 100 current_page = 1 while True: kwargs["limit"] = 50 kwargs["page"] = current_page self.helper.log_info( "Fetching MISP events with args: " + json.dumps(kwargs) ) events = [] try: events = self.misp.search("events", **kwargs) except Exception as e: self.helper.log_error(str(e)) try: events = self.misp.search("events", **kwargs) except Exception as e: self.helper.log_error(str(e)) self.helper.log_info("MISP returned " + str(len(events)) + " events.") # Break if no more result if len(events) == 0: break try: self.process_events(events) except Exception as e: self.helper.log_error(str(e)) current_page += 1 self.helper.set_state({"last_run": timestamp}) time.sleep(self.get_interval()) def process_events(self, events): for event in events: self.helper.log_info("Processing event " + event["Event"]["uuid"]) ### Default variables added_markings = [] added_entities = [] added_object_refs = [] ### Pre-process # Author author = Identity( name=event["Event"]["Orgc"]["name"], identity_class="organization" ) # Elements event_elements = self.prepare_elements(event["Event"]["Galaxy"], author) # Markings if "Tag" in event["Event"]: event_markings = self.resolve_markings(event["Event"]["Tag"]) else: event_markings = [TLP_WHITE] # Tags event_tags = [] if "Tag" in event["Event"]: event_tags = self.resolve_tags(event["Event"]["Tag"]) # ExternalReference event_external_reference = ExternalReference( 
source_name=self.helper.connect_name, external_id=event["Event"]["uuid"], url=self.misp_url + "/events/view/" + event["Event"]["uuid"], ) ### Get indicators event_external_references = [event_external_reference] indicators = [] # Get attributes for attribute in event["Event"]["Attribute"]: indicator = self.process_attribute( author, event_elements, event_markings, [], attribute ) if attribute["type"] == "link": event_external_references.append( ExternalReference( source_name=attribute["category"], external_id=attribute["uuid"], url=attribute["value"], ) ) if indicator is not None: indicators.append(indicator) # Get attributes of objects objects_relationships = [] for object in event["Event"]["Object"]: attribute_external_references = [] for attribute in object["Attribute"]: if attribute["type"] == "link": attribute_external_references.append( ExternalReference( source_name=attribute["category"], external_id=attribute["uuid"], url=attribute["value"], ) ) object_attributes = [] for attribute in object["Attribute"]: indicator = self.process_attribute( author, event_elements, event_markings, attribute_external_references, attribute, ) if indicator is not None: indicators.append(indicator) if ( object["meta-category"] == "file" and indicator["indicator"].x_opencti_observable_type in FILETYPES ): object_attributes.append(indicator) objects_relationships.extend( self.process_observable_relations(object_attributes, []) ) ### Prepare the bundle bundle_objects = [author] object_refs = [] # Add event markings for event_marking in event_markings: if event_marking["id"] not in added_markings: bundle_objects.append(event_marking) added_markings.append(event_marking["id"]) # Add event elements all_event_elements = ( event_elements["intrusion_sets"] + event_elements["malwares"] + event_elements["tools"] + event_elements["attack_patterns"] ) for event_element in all_event_elements: if event_element["name"] not in added_object_refs: object_refs.append(event_element) 
added_object_refs.append(event_element["name"]) if event_element["name"] not in added_entities: bundle_objects.append(event_element) added_entities.append(event_element["name"]) # Add indicators for indicator in indicators: if indicator["indicator"]["id"] not in added_object_refs: object_refs.append(indicator["indicator"]) added_object_refs.append(indicator["indicator"]["id"]) if indicator["indicator"]["id"] not in added_entities: bundle_objects.append(indicator["indicator"]) added_entities.append(indicator["indicator"]["id"]) # Add attribute markings for attribute_marking in indicator["markings"]: if attribute_marking["id"] not in added_markings: bundle_objects.append(attribute_marking) added_markings.append(attribute_marking["id"]) # Add attribute elements all_attribute_elements = ( indicator["attribute_elements"]["intrusion_sets"] + indicator["attribute_elements"]["malwares"] + indicator["attribute_elements"]["tools"] + indicator["attribute_elements"]["attack_patterns"] ) for attribute_element in all_attribute_elements: if attribute_element["name"] not in added_object_refs: object_refs.append(attribute_element) added_object_refs.append(attribute_element["name"]) if attribute_element["name"] not in added_entities: bundle_objects.append(attribute_element) added_entities.append(attribute_element["name"]) # Add attribute relationships for relationship in indicator["relationships"]: object_refs.append(relationship) bundle_objects.append(relationship) # Add object_relationships for object_relationship in objects_relationships: bundle_objects.append(object_relationship) ### Create the report if needed if self.misp_create_report and len(object_refs) > 0: report = Report( name=event["Event"]["info"], description=event["Event"]["info"], published=parse(event["Event"]["date"]), created_by_ref=author, object_marking_refs=event_markings, labels=["threat-report"], object_refs=object_refs, external_references=event_external_references, custom_properties={ 
"x_opencti_report_class": self.misp_report_class, "x_opencti_object_status": 2, "x_opencti_tags": event_tags, }, ) bundle_objects.append(report) bundle = Bundle(objects=bundle_objects).serialize() self.helper.log_info("Sending event STIX2 bundle") self.helper.send_stix2_bundle( bundle, None, self.update_existing_data, False ) def process_attribute( self, author, event_elements, event_markings, attribute_external_references, attribute, ): try: resolved_attributes = self.resolve_type( attribute["type"], attribute["value"] ) if resolved_attributes is None: return None for resolved_attribute in resolved_attributes: ### Pre-process # Elements attribute_elements = self.prepare_elements(attribute["Galaxy"], author) # Markings & Tags attribute_tags = [] if "Tag" in attribute: attribute_markings = self.resolve_markings( attribute["Tag"], with_default=False ) attribute_tags = self.resolve_tags(attribute["Tag"]) if len(attribute_markings) == 0: attribute_markings = event_markings else: attribute_markings = event_markings ### Create the indicator observable_type = resolved_attribute["type"] observable_value = resolved_attribute["value"] name = resolved_attribute["value"] pattern_type = "stix" # observable type is yara for instance if observable_type in PATTERNTYPES: pattern_type = observable_type observable_type = "Unknown" genuine_pattern = ( "[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']" ) pattern = observable_value name = ( attribute["comment"] if len(attribute["comment"]) > 0 else observable_type ) # observable type is not in stix 2 elif observable_type not in OPENCTISTIX2: return None # observable type is in stix else: if "transform" in OPENCTISTIX2[observable_type]: if ( OPENCTISTIX2[observable_type]["transform"]["operation"] == "remove_string" ): observable_value = observable_value.replace( OPENCTISTIX2[observable_type]["transform"]["value"], "" ) lhs = ObjectPath( OPENCTISTIX2[observable_type]["type"], OPENCTISTIX2[observable_type]["path"], ) genuine_pattern 
= str( ObservationExpression( EqualityComparisonExpression(lhs, observable_value) ) ) pattern = genuine_pattern indicator = Indicator( name=name, description=attribute["comment"], pattern=genuine_pattern, valid_from=datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), labels=["malicious-activity"], created_by_ref=author, object_marking_refs=attribute_markings, external_references=attribute_external_references, custom_properties={ "x_opencti_indicator_pattern": pattern, "x_opencti_observable_type": observable_type, "x_opencti_observable_value": observable_value, "x_opencti_pattern_type": pattern_type, "x_opencti_tags": attribute_tags, }, ) ### Create the relationships relationships = [] # Event threats for threat in ( event_elements["intrusion_sets"] + event_elements["malwares"] + event_elements["tools"] ): relationships.append( Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref=threat.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, }, ) ) # Attribute threats for threat in ( attribute_elements["intrusion_sets"] + attribute_elements["malwares"] + attribute_elements["tools"] ): relationships.append( Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref=threat.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": 
self.helper.connect_confidence_level, }, ) ) # Event Attack Patterns for attack_pattern in event_elements["attack_patterns"]: if len(event_elements["malwares"]) > 0: threats = event_elements["malwares"] elif len(event_elements["intrusion_sets"]) > 0: threats = event_elements["intrusion_sets"] else: threats = [] for threat in threats: relationship_uses = Relationship( relationship_type="uses", created_by_ref=author, source_ref=threat.id, target_ref=attack_pattern.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_ignore_dates": True, }, ) relationships.append(relationship_uses) relationship_indicates = Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref="malware--fa42a846-8d90-4e51-bc29-71d5b4802168", # Fake description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_source_ref": indicator.id, "x_opencti_target_ref": relationship_uses.id, }, ) relationships.append(relationship_indicates) # Attribute Attack Patterns for attack_pattern in attribute_elements["attack_patterns"]: if len(attribute_elements["malwares"]) > 0: threats = attribute_elements["malwares"] elif len(attribute_elements["intrusion_sets"]) > 0: threats = attribute_elements["intrusion_sets"] else: threats = [] for threat in threats: relationship_uses = Relationship( relationship_type="uses", 
created_by_ref=author, source_ref=threat.id, target_ref=attack_pattern.id, description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_ignore_dates": True, }, ) relationships.append(relationship_uses) relationship_indicates = Relationship( relationship_type="indicates", created_by_ref=author, source_ref=indicator.id, target_ref="malware--fa42a846-8d90-4e51-bc29-71d5b4802168", # Fake description=attribute["comment"], object_marking_refs=attribute_markings, custom_properties={ "x_opencti_first_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_last_seen": datetime.utcfromtimestamp( int(attribute["timestamp"]) ).strftime("%Y-%m-%dT%H:%M:%SZ"), "x_opencti_weight": self.helper.connect_confidence_level, "x_opencti_source_ref": indicator.id, "x_opencti_target_ref": relationship_uses.id, "x_opencti_ignore_dates": True, }, ) relationships.append(relationship_indicates) return { "indicator": indicator, "relationships": relationships, "attribute_elements": attribute_elements, "markings": attribute_markings, } except: return None def process_observable_relations( self, object_attributes, result_table, start_element=0 ): if start_element == 0: result_table = [] if len(object_attributes) == 1: return [] for x in range(start_element + 1, len(object_attributes)): result_table.append( Relationship( relationship_type="corresponds", source_ref=object_attributes[start_element]["indicator"]["id"], target_ref=object_attributes[x]["indicator"]["id"], description="Same file", custom_properties={"x_opencti_ignore_dates": True}, ) ) if start_element != len(object_attributes): return 
def prepare_elements(self, galaxies, author):
    """Convert MISP galaxy clusters into STIX domain objects.

    Scans *galaxies* and creates IntrusionSet / Malware / Tool /
    AttackPattern objects (created by *author*), de-duplicated by name
    across all galaxy types via ``added_names``.

    :param galaxies: list of MISP galaxy dicts, each with "namespace",
        "name" and a "GalaxyCluster" list
    :param author: STIX identity used as created_by_ref
    :return: dict with keys "intrusion_sets", "malwares", "tools",
        "attack_patterns", each a list of STIX objects
    """
    elements = {
        "intrusion_sets": [],
        "malwares": [],
        "tools": [],
        "attack_patterns": [],
    }
    # Names already emitted (any type) — prevents duplicates when the same
    # cluster appears in several galaxies.
    added_names = []
    for galaxy in galaxies:
        # Get the linked intrusion sets
        if (
            (
                galaxy["namespace"] == "mitre-attack"
                and galaxy["name"] == "Intrusion Set"
            )
            or (galaxy["namespace"] == "misp" and galaxy["name"] == "Threat Actor")
            or (
                galaxy["namespace"] == "misp"
                and galaxy["name"] == "Microsoft Activity Group actor"
            )
        ):
            for galaxy_entity in galaxy["GalaxyCluster"]:
                # MITRE cluster values look like "Name - G0001": keep the name part.
                if " - G" in galaxy_entity["value"]:
                    name = galaxy_entity["value"].split(" - G")[0]
                # Normalize "APT 28" style names to "APT28".
                elif "APT " in galaxy_entity["value"]:
                    name = galaxy_entity["value"].replace("APT ", "APT")
                else:
                    name = galaxy_entity["value"]
                if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                    aliases = galaxy_entity["meta"]["synonyms"]
                else:
                    aliases = [name]
                if name not in added_names:
                    elements["intrusion_sets"].append(
                        IntrusionSet(
                            name=name,
                            labels=["intrusion-set"],
                            description=galaxy_entity["description"],
                            created_by_ref=author,
                            custom_properties={"x_opencti_aliases": aliases},
                        )
                    )
                    added_names.append(name)
        # Get the linked malwares
        if (
            (galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Malware")
            or (galaxy["namespace"] == "misp" and galaxy["name"] == "Tool")
            or (galaxy["namespace"] == "misp" and galaxy["name"] == "Ransomware")
            or (galaxy["namespace"] == "misp" and galaxy["name"] == "Android")
            or (galaxy["namespace"] == "misp" and galaxy["name"] == "Malpedia")
        ):
            for galaxy_entity in galaxy["GalaxyCluster"]:
                # MITRE software values look like "Name - S0001".
                if " - S" in galaxy_entity["value"]:
                    name = galaxy_entity["value"].split(" - S")[0]
                else:
                    name = galaxy_entity["value"]
                if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                    aliases = galaxy_entity["meta"]["synonyms"]
                else:
                    aliases = [name]
                if name not in added_names:
                    elements["malwares"].append(
                        Malware(
                            name=name,
                            labels=["malware"],
                            description=galaxy_entity["description"],
                            created_by_ref=author,
                            custom_properties={"x_opencti_aliases": aliases},
                        )
                    )
                    added_names.append(name)
        # Get the linked tools
        if galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Tool":
            for galaxy_entity in galaxy["GalaxyCluster"]:
                if " - S" in galaxy_entity["value"]:
                    name = galaxy_entity["value"].split(" - S")[0]
                else:
                    name = galaxy_entity["value"]
                if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                    aliases = galaxy_entity["meta"]["synonyms"]
                else:
                    aliases = [name]
                if name not in added_names:
                    elements["tools"].append(
                        Tool(
                            name=name,
                            labels=["tool"],
                            description=galaxy_entity["description"],
                            created_by_ref=author,
                            custom_properties={"x_opencti_aliases": aliases},
                        )
                    )
                    added_names.append(name)
        # Get the linked attack_patterns
        if (
            galaxy["namespace"] == "mitre-attack"
            and galaxy["name"] == "Attack Pattern"
        ):
            for galaxy_entity in galaxy["GalaxyCluster"]:
                # MITRE technique values look like "Name - T0001".
                if " - T" in galaxy_entity["value"]:
                    name = galaxy_entity["value"].split(" - T")[0]
                else:
                    name = galaxy_entity["value"]
                if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                    aliases = galaxy_entity["meta"]["synonyms"]
                else:
                    aliases = [name]
                if name not in added_names:
                    elements["attack_patterns"].append(
                        AttackPattern(
                            name=name,
                            labels=["attack-pattern"],
                            description=galaxy_entity["description"],
                            created_by_ref=author,
                            custom_properties={
                                # NOTE(review): assumes every mitre-attack cluster
                                # carries meta.external_id (unlike the guarded
                                # synonyms lookup above) — KeyError otherwise;
                                # confirm against MISP galaxy data.
                                "x_opencti_external_id": galaxy_entity["meta"][
                                    "external_id"
                                ][0],
                                "x_opencti_aliases": aliases,
                            },
                        )
                    )
                    added_names.append(name)
    return elements
def resolve_type(self, type, value):
    """Translate a MISP attribute type into OpenCTI observable descriptors.

    :param type: MISP attribute type (e.g. "md5", "filename|sha1")
    :param value: attribute value; composite types carry "|"-separated parts
    :return: list of {"type", "value"} dicts (two entries for composite
        types), or None when the MISP type is not supported
    """
    type_map = {
        "yara": ["yara"],
        "md5": ["file-md5"],
        "sha1": ["file-sha1"],
        "sha256": ["file-sha256"],
        "filename": ["file-name"],
        "pdb": ["pdb-path"],
        "filename|md5": ["file-name", "file-md5"],
        "filename|sha1": ["file-name", "file-sha1"],
        "filename|sha256": ["file-name", "file-sha256"],
        "ip-src": ["ipv4-addr"],
        "ip-dst": ["ipv4-addr"],
        "hostname": ["domain"],
        "domain": ["domain"],
        "domain|ip": ["domain", "ipv4-addr"],
        "url": ["url"],
        "windows-service-name": ["windows-service-name"],
        "windows-service-displayname": ["windows-service-display-name"],
        "windows-scheduled-task": ["windows-scheduled-task"],
    }
    if type not in type_map:
        # Unsupported MISP type — same implicit None as before.
        return None
    resolved_types = type_map[type]
    # Composite types split their value on "|"; simple types keep it whole.
    parts = value.split("|") if len(resolved_types) == 2 else [value]
    resolved = []
    for index, resolved_type in enumerate(resolved_types):
        part = parts[index]
        # "ipv4-addr" is a placeholder: the concrete IP version is detected
        # per value (the source type does not distinguish v4 from v6).
        if resolved_type == "ipv4-addr":
            resolved_type = self.detect_ip_version(part)
        resolved.append({"type": resolved_type, "value": part})
    return resolved
def detect_ip_version(self, value):
    """Return "ipv6-addr" or "ipv4-addr" for a textual IP address.

    Fixed: the previous ``len(value) > 16`` heuristic misclassified short
    IPv6 forms such as "::1" or "2001:db8::1" as IPv4. Textual IPv6
    always contains ":" while dotted-quad IPv4 never does, so test for
    the separator instead (IPv4 tops out at 15 characters, so every value
    the old check labelled IPv6 is still labelled IPv6).
    """
    if ":" in value:
        return "ipv6-addr"
    return "ipv4-addr"


def resolve_markings(self, tags, with_default=True):
    """Map MISP ``tlp:*`` tags to STIX TLP marking definitions.

    :param tags: list of MISP tag dicts with a "name" key
    :param with_default: when truthy, fall back to TLP_WHITE if no TLP
        tag is present
    :return: list of marking-definition objects
    """
    markings = []
    for tag in tags:
        if tag["name"] == "tlp:white":
            markings.append(TLP_WHITE)
        if tag["name"] == "tlp:green":
            markings.append(TLP_GREEN)
        if tag["name"] == "tlp:amber":
            markings.append(TLP_AMBER)
        if tag["name"] == "tlp:red":
            markings.append(TLP_RED)
    if len(markings) == 0 and with_default:
        markings.append(TLP_WHITE)
    return markings


def resolve_tags(self, tags):
    """Convert MISP tags into OpenCTI tag dicts.

    TLP tags and the galaxy tags already handled by prepare_elements are
    skipped. For the remaining tags the value part is extracted from
    ``key="value"`` or ``key:value`` forms.

    :param tags: list of MISP tag dicts with a "name" key
    :return: list of {"tag_type", "value", "color"} dicts
    """
    opencti_tags = []
    for tag in tags:
        if (
            tag["name"] != "tlp:white"
            and tag["name"] != "tlp:green"
            and tag["name"] != "tlp:amber"
            and tag["name"] != "tlp:red"
            and not tag["name"].startswith("misp-galaxy:mitre-threat-actor")
            and not tag["name"].startswith("misp-galaxy:mitre-intrusion-set")
            and not tag["name"].startswith("misp-galaxy:mitre-malware")
            and not tag["name"].startswith("misp-galaxy:mitre-attack-pattern")
            and not tag["name"].startswith("misp-galaxy:mitre-tool")
            and not tag["name"].startswith("misp-galaxy:tool")
            and not tag["name"].startswith("misp-galaxy:ransomware")
            and not tag["name"].startswith("misp-galaxy:malpedia")
        ):
            tag_value = tag["name"]
            if '="' in tag["name"]:
                # key="value" form: keep the quoted value without its
                # trailing quote.
                tag_value_split = tag["name"].split('="')
                tag_value = tag_value_split[1][:-1].strip()
            elif ":" in tag["name"]:
                # key:value form: keep the value part.
                tag_value_split = tag["name"].split(":")
                tag_value = tag_value_split[1].strip()
            if tag_value.isdigit():
                # NOTE(review): for "key:123" this re-derives the same
                # numeric value; it looks like the intent was to fall back
                # to the full tag name for purely numeric values — confirm
                # before changing. Behavior kept as-is.
                if ":" in tag["name"]:
                    tag_value_split = tag["name"].split(":")
                    tag_value = tag_value_split[1].strip()
                else:
                    tag_value = tag["name"]
            opencti_tags.append(
                {"tag_type": "MISP", "value": tag_value, "color": "#008ac8"}
            )
    return opencti_tags
class UnpacMeConnector:
    """OpenCTI internal-enrichment connector for UnpacMe.

    Listens for Artifact observables, submits them to the UnpacMe unpacking
    service, uploads extracted payloads back into OpenCTI and links them to
    the original Artifact with labels and relationships.
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Identity used as creator of every object sent by this connector.
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="UnpacMe",
            description="UnpacMe",
        )["standard_id"]
        self.octi_api_url = get_config_variable(
            "OPENCTI_URL", ["opencti", "url"], config
        )
        # Get URL and private from config, use to instantiate the client
        user_agent = get_config_variable(
            "UNPAC_ME_USER_AGENT",
            ["unpac_me", "user_agent"],
            config,
        )
        api_key = get_config_variable(
            "UNPAC_ME_API_KEY",
            ["unpac_me", "api_key"],
            config,
        )
        self.private = get_config_variable(
            "UNPAC_ME_PRIVATE",
            ["unpac_me", "private"],
            config,
        )
        self.unpacme_client = UnpacMeApi(api_key=api_key, user_agent=user_agent)
        # Other config settings
        self.family_color = get_config_variable(
            "UNPAC_ME_FAMILY_COLOR",
            ["unpac_me", "family_color"],
            config,
        )
        # NOTE(review): the env var name here is also UNPAC_ME_FAMILY_COLOR
        # although the YAML path is "tag_color" — likely meant
        # UNPAC_ME_TAG_COLOR; left unchanged to avoid breaking deployments
        # that rely on the current name.
        self.default_tag_color = get_config_variable(
            "UNPAC_ME_FAMILY_COLOR",
            ["unpac_me", "tag_color"],
            config,
        )
        self.less_noise = get_config_variable(
            "UNPAC_ME_LESS_NOISE",
            ["unpac_me", "less_noise"],
            config,
        )
        self.max_tlp = get_config_variable(
            "UNPAC_ME_MAX_TLP",
            ["unpac_me", "max_tlp"],
            config,
        )

    def _process_results(self, observable, results):
        """Attach UnpacMe analysis results to *observable*.

        Uploads every extracted file as an Artifact, labels it with the
        identified malware families, relates it to the original observable
        and ships everything as one STIX bundle.

        :param observable: the enriched OpenCTI observable dict
        :param results: raw UnpacMe results JSON
        :return: status string for the worker
        """
        bundle_objects = []
        unpack_id = results["id"]
        # Create external reference
        analysis_url = f"https://www.unpac.me/results/{unpack_id}"
        external_reference = self.helper.api.external_reference.create(
            source_name="UnpacMe Results",
            url=analysis_url,
            description="UnpacMe Results",
        )
        self.helper.api.stix_cyber_observable.add_external_reference(
            id=observable["id"],
            external_reference_id=external_reference["id"],
        )
        # Create default labels
        extracted_label = self.helper.api.label.create(
            value="extracted", color=self.default_tag_color
        )
        # Parse the results
        label_ids = []
        for result_dict in results["results"]:
            sha256 = result_dict["hashes"]["sha256"]
            # If less noise, check to ensure the files were identified as malware
            if self.less_noise:
                self.helper.log_info("Less noise is enabled.")
                if not result_dict["malware_id"]:
                    self.helper.log_info(
                        f"Skipping upload of {sha256} as it had no matching family."
                    )
                    continue
            # Download the file
            file_contents = self.unpacme_client.download(sha256=sha256)
            # Upload as Artifact to OpenCTI
            mime_type = magic.from_buffer(file_contents, mime=True)
            kwargs = {
                "file_name": sha256,
                "data": file_contents,
                "mime_type": mime_type,
                "x_opencti_description": "UnpacMe extracted file.",
            }
            response = self.helper.api.stix_cyber_observable.upload_artifact(**kwargs)
            # Create Relationship between original and newly uploaded Artifact
            relationship = Relationship(
                id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                relationship_type="related-to",
                created_by_ref=self.identity,
                source_ref=response["standard_id"],
                target_ref=observable["standard_id"],
            )
            bundle_objects.append(relationship)
            # Attach default "extracted" label (skip when the upload
            # deduplicated to the original observable itself).
            if response["id"] != observable["id"]:
                self.helper.api.stix_cyber_observable.add_label(
                    id=response["id"], label_id=extracted_label["id"]
                )
            # If found malware ids, attach as labels
            for malware_id_dict in result_dict["malware_id"]:
                family_label = self.helper.api.label.create(
                    value=malware_id_dict["name"], color=self.family_color
                )
                self.helper.api.stix_cyber_observable.add_label(
                    id=response["id"], label_id=family_label["id"]
                )
                label_ids.append(family_label["id"])
        # Attach all identified family labels to the original observable.
        # BUGFIX: this loop previously attached family_label["id"] (the last
        # family created above) on every iteration instead of the collected
        # label_id, so only one family ever reached the original Artifact.
        for label_id in label_ids:
            self.helper.api.stix_cyber_observable.add_label(
                id=observable["id"], label_id=label_id
            )
        # Serialize and send all bundles
        if bundle_objects:
            bundle = Bundle(objects=bundle_objects, allow_custom=True).serialize()
            bundles_sent = self.helper.send_stix2_bundle(bundle)
            return f"Sent {len(bundles_sent)} stix bundle(s) for worker import"
        else:
            return "Nothing to attach"

    def _process_file(self, observable):
        """Download the observable's first attached file, submit it to
        UnpacMe, poll until the analysis finishes and process the results.

        :raises ValueError: when no file is attached or UnpacMe fails
        """
        if not observable["importFiles"]:
            raise ValueError(f"No files found for {observable['observable_value']}")
        # Build the URI to download the file
        file_id = observable["importFiles"][0]["id"]
        file_uri = f"{self.octi_api_url}/storage/get/{file_id}"
        file_content = self.helper.api.fetch_opencti_file(file_uri, True)
        # Submit sample for analysis
        upload = self.unpacme_client.upload(data=file_content, private=self.private)
        # Wait for the analysis to finish
        while True:
            response = self.unpacme_client.status(upload=upload)
            if response == UnpacMeStatus.COMPLETE:
                break
            elif response == UnpacMeStatus.FAIL:
                raise ValueError(f"UnpacMe failed to analyze {file_id}")
            time.sleep(20)
        # Analysis is complete, get the results
        results = self.unpacme_client.results(upload=upload)
        results = results.raw_json
        self.helper.log_info(f"Analysis complete, processing results: {results}...")
        return self._process_results(observable, results)

    def _process_observable(self, observable):
        """Dispatch the observable by entity type (only Artifact supported)."""
        self.helper.log_info(
            "Processing the observable " + observable["observable_value"]
        )
        # If File, Artifact
        if observable["entity_type"] == "Artifact":
            return self._process_file(observable)
        else:
            raise ValueError(
                f"Failed to process observable, {observable['entity_type']} is not a supported entity type."
            )

    def _process_message(self, data):
        """Connector entry point: enforce TLP, then enrich the observable."""
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_cyber_observable.read(id=entity_id)
        if observable is None:
            raise ValueError(
                "Observable not found "
                "(may be linked to data seggregation, check your group and permissions)"
            )
        # Extract TLP
        tlp = "TLP:WHITE"
        for marking_definition in observable["objectMarking"]:
            if marking_definition["definition_type"] == "TLP":
                tlp = marking_definition["definition"]
        if not OpenCTIConnectorHelper.check_max_tlp(tlp, self.max_tlp):
            raise ValueError(
                "Do not send any data, TLP of the observable is greater than MAX TLP"
            )
        return self._process_observable(observable)

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
class CyberThreatCoalition:
    """External-import connector fetching the COVID-19 Cyber Threat
    Coalition blacklists (domain/ip/url/hash) and importing them as
    indicators/observables plus one global report."""

    # OpenCTI observable key path per resolver type.
    _OBSERVABLE_PATH = {
        "Domain-Name": ["value"],
        "IPv4-Addr": ["value"],
        "File_sha256": ["hashes", "SHA-256"],
        "File_sha1": ["hashes", "SHA-1"],
        "File_md5": ["hashes", "MD5"],
        "Url": ["value"],
    }

    # STIX pattern template per resolver type.
    _INDICATOR_PATTERN = {
        "Domain-Name": "[domain-name:value = '{}']",
        "IPv4-Addr": "[ipv4-addr:value = '{}']",
        "File_sha256": "[file:hashes.SHA-256 = '{}']",
        "File_sha1": "[file:hashes.SHA-1 = '{}']",
        "File_md5": "[file:hashes.MD5 = '{}']",
        "Url": "[url:value = '{}']",
    }

    _STATE_LAST_RUN = "last_run"

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cyber_threat_coalition_interval = get_config_variable(
            "CYBER_THREAT_COALITION_INTERVAL",
            ["cyber-threat-coalition", "interval_sec"],
            config,
            True,
        )
        self.cyber_threat_coalition_base_url = get_config_variable(
            "CYBER_THREAT_COALITION_BASE_URL",
            ["cyber-threat-coalition", "base_url"],
            config,
            False,
        )
        self.cyber_threat_coalition_create_indicators = get_config_variable(
            "CYBER_THREAT_COALITION_CREATE_INDICATORS",
            ["cyber-threat-coalition", "create_indicators"],
            config,
        )
        self.cyber_threat_coalition_create_observables = get_config_variable(
            "CYBER_THREAT_COALITION_CREATE_OBSERVABLES",
            ["cyber-threat-coalition", "create_observables"],
            config,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self) -> int:
        """Polling interval in seconds."""
        return int(self.cyber_threat_coalition_interval)

    @staticmethod
    def get_hash_type(hash_value):
        """Classify a hex digest by length; returns None when unrecognized."""
        if re.match(r"^[0-9a-fA-F]{32}$", hash_value):
            return "File_md5"
        elif re.match(r"^[0-9a-fA-F]{40}$", hash_value):
            return "File_sha1"
        elif re.match(r"^[0-9a-fA-F]{64}$", hash_value):
            return "File_sha256"

    def fetch_and_send(self):
        """Download all blacklists, build a STIX bundle and send it.

        :return: the OpenCTI work id for this run
        :raises Exception: when a blacklist endpoint does not return 200
        """
        timestamp = int(time.time())
        now = datetime.utcfromtimestamp(timestamp)
        friendly_name = "Cyber Threat Coalition run @ " + now.strftime(
            "%Y-%m-%d %H:%M:%S"
        )
        work_id = self.helper.api.work.initiate_work(
            self.helper.connect_id, friendly_name
        )
        bundle_objects = list()
        # create an identity for the coalition team
        organization = stix2.Identity(
            id=OpenCTIStix2Utils.generate_random_stix_id("identity"),
            name="Cyber Threat Coalition Team",
            identity_class="organization",
            description="Team of Experts collecting and sharing pandemic related "
            "cyber threat intelligence during the COVID-19 crisis time",
        )
        # add organization in bundle
        bundle_objects.append(organization)
        report_object_refs = list()
        for collection in ["domain", "ip", "url", "hash"]:
            # fetch backlist
            url = self.cyber_threat_coalition_base_url + "/" + str(collection) + ".txt"
            response = requests.get(url=url)
            if response.status_code != 200:
                # BUGFIX: the message was passed as a bare format template with
                # extra Exception args and never interpolated.
                raise Exception(
                    "Unable to fetch {0} blacklist, server returned status: {1}".format(
                        collection, response.status_code
                    )
                )
            pattern_type = "stix"
            labels = ["COVID-19", "malicious-activity"]
            # parse content
            for data in response.iter_lines(decode_unicode=True):
                observable_type = None
                observable_resolver = None
                if data and not data.startswith("#"):
                    if collection == "domain":
                        observable_resolver = "Domain-Name"
                        observable_type = "Domain-Name"
                    elif collection == "ip":
                        observable_resolver = "IPv4-Addr"
                        observable_type = "IPv4-Addr"
                    elif collection == "url":
                        observable_resolver = "Url"
                        observable_type = "Url"
                        data = urllib.parse.quote(data, "/:")
                    elif collection == "hash":
                        # BUGFIX: get_hash_type() was called without its
                        # required argument (TypeError at runtime).
                        observable_resolver = self.get_hash_type(data)
                        observable_type = "File"
                if observable_resolver is None or observable_type is None:
                    # BUGFIX: was `return`, which aborted the whole import on
                    # the first comment/blank line or unrecognized hash; skip
                    # the line instead.
                    continue
                indicator = None
                if self.cyber_threat_coalition_create_indicators:
                    indicator = stix2.Indicator(
                        id=OpenCTIStix2Utils.generate_random_stix_id("indicator"),
                        name=data,
                        pattern_type=pattern_type,
                        pattern=self._INDICATOR_PATTERN[observable_resolver].format(
                            data
                        ),
                        labels=labels,
                        created_by_ref=organization,
                        object_marking_refs=[stix2.TLP_WHITE],
                        custom_properties={
                            "x_opencti_main_observable_type": observable_type,
                        },
                    )
                    bundle_objects.append(indicator)
                    report_object_refs.append(indicator["id"])
                if self.cyber_threat_coalition_create_observables:
                    observable = SimpleObservable(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "x-opencti-simple-observable"
                        ),
                        key=observable_type
                        + "."
                        + ".".join(self._OBSERVABLE_PATH[observable_resolver]),
                        value=data,
                        labels=labels,
                        created_by_ref=organization,
                        object_marking_refs=[stix2.TLP_WHITE],
                    )
                    bundle_objects.append(observable)
                    report_object_refs.append(observable["id"])
                    if indicator is not None:
                        relationship = stix2.Relationship(
                            id=OpenCTIStix2Utils.generate_random_stix_id(
                                "relationship"
                            ),
                            relationship_type="based-on",
                            created_by_ref=organization,
                            source_ref=indicator.id,
                            target_ref=observable.id,
                        )
                        bundle_objects.append(relationship)
                        report_object_refs.append(relationship["id"])
        # create a global threat report
        report_uuid = "report--552b3ae6-8522-409d-8b72-a739bc1926aa"
        report_external_reference = stix2.ExternalReference(
            source_name="Cyber Threat Coalition",
            url="https://www.cyberthreatcoalition.org",
            external_id="COVID19-CTC",
        )
        if report_object_refs:
            stix_report = stix2.Report(
                id=report_uuid,
                name="COVID-19 Cyber Threat Coalition (CTC) BlackList",
                type="report",
                description="This report represents the whole COVID-19 CTC blacklist.",
                published=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
                created_by_ref=organization,
                object_marking_refs=[stix2.TLP_WHITE],
                labels=labels,
                external_references=[report_external_reference],
                object_refs=report_object_refs,
            )
            # add report in bundle
            bundle_objects.append(stix_report)
        # create stix bundle
        bundle = stix2.Bundle(objects=bundle_objects)
        # send data
        self.helper.send_stix2_bundle(
            bundle=bundle.serialize(), update=self.update_existing_data, work_id=work_id
        )
        return work_id

    def _load_state(self) -> Dict[str, Any]:
        """Return the persisted connector state, or {} when unset."""
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        """True when the connector is due to run again."""
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= self.get_interval()

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        """Main loop: run fetch_and_send whenever the interval has elapsed."""
        self.helper.log_info("Fetching Cyber Threat Coalition vetted blacklists...")
        while True:
            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()
                self.helper.log_info(f"Loaded state: {current_state}")
                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    # fetch data and send as stix bundle
                    work_id = self.fetch_and_send()
                    new_state = current_state.copy()
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()
                    message = f"Run done. Storing new state: {new_state}"
                    self.helper.log_info(message)
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.set_state(new_state)
                    self.helper.log_info(
                        f"State stored, next run in: {self.get_interval()} seconds"
                    )
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        f"Connector will not run, next run in: {new_interval} seconds"
                    )
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as ex:
                self.helper.log_error(str(ex))
                time.sleep(60)
class OpenCTI:
    """External-import connector pulling the OpenCTI reference datasets
    (sectors and geography bundles) on a daily-style interval."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        if os.path.isfile(config_path):
            config = yaml.load(open(config_path), Loader=yaml.SafeLoader)
        else:
            config = {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.opencti_sectors_file_url = get_config_variable(
            "CONFIG_SECTORS_FILE_URL", ["config", "sectors_file_url"], config
        )
        self.opencti_geography_file_url = get_config_variable(
            "CONFIG_GEOGRAPHY_FILE_URL", ["config", "geography_file_url"], config
        )
        self.opencti_interval = get_config_variable(
            "CONFIG_INTERVAL", ["config", "interval"], config, True
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        """Configured interval (days) expressed in seconds."""
        return int(self.opencti_interval) * 60 * 60 * 24

    def _send_dataset(self, dataset_url, work_id):
        """Download one dataset bundle and forward it; failures are logged
        so one broken dataset does not block the other."""
        try:
            raw_bundle = urllib.request.urlopen(dataset_url).read()
            self.helper.send_stix2_bundle(
                raw_bundle.decode("utf-8"),
                entities_types=self.helper.connect_scope,
                update=self.update_existing_data,
                work_id=work_id,
            )
        except Exception as e:
            self.helper.log_error(str(e))

    def run(self):
        """Main loop: import both datasets whenever the interval elapsed."""
        self.helper.log_info("Fetching OpenCTI datasets...")
        while True:
            try:
                timestamp = int(time.time())
                state = self.helper.get_state()
                last_run = None
                if state is not None and "last_run" in state:
                    last_run = state["last_run"]
                    self.helper.log_info(
                        "Connector last run: "
                        + datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    self.helper.log_info("Connector has never run")
                # Due when never run, or more than interval-1 days elapsed.
                is_due = last_run is None or (
                    (timestamp - last_run)
                    > ((int(self.opencti_interval) - 1) * 60 * 60 * 24)
                )
                if is_due:
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "OpenCTI datasets run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S"
                    )
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name
                    )
                    self._send_dataset(self.opencti_sectors_file_url, work_id)
                    self._send_dataset(self.opencti_geography_file_url, work_id)
                    # Store the current timestamp as a last run
                    message = "Connector successfully run, storing last_run as " + str(
                        timestamp
                    )
                    self.helper.log_info(message)
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(
                        "Last_run stored, next run in: "
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + " days"
                    )
                else:
                    remaining = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: "
                        + str(round(remaining / 60 / 60 / 24, 2))
                        + " days"
                    )
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class TheHive:
    """External-import connector converting TheHive cases into OpenCTI
    incidents with their observables and sightings."""

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.thehive_url = get_config_variable(
            "THEHIVE_URL", ["thehive", "url"], config
        )
        self.thehive_api_key = get_config_variable(
            "THEHIVE_API_KEY", ["thehive", "api_key"], config
        )
        self.thehive_check_ssl = get_config_variable(
            "THEHIVE_CHECK_SSL", ["thehive", "check_ssl"], config, False, True
        )
        self.thehive_organization_name = get_config_variable(
            "THEHIVE_ORGANIZATION_NAME", ["thehive", "organization_name"], config
        )
        # Default import horizon: "now" at startup.
        self.thehive_import_from_date = get_config_variable(
            "THEHIVE_IMPORT_FROM_DATE",
            ["thehive", "import_from_date"],
            config,
            False,
            datetime.utcfromtimestamp(int(time.time())).strftime("%Y-%m-%d %H:%M:%S"),
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name=self.thehive_organization_name,
            description=self.thehive_organization_name,
        )
        self.thehive_api = TheHiveApi(
            self.thehive_url, self.thehive_api_key, cert=self.thehive_check_ssl
        )

    def generate_case_bundle(self, case):
        """Convert one TheHive case into a serialized STIX bundle.

        Builds an x-opencti-incident from the case, one simple observable
        (plus a related-to relationship) per mapped case observable, and a
        sighting for observables flagged as sighted.

        :param case: TheHive case JSON dict
        :return: serialized STIX bundle string
        """
        markings = []
        if case["tlp"] == 0:
            markings.append(TLP_WHITE)
        if case["tlp"] == 1:
            markings.append(TLP_GREEN)
        if case["tlp"] == 2:
            markings.append(TLP_AMBER)
        if case["tlp"] == 3:
            markings.append(TLP_RED)
        if len(markings) == 0:
            markings.append(TLP_WHITE)
        bundle_objects = []
        incident = StixXOpenCTIIncident(
            id=OpenCTIStix2Utils.generate_random_stix_id("x-opencti-incident"),
            name=case["title"],
            description=case["description"],
            # TheHive timestamps are in milliseconds.
            first_seen=datetime.utcfromtimestamp(int(case["createdAt"]) / 1000).strftime(
                "%Y-%m-%dT%H:%M:%SZ"
            ),
            last_seen=datetime.utcfromtimestamp(int(case["updatedAt"]) / 1000).strftime(
                "%Y-%m-%dT%H:%M:%SZ"
            ),
            object_marking_refs=markings,
            labels=case["tags"] if "tags" in case else [],
            created_by_ref=self.identity["standard_id"],
        )
        bundle_objects.append(incident)
        # Get observables
        observables = self.thehive_api.get_case_observables(case_id=case["id"]).json()
        for observable in observables:
            if observable["dataType"] == "hash":
                # Classify hashes by digest length.
                if len(observable["data"]) == 32:
                    data_type = "file_md5"
                elif len(observable["data"]) == 40:
                    data_type = "file_sha1"
                elif len(observable["data"]) == 64:
                    data_type = "file_sha256"
                else:
                    data_type = "unknown"
            else:
                data_type = observable["dataType"]
            # BUGFIX: use .get() — direct indexing raised KeyError for data
            # types absent from OBSERVABLES_MAPPING (e.g. "unknown"), although
            # the None check below clearly intends to skip them.
            observable_key = OBSERVABLES_MAPPING.get(data_type)
            if observable_key is not None:
                stix_observable = SimpleObservable(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "x-opencti-simple-observable"
                    ),
                    key=observable_key,
                    value=observable["data"],
                    # NOTE(review): assumes every TheHive observable carries a
                    # "message" field — confirm against the API payload.
                    description=observable["message"],
                    # IOCs are scored higher than plain observables.
                    x_opencti_score=80 if observable["ioc"] else 50,
                    object_marking_refs=markings,
                    labels=observable["tags"] if "tags" in observable else [],
                    created_by_ref=self.identity["standard_id"],
                    x_opencti_create_indicator=observable["ioc"],
                )
                stix_observable_relation = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                    relationship_type="related-to",
                    created_by_ref=self.identity["standard_id"],
                    source_ref=stix_observable.id,
                    target_ref=incident.id,
                    object_marking_refs=markings,
                )
                bundle_objects.append(stix_observable)
                bundle_objects.append(stix_observable_relation)
                if observable["sighted"]:
                    # STIX sightings need a sighting_of_ref; a fixed fake
                    # indicator id is used and the real target is carried in
                    # x_opencti_sighting_of_ref.
                    fake_indicator_id = (
                        "indicator--c1034564-a9fb-429b-a1c1-c80116cc8e1e"
                    )
                    stix_sighting = Sighting(
                        id=OpenCTIStix2Utils.generate_random_stix_id("sighting"),
                        first_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000)
                        ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        # Sighting window is arbitrarily one hour long.
                        last_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000 + 3600)
                        ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        where_sighted_refs=[self.identity["standard_id"]],
                        sighting_of_ref=fake_indicator_id,
                        custom_properties={
                            "x_opencti_sighting_of_ref": stix_observable.id
                        },
                    )
                    bundle_objects.append(stix_sighting)
        bundle = Bundle(objects=bundle_objects).serialize()
        return bundle

    def run(self):
        """Main loop: poll TheHive for cases updated since the last run and
        import each as a bundle."""
        self.helper.log_info("Starting TheHive Connector...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_case_date" in current_state:
                    last_case_date = current_state["last_case_date"]
                    self.helper.log_info(
                        "Connector last_case_date: "
                        + datetime.utcfromtimestamp(last_case_date).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    last_case_date = parse(self.thehive_import_from_date).timestamp()
                    self.helper.log_info("Connector has no last_case_date")
                self.helper.log_info(
                    "Get cases since last run ("
                    + datetime.utcfromtimestamp(last_case_date).strftime(
                        "%Y-%m-%d %H:%M:%S"
                    )
                    + ")"
                )
                # Cases updated since last run, or with new tasks/artifacts
                # (TheHive expects milliseconds).
                query = Or(
                    Gt("updatedAt", int(last_case_date * 1000)),
                    Child("case_task", Gt("createdAt", int(last_case_date * 1000))),
                    Child("case_artifact", Gt("createdAt", int(last_case_date * 1000))),
                )
                cases = self.thehive_api.find_cases(
                    query=query, sort="updatedAt", range="0-100"
                ).json()
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "TheHive run @ " + now.strftime("%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name
                )
                try:
                    for case in cases:
                        stix_bundle = self.generate_case_bundle(case)
                        self.helper.send_stix2_bundle(
                            stix_bundle,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as a last run
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp
                )
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
                current_state = self.helper.get_state()
                if current_state is None:
                    current_state = {"last_case_date": timestamp}
                else:
                    current_state["last_case_date"] = timestamp
                self.helper.set_state(current_state)
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
class Sekoia(object):
    """SEKOIA.IO import connector.

    Polls the SEKOIA.IO Intelligence Center collection endpoint with a
    cursor, resolves referenced objects (identities, markings, relationship
    endpoints), maps SEKOIA sectors/locations to OpenCTI's own ones, and
    forwards everything to OpenCTI as STIX2 bundles.
    """

    # Page size requested from the Intelligence Center API.
    limit = 200

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Cache of identities / marking definitions already fetched or loaded
        # from the bundled data sets, keyed by STIX id.
        self._cache = {}
        # Extra config
        self.base_url = self.get_config("base_url", config,
                                        "https://api.sekoia.io")
        self.start_date: str = self.get_config("start_date", config, None)
        self.collection = self.get_config(
            "collection", config, "d6092c37-d8d7-45c3-8aff-c4dc26030608")
        self.create_observables = self.get_config("create_observables", config,
                                                  True)
        self.helper.log_info("Setting up api key")
        self.api_key = self.get_config("api_key", config)
        if not self.api_key:
            self.helper.log_error("API key is Missing")
            raise ValueError("API key is Missing")
        self._load_data_sets()
        self.helper.log_info("All datasets has been loaded")

    def run(self):
        """Main loop: consume the feed forever, persisting the cursor."""
        self.helper.log_info("Starting SEKOIA.IO connector")
        state = self.helper.get_state() or {}
        cursor = state.get("last_cursor", self.generate_first_cursor())
        self.helper.log_info(f"Starting with {cursor}")
        while True:
            friendly_name = "SEKOIA run @ " + datetime.utcnow().strftime(
                "%Y-%m-%d %H:%M:%S")
            work_id = self.helper.api.work.initiate_work(
                self.helper.connect_id, friendly_name)
            try:
                cursor = self._run(cursor, work_id)
                message = f"Connector successfully run, cursor updated to {cursor}"
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                self.helper.api.work.to_processed(work_id,
                                                  "Connector is stopping")
                exit(0)
            except Exception as ex:
                # In case of error try to get the last updated cursor
                # since `_run` updates it after every successful request
                state = self.helper.get_state() or {}
                cursor = state.get("last_cursor", cursor)
                self.helper.log_error(str(ex))
                message = f"Connector encountered an error, cursor updated to {cursor}"
                self.helper.api.work.to_processed(work_id, message)
            time.sleep(60)

    @staticmethod
    def get_config(name: str, config, default: Any = None):
        """Read a setting from env (`SEKOIA_<NAME>`) or the YAML config."""
        env_name = f"SEKOIA_{name.upper()}"
        result = get_config_variable(env_name, ["sekoia", name], config)
        return result or default

    def get_collection_url(self):
        return urljoin(self.base_url, "v2/inthreat/collections",
                       self.collection, "objects")

    def get_object_url(self, ids: Iterable):
        return urljoin(self.base_url, "v2/inthreat/objects", ",".join(ids))

    def get_relationship_url(self, ids: Iterable):
        return urljoin(self.base_url, "v2/inthreat/relationships",
                       ",".join(ids))

    def get_file_url(self, item_id: str, file_hash: str):
        return urljoin(self.base_url, "v2/inthreat/objects", item_id, "files",
                       file_hash)

    def generate_first_cursor(self) -> str:
        """
        Generate the first cursor to interrogate the API
        so we don't start at the beginning.

        Defaults to one hour ago; honors `start_date` when it parses.
        The cursor is the base64 of an ISO timestamp suffixed with "Z".
        """
        start = f"{(datetime.utcnow() - timedelta(hours=1)).isoformat()}Z"
        if self.start_date:
            try:
                start = f"{parse(self.start_date).isoformat()}Z"
            except ParserError:
                # Unparsable start_date: silently keep the 1-hour default.
                pass
        return base64.b64encode(start.encode("utf-8")).decode("utf-8")

    @staticmethod
    def chunks(items, chunk_size):
        """
        Yield successive n-sized chunks from items.
        """
        for i in range(0, len(items), chunk_size):
            yield items[i:i + chunk_size]

    def _run(self, cursor, work_id):
        """Consume pages until caught up; returns the last cursor reached."""
        while True:
            params = {"limit": self.limit, "cursor": cursor}
            data = self._send_request(self.get_collection_url(), params)
            if not data:
                return cursor
            cursor = data[
                "next_cursor"] or cursor  # In case next_cursor is None
            items = data["items"]
            if not items:
                return cursor
            # Enrich then strip Intelligence-Center-specific fields.
            items = self._retrieve_references(items)
            self._add_main_observable_type_to_indicators(items)
            if self.create_observables:
                self._add_create_observables_to_indicators(items)
            items = self._clean_ic_fields(items)
            self._add_files_to_items(items)
            bundle = self.helper.stix2_create_bundle(items)
            try:
                self.helper.send_stix2_bundle(bundle,
                                              update=True,
                                              work_id=work_id)
            except RecursionError:
                # Very large bundles can blow the split recursion; send whole.
                self.helper.send_stix2_bundle(bundle,
                                              update=True,
                                              work_id=work_id,
                                              bypass_split=True)
            # Persist progress after each successful page so a crash resumes
            # from here (run() re-reads this state on error).
            self.helper.set_state({"last_cursor": cursor})
            if len(items) < self.limit:
                # We got the last results
                return cursor

    def _clean_ic_fields(self, items: List[Dict]) -> List[Dict]:
        """
        Remove fields specific to the Intelligence Center
        that will not add value in OpenCTI
        """
        return [{
            field: value
            for field, value in item.items()
            if not self._field_to_ignore(field)
        } for item in items]

    @staticmethod
    def _field_to_ignore(field: str) -> bool:
        """True for x_ic/x_inthreat reference fields and the ignore list."""
        to_ignore = [
            "x_ic_impacted_locations",
            "x_ic_impacted_sectors",
        ]
        return ((field.startswith("x_ic") or field.startswith("x_inthreat"))
                and (field.endswith("ref")
                     or field.endswith("refs"))) or field in to_ignore

    @staticmethod
    def _add_create_observables_to_indicators(items: List[Dict]):
        # Ask OpenCTI to create observables alongside each indicator.
        for item in items:
            if item.get("type") == "indicator":
                item["x_opencti_create_observables"] = True

    @staticmethod
    def _add_main_observable_type_to_indicators(items: List[Dict]):
        # Map the first SEKOIA observable type to OpenCTI's main type field.
        for item in items:
            if (item.get("type") == "indicator"
                    and item.get("x_ic_observable_types") is not None
                    and len(item.get("x_ic_observable_types")) > 0):
                stix_type = item.get("x_ic_observable_types")[0]
                item["x_opencti_main_observable_type"] = (
                    OpenCTIStix2Utils.stix_observable_opencti_type(stix_type))

    def _retrieve_references(self,
                             items: List[Dict],
                             current_depth: int = 0) -> List[Dict]:
        """
        Retrieve the references that appears in the given items.

        To avoid having an infinite recursion a safe guard has been
        implemented.
        """
        if current_depth == 5:
            # Safe guard to avoid infinite recursion if an object was not found
            # for example
            return items

        items = self._update_mapped_refs(items)
        to_fetch = self._get_missing_refs(items)
        # Serve from cache first; only hit the API for the remainder.
        for ref in list(to_fetch):
            if ref in self._cache:
                items.append(self._cache[ref])
                to_fetch.remove(ref)
        if not to_fetch:
            return items

        objects_to_fetch = [
            i for i in to_fetch if not i.startswith("relationship--")
        ]
        items += self._retrieve_by_ids(objects_to_fetch, self.get_object_url)

        relationships_to_fetch = [
            i for i in to_fetch if i.startswith("relationship--")
        ]
        items += self._retrieve_by_ids(relationships_to_fetch,
                                       self.get_relationship_url)
        # Newly fetched objects may themselves reference missing objects.
        return self._retrieve_references(items, current_depth + 1)

    def _get_missing_refs(self, items: List[Dict]) -> Set:
        """
        Get the object's references that are missing
        """
        ids = {item["id"] for item in items}
        refs = set()
        for item in items:
            refs.update(item.get("object_marking_refs", []))
            if item.get("created_by_ref"):
                refs.add(item["created_by_ref"])
            if item["type"] == "report":
                object_refs = [
                    ref for ref in item.get("object_refs", [])
                    if not self._is_mapped_ref(ref)
                ]
                refs.update(object_refs)
            if item["type"] == "relationship":
                if not self._is_mapped_ref(item["source_ref"]):
                    refs.add(item["source_ref"])
                if not self._is_mapped_ref(item["target_ref"]):
                    refs.add(item["target_ref"])
        return refs - ids

    def _is_mapped_ref(self, ref: str) -> bool:
        """
        Whether or not the reference is a mapped one.
        """
        return (ref in self._geography_mapping.values()
                or ref in self._sectors_mapping.values())

    def _update_mapped_refs(self, items: List[Dict]):
        """
        Update references that are mapped between SEKOIA and OpenCTI.

        This way we will be able to create links with OpenCTI own sectors
        and locations.
        """
        for item in items:
            if item.get("object_marking_refs"):
                item["object_marking_refs"] = self._replace_mapped_refs(
                    item["object_marking_refs"])
            if item.get("object_refs"):
                item["object_refs"] = self._replace_mapped_refs(
                    item["object_refs"])
            if item.get("source_ref"):
                item["source_ref"] = self._get_mapped_ref(item["source_ref"])
            if item.get("target_ref"):
                item["target_ref"] = self._get_mapped_ref(item["target_ref"])
        return items

    def _replace_mapped_refs(self, refs: List):
        # In-place substitution of each mapped reference.
        for i, ref in enumerate(refs):
            refs[i] = self._get_mapped_ref(ref)
        return refs

    def _get_mapped_ref(self, ref: str):
        # Return the OpenCTI id for a mapped SEKOIA id, else the id unchanged.
        if ref in self._geography_mapping:
            return self._geography_mapping[ref]
        if ref in self._sectors_mapping:
            return self._sectors_mapping[ref]
        return ref

    def _retrieve_by_ids(self, ids, url_callback):
        """
        Fetch the items for the given ids.
        """
        items = []
        for chunk in self.chunks(ids, 40):
            url = url_callback(chunk)
            res = self._send_request(url)
            if not res:
                continue
            if "items" in res:
                items.extend(res["items"])
                for item in res["items"]:
                    self._clean_and_add_to_cache(item)
            if "data" in res:
                items.append(res["data"])
                self._clean_and_add_to_cache(res["data"])
        return items

    def _clean_and_add_to_cache(self, item):
        """
        Add item to the cache
        only if it is an identity or a marking definition
        """
        if item["id"].startswith(
                "marking-definition--") or item["id"].startswith("identity--"):
            if item["id"].startswith("marking-definition--"):
                # Markings must not reference other markings in OpenCTI.
                item.pop("object_marking_refs", None)
            self._cache[item["id"]] = item

    def _send_request(self, url, params=None, binary=False):
        """
        Sends the HTTP request and handle the errors
        """
        try:
            headers = {"Authorization": f"Bearer {self.api_key}"}
            res = requests.get(url, params=params, headers=headers)
            res.raise_for_status()
            if binary:
                return res.content
            return res.json()
        except RequestException as ex:
            # BUGFIX: `requests.Response.__bool__` is False for 4xx/5xx
            # statuses, so `if ex.response:` skipped this branch exactly when
            # an HTTP error response was attached. Compare against None.
            if ex.response is not None:
                error = f"Request failed with status: {ex.response.status_code}"
                self.helper.log_error(error)
            else:
                self.helper.log_error(str(ex))
            return None

    def _load_data_sets(self):
        """Load SEKOIA↔OpenCTI mappings and pre-populate the cache."""
        # Mapping between SEKOIA sectors/locations and OpenCTI ones
        self.helper.log_info("Loading locations mapping")
        with open("./data/geography_mapping.json") as fp:
            self._geography_mapping: Dict = json.load(fp)

        self.helper.log_info("Loading sectors mapping")
        with open("./data/sectors_mapping.json") as fp:
            self._sectors_mapping: Dict = json.load(fp)

        # Adds OpenCTI sectors/locations to cache
        self.helper.log_info("Loading OpenCTI sectors")
        with open("./data/sectors.json") as fp:
            objects = json.load(fp)["objects"]
            for sector in objects:
                self._clean_and_add_to_cache(sector)

        self.helper.log_info("Loading OpenCTI locations")
        with open("./data/geography.json") as fp:
            for geography in json.load(fp)["objects"]:
                self._clean_and_add_to_cache(geography)

    def _add_files_to_items(self, items: List[Dict]):
        # Download each uploaded file and inline it base64-encoded for OpenCTI.
        for item in items:
            if not item.get("x_inthreat_uploaded_files"):
                continue
            item["x_opencti_files"] = []
            for file in item.get("x_inthreat_uploaded_files", []):
                url = self.get_file_url(item["id"], file["sha256"])
                data = self._send_request(url, binary=True)
                if data:
                    item["x_opencti_files"].append({
                        "name": file["file_name"],
                        "data": base64.b64encode(data).decode("utf-8"),
                        "mime_type": file.get("mime_type", "text/plain"),
                    })