Example #1
class ImportFileStix:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        old_token = self.helper.api.get_token()
        token = None
        if "token" in data:
            token = data["token"]
        file_path = data["file_path"]
        update = data["update"]
        file_uri = self.helper.opencti_url + file_path
        self.helper.log_info("Importing the file " + file_uri)
        file_content = self.helper.api.fetch_opencti_file(file_uri)
        if token:
            self.helper.api.set_token(token)
        bundles_sent = self.helper.send_stix2_bundle(file_content, None,
                                                     update)
        self.helper.api.set_token(old_token)
        return [
            "Sent " + str(len(bundles_sent)) +
            " stix bundle(s) for worker import"
        ]

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
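
A connector class like this is normally started from a small __main__ entry point in the same module. A minimal sketch, assuming the class above is importable as written (the back-off delay and error handling are illustrative, not taken from the example):

import sys
import time

if __name__ == "__main__":
    try:
        connector = ImportFileStix()
        connector.start()
    except Exception as e:
        # Print the startup error and pause briefly so a container supervisor can restart us
        print(e)
        time.sleep(10)
        sys.exit(0)
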
Example #2
class ImportFileStix:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader
                           ) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        file_path = data['file_path']
        update = data['update']
        file_uri = self.helper.opencti_url + file_path
        self.helper.log_info('Importing the file ' + file_uri)
        file_content = self.helper.api.fetch_opencti_file(file_uri)
        bundles_sent = self.helper.send_stix2_bundle(file_content, None,
                                                     update)
        return [
            'Sent ' + str(len(bundles_sent)) +
            ' stix bundle(s) for worker import'
        ]

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
Example #3
class ImportFileStix:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        file_fetch = data["file_fetch"]
        file_uri = self.helper.opencti_url + file_fetch
        self.helper.log_info("Importing the file " + file_uri)
        file_content = self.helper.api.fetch_opencti_file(file_uri)
        if data["file_mime"] == "text/xml":
            initialize_options()
            file_content = elevate(file_content)
        bundles_sent = self.helper.send_stix2_bundle(file_content)
        return "Sent " + str(len(bundles_sent)) + " stix bundle(s) for worker import"

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
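
Example #3 also accepts STIX 1.x XML uploads and converts them with the stix2-elevator package before forwarding the bundle. A standalone sketch of that conversion step, assuming stix2-elevator 4.x where a single elevate() call handles string input (the input file name is illustrative):

from stix2elevator import elevate
from stix2elevator.options import initialize_options

# Set up the elevator with default options, as the connector does before converting
initialize_options()

# elevate() accepts STIX 1.x XML content and returns the equivalent STIX 2 bundle as JSON text
with open("stix1_package.xml") as f:  # illustrative file name
    stix2_bundle_json = elevate(f.read())
print(stix2_bundle_json)
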
Example #4
class InternalImportConnector:
    def __init__(self, config_file_path: str, api_client: OpenCTIApiClient,
                 data: Dict):
        # set OPENCTI settings from fixture
        os.environ["OPENCTI_URL"] = api_client.api_url
        os.environ["OPENCTI_TOKEN"] = api_client.api_token
        os.environ["OPENCTI_SSL_VERIFY"] = str(api_client.ssl_verify)

        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})

        self.helper = OpenCTIConnectorHelper(config)
        self.data = data

    def _process_message(self, data: Dict) -> str:
        file_fetch = data["file_fetch"]
        file_uri = self.helper.opencti_url + file_fetch

        # Downloading and saving file to connector
        self.helper.log_info("Importing the file " + file_uri)

        observable = SimpleObservable(
            id=OpenCTIStix2Utils.generate_random_stix_id(
                "x-opencti-simple-observable"),
            key=self.data["simple_observable_key"],
            value=self.data["simple_observable_value"],
        )

        bundle_objects = [observable]
        entity_id = data.get("entity_id", None)
        report = self.helper.api.report.read(id=entity_id)

        report = Report(
            id=report["standard_id"],
            name=report["name"],
            description=report["description"],
            published=self.helper.api.stix2.format_date(report["published"]),
            report_types=report["report_types"],
            object_refs=bundle_objects,
        )

        bundle_objects.append(report)
        # create stix bundle
        bundle = Bundle(objects=bundle_objects).serialize()
        # send data
        self.helper.send_stix2_bundle(bundle=bundle)
        return "foo"

    def stop(self):
        self.helper.stop()

    def start(self):
        try:
            self.helper.listen(self._process_message)
        except pika.exceptions.AMQPConnectionError:
            self.stop()
            raise ValueError(
                "Connector was not able to establish the connection to RabbitMQ"
            )
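
Across the import examples, _process_message receives a plain dict from the platform; the only keys these snippets read are file_path/file_fetch, file_mime, update, token and entity_id. An illustrative payload matching that shape (all values are invented):

# Shape inferred from the fields read in Examples #1-#4; every value here is made up
sample_import_message = {
    "file_fetch": "/storage/get/import/global/report.json",  # appended to helper.opencti_url
    "file_mime": "application/json",
    "entity_id": "report--00000000-0000-0000-0000-000000000000",
    "update": True,
    "token": None,  # optional per-request API token, see Example #1
}
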
Example #5
class LastInfoSec:
    def __init__(self):
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        self.lastinfosec_url = get_config_variable("CONFIG_LIS_URL",
                                                   ["lastinfosec", "api_url"],
                                                   config)
        self.lastinfosec_apikey = get_config_variable(
            "CONFIG_LIS_APIKEY", ["lastinfosec", "api_key"], config)
        self.opencti_url = get_config_variable("OPENCTI_URL",
                                               ["opencti", "url"], config)
        self.opencti_id = get_config_variable("OPENCTI_TOKEN",
                                              ["opencti", "token"], config)
        self.update_existing_data = True
        self.api = OpenCTIApiClient(self.opencti_url, self.opencti_id)

    def run(self):
        self.helper.log_info("Fetching lastinfosec datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                now = datetime.datetime.utcfromtimestamp(timestamp)
                friendly_name = "MITRE run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                lastinfosec_data = requests.get(
                    self.lastinfosec_url + self.lastinfosec_apikey).json()
                if "message" in lastinfosec_data.keys():
                    for data in lastinfosec_data["message"]:
                        sdata = json.dumps(data)
                        self.helper.send_stix2_bundle(sdata, work_id=work_id)
                    # Store the current timestamp as a last run
                    message = (
                        "Connector successfully run, storing last_run as {0}".
                        format(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(3500)
                else:
                    message = (
                        "Connector successfully run, storing last_run as {0}".
                        format(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(300)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error("run:" + str(e))
                time.sleep(60)
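
Example #5 (like most connectors below) resolves settings through pycti's get_config_variable, which prefers an environment variable and falls back to a path inside the loaded YAML config. A rough, hypothetical equivalent of that lookup, for illustration only and not pycti's actual implementation:

import os

def lookup_config(env_var, yaml_path, config, default=None):
    # Hypothetical helper: the environment variable wins, otherwise walk the YAML path
    env_value = os.getenv(env_var)
    if env_value is not None:
        return env_value
    value = config
    for key in yaml_path:
        if not isinstance(value, dict) or key not in value:
            return default
        value = value[key]
    return value

# e.g. lookup_config("CONFIG_LIS_URL", ["lastinfosec", "api_url"], config)
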
Example #6
class ExportFileStix:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader
                           ) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)

    def _process_message(self, data):
        entity_id = data['entity_id']
        file_name = data['file_name']
        entity_type = data['entity_type']
        export_type = data['export_type']
        self.helper.log_info('Exporting: ' + entity_type + '/' + export_type +
                             '(' + entity_id + ') to ' + file_name)
        bundle = self.helper.api.stix2_export_entity(entity_type, entity_id,
                                                     export_type)
        json_bundle = json.dumps(bundle, indent=4)
        self.helper.log_info('Uploading: ' + entity_type + '/' + export_type +
                             '(' + entity_id + ') to ' + file_name)
        self.helper.api.push_stix_domain_entity_export(entity_id, file_name,
                                                       json_bundle)
        self.helper.log_info('Export done: ' + entity_type + '/' +
                             export_type + '(' + entity_id + ') to ' +
                             file_name)
        return ['Export done']

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
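
All of these connectors load a config.yml sitting next to the script and hand it to OpenCTIConnectorHelper. A minimal, illustrative file for a connector like Example #6, shown as a YAML string parsed in Python (the key layout follows the usual OpenCTI connector configuration; every value is a placeholder):

import yaml

# Illustrative config.yml contents; values are placeholders, not working credentials
EXAMPLE_CONFIG_YML = """
opencti:
  url: 'http://localhost:8080'
  token: 'ChangeMe'
connector:
  id: 'ChangeMe'
  type: 'INTERNAL_EXPORT_FILE'
  name: 'ExportFileStix'
  scope: 'application/json'
  confidence_level: 15
  log_level: 'info'
"""

config = yaml.load(EXAMPLE_CONFIG_YML, Loader=yaml.FullLoader)
print(config["opencti"]["url"])
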
Example #7
class ThreatBusConnector(object):
    def __init__(self):
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )

        # Connector configuration
        self.entity_name = get_config_variable(
            "CONNECTOR_ENTITY_NAME", ["connector", "entity_name"], config
        )
        self.entity_desc = get_config_variable(
            "CONNECTOR_ENTITY_DESCRIPTION", ["connector", "entity_description"], config
        )
        self.forward_all_iocs = get_config_variable(
            "CONNECTOR_FORWARD_ALL_IOCS", ["connector", "forward_all_iocs"], config
        )
        self.threatbus_entity = None

        # Custom configuration for Threat Bus ZeroMQ-App plugin endpoint
        self.threatbus_zmq_host = get_config_variable(
            "THREATBUS_ZMQ_HOST", ["threatbus", "zmq_host"], config
        )
        self.threatbus_zmq_port = get_config_variable(
            "THREATBUS_ZMQ_PORT", ["threatbus", "zmq_port"], config
        )

        # Helper initialization
        self.opencti_helper = OpenCTIConnectorHelper(config)
        zmq_endpoint = f"{self.threatbus_zmq_host}:{self.threatbus_zmq_port}"
        self.threatbus_helper = ThreatBusConnectorHelper(
            zmq_endpoint,
            self._report_sighting,
            self.opencti_helper.log_info,
            self.opencti_helper.log_error,
            subscribe_topic="stix2/sighting",
            publish_topic="stix2/indicator",
        )

    def _get_threatbus_entity(self) -> dict:
        """
        Get the Threat Bus OpenCTI entity. Creates a new entity if it does not
        exist yet.
        """

        # Use cached:
        if self.threatbus_entity is not None:
            return self.threatbus_entity

        # Try and fetch existing:
        threatbus_entity = (
            self.opencti_helper.api.stix_domain_object.get_by_stix_id_or_name(
                name=self.entity_name
            )
        )
        if threatbus_entity is not None and threatbus_entity.get("id", None):
            self.threatbus_entity = threatbus_entity
            return self.threatbus_entity

        # Create a new one:
        self.opencti_helper.log_info(
            f"Creating new OpenCTI Threat Bus entity '{self.entity_name}'"
        )
        self.threatbus_entity = self.opencti_helper.api.identity.create(
            type="Organization",
            name=self.entity_name,
            description=self.entity_desc,
        )
        return self.threatbus_entity

    def _report_sighting(self, msg: str):
        """
        Converts a JSON string to a STIX-2 Sighting and reports it to OpenCTI.
        @param msg The JSON string
        """
        try:
            sighting: Sighting = parse(msg, allow_custom=True)
        except Exception as e:
            self.opencti_helper.log_error(
                f"Error parsing message from Threat Bus. Expected a STIX-2 Sighting: {e}"
            )
            return
        if type(sighting) is not Sighting:
            self.opencti_helper.log_error(
                f"Error parsing message from Threat Bus. Expected a STIX-2 Sighting: {sighting}"
            )
            return
        entity_id = self._get_threatbus_entity().get("id", None)
        resp = self.opencti_helper.api.stix_sighting_relationship.create(
            fromId=sighting.sighting_of_ref,
            toId=entity_id,
            createdBy=entity_id,
            first_seen=sighting.first_seen.astimezone().strftime("%Y-%m-%dT%H:%M:%SZ")
            if sighting.get("first_seen")
            else None,
            last_seen=sighting.last_seen.astimezone().strftime("%Y-%m-%dT%H:%M:%SZ")
            if sighting.get("last_seen")
            else None,
            confidence=50,
            externalReferences=[sighting.sighting_of_ref],
            count=1,
        )
        self.opencti_helper.log_info(f"Created sighting {resp}")

    def _map_to_threatbus(
        self, data: dict, opencti_action: str
    ) -> Union[Indicator, None]:
        """
        Inspects the given OpenCTI data point and either returns a valid STIX-2
        Indicator or None.
        @param data A dict object with OpenCTI SSE data
        @param opencti_action A string indicating what happened to this item
            (either `create`, `update` or `delete`)
        @return a STIX-2 Indicator or None
        """
        opencti_id: str = data.get("x_opencti_id", None)
        if not opencti_id:
            self.opencti_helper.log_error(
                "Cannot process data without 'x_opencti_id' field"
            )
            return
        indicator: dict = self.opencti_helper.api.indicator.read(id=opencti_id)
        if not indicator:
            # we are only interested in indicators at this time
            return
        detection_enabled: bool = indicator.get("x_opencti_detection", False)
        if not detection_enabled and self.forward_all_iocs is not True:
            # only propagate indicators that are toggled for detection or the
            # user enabled forwarding of all indicators regardless of the toggle
            return
        # overwrite custom OpenCTI ID
        indicator["id"] = indicator.get("standard_id")
        if opencti_action == "update":
            indicator[
                ThreatBusSTIX2Constants.X_THREATBUS_UPDATE.value
            ] = Operation.EDIT.value
        if opencti_action == "delete":
            indicator[
                ThreatBusSTIX2Constants.X_THREATBUS_UPDATE.value
            ] = Operation.REMOVE.value
        return Indicator(**indicator, allow_custom=True)

    def _process_message(self, sse_msg: Event):
        """
        Invoked for every incoming SSE message from the OpenCTI endpoint
        @param sse_msg: the received SSE Event
        """
        try:
            data: dict = json.loads(sse_msg.data).get("data", None)
            if not data:
                return
            indicator = self._map_to_threatbus(data, sse_msg.event)
            if not indicator:
                return
            self.threatbus_helper.send(indicator.serialize())

        except Exception as e:
            self.opencti_helper.log_error(
                f"Error forwarding indicator to Threat Bus: {e}"
            )

    def start(self):
        self.opencti_helper.log_info("Starting Threat Bus connector")

        # Fork a new Thread to communicate with Threat Bus
        self.threatbus_helper.start()
        atexit.register(self.threatbus_helper.stop)

        # Send the main loop into a busy loop for processing OpenCTI events
        self.opencti_helper.listen_stream(self._process_message)
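
The stream callback in Example #7 only needs the "data" object of each OpenCTI SSE event and the x_opencti_id field inside it. An illustrative event body parsed the same way _process_message does (structure inferred from the fields accessed above, values invented):

import json

# Illustrative SSE event body; only the fields read by Example #7 matter here
sample_event_body = json.dumps(
    {
        "data": {
            "type": "indicator",
            "x_opencti_id": "11111111-1111-1111-1111-111111111111",
        }
    }
)
data = json.loads(sample_event_body).get("data", None)
print(data["x_opencti_id"])
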
Example #8
class MalwareBazaarRecentAdditions:
    """
    Process recent additions to Malware Bazaar
    """

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="MalwareBazaar",
            description="For more info, see https://bazaar.abuse.ch/about/",
        )

        self.api_url = get_config_variable(
            "MALWAREBAZAAR_RECENT_ADDITIONS_API_URL",
            ["malwarebazaar_recent_additions", "api_url"],
            config,
        )

        self.cooldown_seconds = get_config_variable(
            "MALWAREBAZAAR_RECENT_ADDITIONS_COOLDOWN_SECONDS",
            ["malwarebazaar_recent_additions", "cooldown_seconds"],
            config,
        )
        self.cooldown_seconds = int(self.cooldown_seconds)

        self.labels_color = get_config_variable(
            "MALWAREBAZAAR_RECENT_ADDITIONS_LABELS_COLOR",
            ["malwarebazaar_recent_additions", "labels_color"],
            config,
        )

        self.include_tags = get_config_variable(
            "MALWAREBAZAAR_RECENT_ADDITIONS_INCLUDE_TAGS",
            ["malwarebazaar_recent_additions", "include_tags"],
            config,
        )
        if self.include_tags:
            self.include_tags = self.include_tags.split(",")

        self.include_reporters = get_config_variable(
            "MALWAREBAZAAR_RECENT_ADDITIONS_INCLUDE_REPORTERS",
            ["malwarebazaar_recent_additions", "include_reporters"],
            config,
        )
        if self.include_reporters:
            self.include_reporters = self.include_reporters.split(",")

        labels = get_config_variable(
            "MALWAREBAZAAR_RECENT_ADDITIONS_LABELS",
            ["malwarebazaar_recent_additions", "labels"],
            config,
        )
        self.label_ids = []

        # Create default labels
        if labels:
            labels = labels.split(",")
            for label in labels:
                created_label = self.helper.api.label.create(
                    value=label, color=self.labels_color
                )
                self.label_ids.append(created_label["id"])

    def run(self):
        self.helper.log_info("Starting MalwareBazaar Recent Additions Connector")
        while True:
            try:

                recent_additions_list = self.get_recent_additions()
                for recent_additions_dict in recent_additions_list:

                    self.helper.log_info(f"Processing: {recent_additions_dict}")
                    sha256 = recent_additions_dict["sha256_hash"]
                    reporter = recent_additions_dict["reporter"]
                    file_name = recent_additions_dict["file_name"]
                    tags = (
                        recent_additions_dict["tags"]
                        if recent_additions_dict["tags"]
                        else []
                    )

                    # Skip the artifact if its reporter is not in the include list
                    if self.include_reporters:
                        if reporter not in self.include_reporters:
                            self.helper.log_info(
                                f"Skipping {sha256} as it was from a reporter not in the included list: {reporter}"
                            )
                            continue

                    if self.include_tags:
                        if not any(x in tags for x in self.include_tags):
                            self.helper.log_info(
                                f"Skipping {sha256} as it did not contain a tag in the included list."
                            )
                            continue

                    # If the artifact already exists in OpenCTI skip it
                    if self.artifact_exists_opencti(sha256):
                        self.helper.log_info(
                            f'Skipping Artifact with "{sha256}" as it already exists in OpenCTI.'
                        )
                        continue

                    # Download the artifact and unzip with default "infected" password
                    file_contents = self.download_unzip(sha256)

                    # Upload the artifact to OpenCTI
                    response = self.upload_artifact_opencti(
                        file_name,
                        file_contents,
                        f"Uploaded to MalwareBazaar by Twitter user: {reporter}.",
                    )

                    # Create external reference to MalwareBazaar report
                    external_reference = self.helper.api.external_reference.create(
                        source_name="MalwareBazaar Recent Additions",
                        url=f"https://bazaar.abuse.ch/sample/{sha256}/",
                        description="MalwareBazaar Recent Additions",
                    )
                    self.helper.api.stix_cyber_observable.add_external_reference(
                        id=response["id"],
                        external_reference_id=external_reference["id"],
                    )

                    # Attach all default labels if any
                    for label_id in self.label_ids:
                        self.helper.api.stix_cyber_observable.add_label(
                            id=response["id"], label_id=label_id
                        )

                    # Attach all tags as labels if any
                    for tag in tags:
                        label = self.helper.api.label.create(
                            value=tag,
                            color=self.labels_color,
                        )
                        self.helper.api.stix_cyber_observable.add_label(
                            id=response["id"], label_id=label["id"]
                        )

                self.helper.log_info(
                    f"Re-checking for new additions in {self.cooldown_seconds} seconds..."
                )
                time.sleep(self.cooldown_seconds)

            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)

            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(self.cooldown_seconds)

    def get_recent_additions(self):
        """
        Get recent additions to MalwareBazaar.

        See https://bazaar.abuse.ch/api/#latest_additions

        returns: a list of the additions made to MalwareBazaar in the
                 last 60 minutes.
        """

        data = {"query": "get_recent", "selector": "time"}
        resp = requests.post(self.api_url, data=data)

        # Handle the response data

        recent_additions_list = resp.json()
        return recent_additions_list["data"]

    def download_unzip(self, sha256):
        """
        Download and unzip a sample from MalwareBazaar.

        sha256: a str representing the sample's sha256.
        returns: a bytes object containing the contents of the file
        """
        data = {"query": "get_file", "sha256_hash": sha256}
        resp = requests.post(self.api_url, data=data)
        zip_contents = resp.content
        zip_obj = io.BytesIO(zip_contents)
        zip_file = pyzipper.AESZipFile(zip_obj)
        zip_file.setpassword(b"infected")
        file_name = zip_file.namelist()[0]
        return zip_file.read(file_name)

    def artifact_exists_opencti(self, sha256):
        """
        Determine whether or not an Artifact already exists in OpenCTI.

        sha256: a str representing the sha256 of the artifact's file contents
        returns: a bool indicating whether the Artifact already exists
        """

        response = self.helper.api.stix_cyber_observable.read(
            filters=[{"key": "hashes_SHA256", "values": [sha256]}]
        )

        if response:
            return True
        return False

    def upload_artifact_opencti(self, file_name, file_contents, description):
        """
        Upload a file to OpenCTI.

        file_name: a str representing the name of the file
        file_contents: a bytes object representing the file contents
        description: a str representing the description for the upload

        returns: response of upload
        """

        mime_type = magic.from_buffer(file_contents, mime=True)

        kwargs = {
            "file_name": file_name,
            "data": file_contents,
            "mime_type": mime_type,
            "x_opencti_description": description,
        }

        return self.helper.api.stix_cyber_observable.upload_artifact(**kwargs)
Example #9
class Misp:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.misp_url = get_config_variable("MISP_URL", ["misp", "url"], config)
        self.misp_key = get_config_variable("MISP_KEY", ["misp", "key"], config)
        self.misp_ssl_verify = get_config_variable(
            "MISP_SSL_VERIFY", ["misp", "ssl_verify"], config
        )
        self.misp_create_report = get_config_variable(
            "MISP_CREATE_REPORTS", ["misp", "create_reports"], config
        )
        self.misp_report_class = (
            get_config_variable("MISP_REPORT_CLASS", ["misp", "report_class"], config)
            or "MISP Event"
        )
        self.misp_import_from_date = get_config_variable(
            "MISP_IMPORT_FROM_DATE", ["misp", "import_from_date"], config
        )
        self.misp_import_tags = get_config_variable(
            "MISP_IMPORT_TAGS", ["misp", "import_tags"], config
        )
        self.misp_interval = get_config_variable(
            "MISP_INTERVAL", ["misp", "interval"], config, True
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

        # Initialize MISP
        self.misp = ExpandedPyMISP(
            url=self.misp_url, key=self.misp_key, ssl=self.misp_ssl_verify, debug=False
        )

    def get_interval(self):
        return int(self.misp_interval) * 60

    def run(self):
        while True:
            timestamp = int(time.time())
            # Get the last_run datetime
            current_state = self.helper.get_state()
            if current_state is not None and "last_run" in current_state:
                last_run = datetime.utcfromtimestamp(
                    current_state["last_run"]
                ).strftime("%Y-%m-%d %H:%M:%S")
                self.helper.log_info("Connector last run: " + last_run)
            else:
                last_run = None
                self.helper.log_info("Connector has never run")

            # If import with tags
            complex_query_tag = None
            if self.misp_import_tags is not None:
                or_parameters = []
                for tag in self.misp_import_tags.split(","):
                    or_parameters.append(tag.strip())
                complex_query_tag = self.misp.build_complex_query(
                    or_parameters=or_parameters
                )

            # If import from a specific date
            import_from_date = None
            if self.misp_import_from_date is not None:
                import_from_date = parse(self.misp_import_from_date).strftime(
                    "%Y-%m-%d %H:%M:%S"
                )

            # Prepare the query
            kwargs = dict()
            if complex_query_tag is not None:
                kwargs["tags"] = complex_query_tag
            if last_run is not None:
                kwargs["timestamp"] = last_run
            elif import_from_date is not None:
                kwargs["date_from"] = import_from_date

            # Query with pagination of 50
            current_page = 1
            while True:
                kwargs["limit"] = 50
                kwargs["page"] = current_page
                self.helper.log_info(
                    "Fetching MISP events with args: " + json.dumps(kwargs)
                )
                events = []
                try:
                    events = self.misp.search("events", **kwargs)
                except Exception as e:
                    self.helper.log_error(str(e))
                    try:
                        events = self.misp.search("events", **kwargs)
                    except Exception as e:
                        self.helper.log_error(str(e))

                self.helper.log_info("MISP returned " + str(len(events)) + " events.")
                # Break if no more result
                if len(events) == 0:
                    break
                try:
                    self.process_events(events)
                except Exception as e:
                    self.helper.log_error(str(e))
                current_page += 1
            self.helper.set_state({"last_run": timestamp})
            time.sleep(self.get_interval())

    def process_events(self, events):
        for event in events:
            self.helper.log_info("Processing event " + event["Event"]["uuid"])
            ### Default variables
            added_markings = []
            added_entities = []
            added_object_refs = []

            ### Pre-process
            # Author
            author = Identity(
                name=event["Event"]["Orgc"]["name"], identity_class="organization"
            )
            # Elements
            event_elements = self.prepare_elements(event["Event"]["Galaxy"], author)
            # Markings
            if "Tag" in event["Event"]:
                event_markings = self.resolve_markings(event["Event"]["Tag"])
            else:
                event_markings = [TLP_WHITE]
            # Tags
            event_tags = []
            if "Tag" in event["Event"]:
                event_tags = self.resolve_tags(event["Event"]["Tag"])
            # ExternalReference
            event_external_reference = ExternalReference(
                source_name=self.helper.connect_name,
                external_id=event["Event"]["uuid"],
                url=self.misp_url + "/events/view/" + event["Event"]["uuid"],
            )

            ### Get indicators
            event_external_references = [event_external_reference]
            indicators = []
            # Get attributes
            for attribute in event["Event"]["Attribute"]:
                indicator = self.process_attribute(
                    author, event_elements, event_markings, [], attribute
                )
                if attribute["type"] == "link":
                    event_external_references.append(
                        ExternalReference(
                            source_name=attribute["category"],
                            external_id=attribute["uuid"],
                            url=attribute["value"],
                        )
                    )
                if indicator is not None:
                    indicators.append(indicator)
            # Get attributes of objects
            objects_relationships = []
            for object in event["Event"]["Object"]:
                attribute_external_references = []
                for attribute in object["Attribute"]:
                    if attribute["type"] == "link":
                        attribute_external_references.append(
                            ExternalReference(
                                source_name=attribute["category"],
                                external_id=attribute["uuid"],
                                url=attribute["value"],
                            )
                        )
                object_attributes = []
                for attribute in object["Attribute"]:
                    indicator = self.process_attribute(
                        author,
                        event_elements,
                        event_markings,
                        attribute_external_references,
                        attribute,
                    )
                    if indicator is not None:
                        indicators.append(indicator)
                        if (
                            object["meta-category"] == "file"
                            and indicator["indicator"].x_opencti_observable_type
                            in FILETYPES
                        ):
                            object_attributes.append(indicator)
                objects_relationships.extend(
                    self.process_observable_relations(object_attributes, [])
                )

            ### Prepare the bundle
            bundle_objects = [author]
            object_refs = []
            # Add event markings
            for event_marking in event_markings:
                if event_marking["id"] not in added_markings:
                    bundle_objects.append(event_marking)
                    added_markings.append(event_marking["id"])
            # Add event elements
            all_event_elements = (
                event_elements["intrusion_sets"]
                + event_elements["malwares"]
                + event_elements["tools"]
                + event_elements["attack_patterns"]
            )
            for event_element in all_event_elements:
                if event_element["name"] not in added_object_refs:
                    object_refs.append(event_element)
                    added_object_refs.append(event_element["name"])
                if event_element["name"] not in added_entities:
                    bundle_objects.append(event_element)
                    added_entities.append(event_element["name"])
            # Add indicators
            for indicator in indicators:
                if indicator["indicator"]["id"] not in added_object_refs:
                    object_refs.append(indicator["indicator"])
                    added_object_refs.append(indicator["indicator"]["id"])
                if indicator["indicator"]["id"] not in added_entities:
                    bundle_objects.append(indicator["indicator"])
                    added_entities.append(indicator["indicator"]["id"])
                # Add attribute markings
                for attribute_marking in indicator["markings"]:
                    if attribute_marking["id"] not in added_markings:
                        bundle_objects.append(attribute_marking)
                        added_markings.append(attribute_marking["id"])
                # Add attribute elements
                all_attribute_elements = (
                    indicator["attribute_elements"]["intrusion_sets"]
                    + indicator["attribute_elements"]["malwares"]
                    + indicator["attribute_elements"]["tools"]
                    + indicator["attribute_elements"]["attack_patterns"]
                )
                for attribute_element in all_attribute_elements:
                    if attribute_element["name"] not in added_object_refs:
                        object_refs.append(attribute_element)
                        added_object_refs.append(attribute_element["name"])
                    if attribute_element["name"] not in added_entities:
                        bundle_objects.append(attribute_element)
                        added_entities.append(attribute_element["name"])
                # Add attribute relationships
                for relationship in indicator["relationships"]:
                    object_refs.append(relationship)
                    bundle_objects.append(relationship)
            # Add object_relationships
            for object_relationship in objects_relationships:
                bundle_objects.append(object_relationship)

            ### Create the report if needed
            if self.misp_create_report and len(object_refs) > 0:
                report = Report(
                    name=event["Event"]["info"],
                    description=event["Event"]["info"],
                    published=parse(event["Event"]["date"]),
                    created_by_ref=author,
                    object_marking_refs=event_markings,
                    labels=["threat-report"],
                    object_refs=object_refs,
                    external_references=event_external_references,
                    custom_properties={
                        "x_opencti_report_class": self.misp_report_class,
                        "x_opencti_object_status": 2,
                        "x_opencti_tags": event_tags,
                    },
                )
                bundle_objects.append(report)
            bundle = Bundle(objects=bundle_objects).serialize()
            self.helper.log_info("Sending event STIX2 bundle")
            self.helper.send_stix2_bundle(
                bundle, None, self.update_existing_data, False
            )

    def process_attribute(
        self,
        author,
        event_elements,
        event_markings,
        attribute_external_references,
        attribute,
    ):
        try:
            resolved_attributes = self.resolve_type(
                attribute["type"], attribute["value"]
            )
            if resolved_attributes is None:
                return None

            for resolved_attribute in resolved_attributes:
                ### Pre-process
                # Elements
                attribute_elements = self.prepare_elements(attribute["Galaxy"], author)
                # Markings & Tags
                attribute_tags = []
                if "Tag" in attribute:
                    attribute_markings = self.resolve_markings(
                        attribute["Tag"], with_default=False
                    )
                    attribute_tags = self.resolve_tags(attribute["Tag"])
                    if len(attribute_markings) == 0:
                        attribute_markings = event_markings
                else:
                    attribute_markings = event_markings

                ### Create the indicator
                observable_type = resolved_attribute["type"]
                observable_value = resolved_attribute["value"]
                name = resolved_attribute["value"]
                pattern_type = "stix"
                # observable type is yara for instance
                if observable_type in PATTERNTYPES:
                    pattern_type = observable_type
                    observable_type = "Unknown"
                    genuine_pattern = (
                        "[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']"
                    )
                    pattern = observable_value
                    name = (
                        attribute["comment"]
                        if len(attribute["comment"]) > 0
                        else observable_type
                    )
                # observable type is not in stix 2
                elif observable_type not in OPENCTISTIX2:
                    return None
                # observable type is in stix
                else:
                    if "transform" in OPENCTISTIX2[observable_type]:
                        if (
                            OPENCTISTIX2[observable_type]["transform"]["operation"]
                            == "remove_string"
                        ):
                            observable_value = observable_value.replace(
                                OPENCTISTIX2[observable_type]["transform"]["value"], ""
                            )
                    lhs = ObjectPath(
                        OPENCTISTIX2[observable_type]["type"],
                        OPENCTISTIX2[observable_type]["path"],
                    )
                    genuine_pattern = str(
                        ObservationExpression(
                            EqualityComparisonExpression(lhs, observable_value)
                        )
                    )
                    pattern = genuine_pattern

                indicator = Indicator(
                    name=name,
                    description=attribute["comment"],
                    pattern=genuine_pattern,
                    valid_from=datetime.utcfromtimestamp(
                        int(attribute["timestamp"])
                    ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                    labels=["malicious-activity"],
                    created_by_ref=author,
                    object_marking_refs=attribute_markings,
                    external_references=attribute_external_references,
                    custom_properties={
                        "x_opencti_indicator_pattern": pattern,
                        "x_opencti_observable_type": observable_type,
                        "x_opencti_observable_value": observable_value,
                        "x_opencti_pattern_type": pattern_type,
                        "x_opencti_tags": attribute_tags,
                    },
                )

                ### Create the relationships
                relationships = []
                # Event threats
                for threat in (
                    event_elements["intrusion_sets"]
                    + event_elements["malwares"]
                    + event_elements["tools"]
                ):
                    relationships.append(
                        Relationship(
                            relationship_type="indicates",
                            created_by_ref=author,
                            source_ref=indicator.id,
                            target_ref=threat.id,
                            description=attribute["comment"],
                            object_marking_refs=attribute_markings,
                            custom_properties={
                                "x_opencti_first_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_last_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_weight": self.helper.connect_confidence_level,
                            },
                        )
                    )
                # Attribute threats
                for threat in (
                    attribute_elements["intrusion_sets"]
                    + attribute_elements["malwares"]
                    + attribute_elements["tools"]
                ):
                    relationships.append(
                        Relationship(
                            relationship_type="indicates",
                            created_by_ref=author,
                            source_ref=indicator.id,
                            target_ref=threat.id,
                            description=attribute["comment"],
                            object_marking_refs=attribute_markings,
                            custom_properties={
                                "x_opencti_first_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_last_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_weight": self.helper.connect_confidence_level,
                            },
                        )
                    )
                # Event Attack Patterns
                for attack_pattern in event_elements["attack_patterns"]:
                    if len(event_elements["malwares"]) > 0:
                        threats = event_elements["malwares"]
                    elif len(event_elements["intrusion_sets"]) > 0:
                        threats = event_elements["intrusion_sets"]
                    else:
                        threats = []
                    for threat in threats:
                        relationship_uses = Relationship(
                            relationship_type="uses",
                            created_by_ref=author,
                            source_ref=threat.id,
                            target_ref=attack_pattern.id,
                            description=attribute["comment"],
                            object_marking_refs=attribute_markings,
                            custom_properties={
                                "x_opencti_first_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_last_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_weight": self.helper.connect_confidence_level,
                                "x_opencti_ignore_dates": True,
                            },
                        )
                        relationships.append(relationship_uses)
                        relationship_indicates = Relationship(
                            relationship_type="indicates",
                            created_by_ref=author,
                            source_ref=indicator.id,
                            target_ref="malware--fa42a846-8d90-4e51-bc29-71d5b4802168",  # Fake
                            description=attribute["comment"],
                            object_marking_refs=attribute_markings,
                            custom_properties={
                                "x_opencti_first_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_last_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_weight": self.helper.connect_confidence_level,
                                "x_opencti_source_ref": indicator.id,
                                "x_opencti_target_ref": relationship_uses.id,
                            },
                        )
                        relationships.append(relationship_indicates)
                # Attribute Attack Patterns
                for attack_pattern in attribute_elements["attack_patterns"]:
                    if len(attribute_elements["malwares"]) > 0:
                        threats = attribute_elements["malwares"]
                    elif len(attribute_elements["intrusion_sets"]) > 0:
                        threats = attribute_elements["intrusion_sets"]
                    else:
                        threats = []
                    for threat in threats:
                        relationship_uses = Relationship(
                            relationship_type="uses",
                            created_by_ref=author,
                            source_ref=threat.id,
                            target_ref=attack_pattern.id,
                            description=attribute["comment"],
                            object_marking_refs=attribute_markings,
                            custom_properties={
                                "x_opencti_first_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_last_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_weight": self.helper.connect_confidence_level,
                                "x_opencti_ignore_dates": True,
                            },
                        )
                        relationships.append(relationship_uses)
                        relationship_indicates = Relationship(
                            relationship_type="indicates",
                            created_by_ref=author,
                            source_ref=indicator.id,
                            target_ref="malware--fa42a846-8d90-4e51-bc29-71d5b4802168",  # Fake
                            description=attribute["comment"],
                            object_marking_refs=attribute_markings,
                            custom_properties={
                                "x_opencti_first_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_last_seen": datetime.utcfromtimestamp(
                                    int(attribute["timestamp"])
                                ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                                "x_opencti_weight": self.helper.connect_confidence_level,
                                "x_opencti_source_ref": indicator.id,
                                "x_opencti_target_ref": relationship_uses.id,
                                "x_opencti_ignore_dates": True,
                            },
                        )
                        relationships.append(relationship_indicates)

                return {
                    "indicator": indicator,
                    "relationships": relationships,
                    "attribute_elements": attribute_elements,
                    "markings": attribute_markings,
                }
        except Exception:
            # A single malformed attribute should not abort processing of the whole event
            return None

    def process_observable_relations(
        self, object_attributes, result_table, start_element=0
    ):
        if start_element == 0:
            result_table = []
        if len(object_attributes) == 1:
            return []

        for x in range(start_element + 1, len(object_attributes)):
            result_table.append(
                Relationship(
                    relationship_type="corresponds",
                    source_ref=object_attributes[start_element]["indicator"]["id"],
                    target_ref=object_attributes[x]["indicator"]["id"],
                    description="Same file",
                    custom_properties={"x_opencti_ignore_dates": True},
                )
            )
        if start_element != len(object_attributes):
            return self.process_observable_relations(
                object_attributes, result_table, start_element + 1
            )
        else:
            return result_table

    def prepare_elements(self, galaxies, author):
        elements = {
            "intrusion_sets": [],
            "malwares": [],
            "tools": [],
            "attack_patterns": [],
        }
        added_names = []
        for galaxy in galaxies:
            # Get the linked intrusion sets
            if (
                (
                    galaxy["namespace"] == "mitre-attack"
                    and galaxy["name"] == "Intrusion Set"
                )
                or (galaxy["namespace"] == "misp" and galaxy["name"] == "Threat Actor")
                or (
                    galaxy["namespace"] == "misp"
                    and galaxy["name"] == "Microsoft Activity Group actor"
                )
            ):
                for galaxy_entity in galaxy["GalaxyCluster"]:
                    if " - G" in galaxy_entity["value"]:
                        name = galaxy_entity["value"].split(" - G")[0]
                    elif "APT " in galaxy_entity["value"]:
                        name = galaxy_entity["value"].replace("APT ", "APT")
                    else:
                        name = galaxy_entity["value"]
                    if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                        aliases = galaxy_entity["meta"]["synonyms"]
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements["intrusion_sets"].append(
                            IntrusionSet(
                                name=name,
                                labels=["intrusion-set"],
                                description=galaxy_entity["description"],
                                created_by_ref=author,
                                custom_properties={"x_opencti_aliases": aliases},
                            )
                        )
                        added_names.append(name)
            # Get the linked malwares
            if (
                (galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Malware")
                or (galaxy["namespace"] == "misp" and galaxy["name"] == "Tool")
                or (galaxy["namespace"] == "misp" and galaxy["name"] == "Ransomware")
                or (galaxy["namespace"] == "misp" and galaxy["name"] == "Android")
                or (galaxy["namespace"] == "misp" and galaxy["name"] == "Malpedia")
            ):
                for galaxy_entity in galaxy["GalaxyCluster"]:
                    if " - S" in galaxy_entity["value"]:
                        name = galaxy_entity["value"].split(" - S")[0]
                    else:
                        name = galaxy_entity["value"]
                    if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                        aliases = galaxy_entity["meta"]["synonyms"]
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements["malwares"].append(
                            Malware(
                                name=name,
                                labels=["malware"],
                                description=galaxy_entity["description"],
                                created_by_ref=author,
                                custom_properties={"x_opencti_aliases": aliases},
                            )
                        )
                        added_names.append(name)
            # Get the linked tools
            if galaxy["namespace"] == "mitre-attack" and galaxy["name"] == "Tool":
                for galaxy_entity in galaxy["GalaxyCluster"]:
                    if " - S" in galaxy_entity["value"]:
                        name = galaxy_entity["value"].split(" - S")[0]
                    else:
                        name = galaxy_entity["value"]
                    if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                        aliases = galaxy_entity["meta"]["synonyms"]
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements["tools"].append(
                            Tool(
                                name=name,
                                labels=["tool"],
                                description=galaxy_entity["description"],
                                created_by_ref=author,
                                custom_properties={"x_opencti_aliases": aliases},
                            )
                        )
                        added_names.append(name)
            # Get the linked attack_patterns
            if (
                galaxy["namespace"] == "mitre-attack"
                and galaxy["name"] == "Attack Pattern"
            ):
                for galaxy_entity in galaxy["GalaxyCluster"]:
                    if " - T" in galaxy_entity["value"]:
                        name = galaxy_entity["value"].split(" - T")[0]
                    else:
                        name = galaxy_entity["value"]
                    if "meta" in galaxy_entity and "synonyms" in galaxy_entity["meta"]:
                        aliases = galaxy_entity["meta"]["synonyms"]
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements["attack_patterns"].append(
                            AttackPattern(
                                name=name,
                                labels=["attack-pattern"],
                                description=galaxy_entity["description"],
                                created_by_ref=author,
                                custom_properties={
                                    "x_opencti_external_id": galaxy_entity["meta"][
                                        "external_id"
                                    ][0],
                                    "x_opencti_aliases": aliases,
                                },
                            )
                        )
                        added_names.append(name)
        return elements
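
    # Illustrative note (not from the original source): MISP galaxy cluster
    # values embed the MITRE ID after a separator, e.g. a cluster value such
    # as "Emotet - S0367" is split on " - S" so the name becomes "Emotet",
    # and "meta"/"synonyms" (when present) supplies the aliases stored in
    # x_opencti_aliases.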

    def resolve_type(self, type, value):
        types = {
            "yara": ["yara"],
            "md5": ["file-md5"],
            "sha1": ["file-sha1"],
            "sha256": ["file-sha256"],
            "filename": ["file-name"],
            "pdb": ["pdb-path"],
            "filename|md5": ["file-name", "file-md5"],
            "filename|sha1": ["file-name", "file-sha1"],
            "filename|sha256": ["file-name", "file-sha256"],
            "ip-src": ["ipv4-addr"],
            "ip-dst": ["ipv4-addr"],
            "hostname": ["domain"],
            "domain": ["domain"],
            "domain|ip": ["domain", "ipv4-addr"],
            "url": ["url"],
            "windows-service-name": ["windows-service-name"],
            "windows-service-displayname": ["windows-service-display-name"],
            "windows-scheduled-task": ["windows-scheduled-task"],
        }
        if type in types:
            resolved_types = types[type]
            if len(resolved_types) == 2:
                values = value.split("|")
                if resolved_types[0] == "ipv4-addr":
                    type_0 = self.detect_ip_version(values[0])
                else:
                    type_0 = resolved_types[0]
                if resolved_types[1] == "ipv4-addr":
                    type_1 = self.detect_ip_version(values[1])
                else:
                    type_1 = resolved_types[1]
                return [
                    {"type": type_0, "value": values[0]},
                    {"type": type_1, "value": values[1]},
                ]
            else:
                if resolved_types[0] == "ipv4-addr":
                    type_0 = self.detect_ip_version(value)
                else:
                    type_0 = resolved_types[0]
                return [{"type": type_0, "value": value}]
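
    # Illustrative usage (not part of the original code):
    #   resolve_type("domain|ip", "example.com|198.51.100.1")
    #   -> [{"type": "domain", "value": "example.com"},
    #       {"type": "ipv4-addr", "value": "198.51.100.1"}]
    # MISP attribute types missing from the mapping fall through and return None.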

    def detect_ip_version(self, value):
        # IPv6 addresses always contain a colon; a plain length check would
        # misclassify short forms such as "::1".
        if ":" in value:
            return "ipv6-addr"
        else:
            return "ipv4-addr"

    def resolve_markings(self, tags, with_default=True):
        markings = []
        for tag in tags:
            if tag["name"] == "tlp:white":
                markings.append(TLP_WHITE)
            if tag["name"] == "tlp:green":
                markings.append(TLP_GREEN)
            if tag["name"] == "tlp:amber":
                markings.append(TLP_AMBER)
            if tag["name"] == "tlp:red":
                markings.append(TLP_RED)
        if len(markings) == 0 and with_default:
            markings.append(TLP_WHITE)
        return markings
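
    # Illustrative behaviour (not part of the original code): an event tagged
    # "tlp:amber" resolves to [TLP_AMBER]; an event with no TLP tag resolves
    # to [TLP_WHITE] when with_default is True, and to [] otherwise.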

    def resolve_tags(self, tags):
        opencti_tags = []
        for tag in tags:
            if (
                tag["name"] != "tlp:white"
                and tag["name"] != "tlp:green"
                and tag["name"] != "tlp:amber"
                and tag["name"] != "tlp:red"
                and not tag["name"].startswith("misp-galaxy:mitre-threat-actor")
                and not tag["name"].startswith("misp-galaxy:mitre-intrusion-set")
                and not tag["name"].startswith("misp-galaxy:mitre-malware")
                and not tag["name"].startswith("misp-galaxy:mitre-attack-pattern")
                and not tag["name"].startswith("misp-galaxy:mitre-tool")
                and not tag["name"].startswith("misp-galaxy:tool")
                and not tag["name"].startswith("misp-galaxy:ransomware")
                and not tag["name"].startswith("misp-galaxy:malpedia")
            ):
                tag_value = tag["name"]
                if '="' in tag["name"]:
                    tag_value_split = tag["name"].split('="')
                    tag_value = tag_value_split[1][:-1].strip()
                elif ":" in tag["name"]:
                    tag_value_split = tag["name"].split(":")
                    tag_value = tag_value_split[1].strip()
                if tag_value.isdigit():
                    if ":" in tag["name"]:
                        tag_value_split = tag["name"].split(":")
                        tag_value = tag_value_split[1].strip()
                    else:
                        tag_value = tag["name"]
                opencti_tags.append(
                    {"tag_type": "MISP", "value": tag_value, "color": "#008ac8"}
                )
        return opencti_tags
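
    # Illustrative example (not part of the original code): a MISP tag named
    # 'misp-galaxy:banker="Dridex"' becomes
    # {"tag_type": "MISP", "value": "Dridex", "color": "#008ac8"}, while
    # tlp:* tags and the listed misp-galaxy:* prefixes are filtered out.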
Example #10
class FireEye:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.fireeye_api_url = get_config_variable("FIREEYE_API_URL",
                                                   ["fireeye", "api_url"],
                                                   config)
        self.fireeye_api_v3_public = get_config_variable(
            "FIREEYE_API_V3_PUBLIC", ["fireeye", "api_v3_public"], config)
        self.fireeye_api_v3_secret = get_config_variable(
            "FIREEYE_API_V3_SECRET", ["fireeye", "api_v3_secret"], config)
        self.fireeye_collections = get_config_variable(
            "FIREEYE_COLLECTIONS", ["fireeye", "collections"],
            config).split(",")
        self.fireeye_import_start_date = get_config_variable(
            "FIREEYE_IMPORT_START_DATE",
            ["fireeye", "import_start_date"],
            config,
        )
        self.fireeye_interval = get_config_variable("FIREEYE_INTERVAL",
                                                    ["fireeye", "interval"],
                                                    config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.added_after = parse(self.fireeye_import_start_date).timestamp()

        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="FireEye, Inc.",
            description=
            "FireEye is a publicly traded cybersecurity company headquartered in Milpitas, California. It has been involved in the detection and prevention of major cyber attacks. It provides hardware, software, and services to investigate cybersecurity attacks, protect against malicious software, and analyze IT security risks. FireEye was founded in 2004.",
        )

        self.marking = self.helper.api.marking_definition.create(
            definition_type="COMMERCIAL",
            definition="FIREEYE",
            x_opencti_order=99,
            x_opencti_color="#a01526",
        )

        # Init variables
        self.auth_token = None
        self._get_token()

    def get_interval(self):
        return int(self.fireeye_interval) * 60

    def _get_token(self):
        r = requests.post(
            self.fireeye_api_url + "/token",
            auth=HTTPBasicAuth(self.fireeye_api_v3_public,
                               self.fireeye_api_v3_secret),
            data={"grant_type": "client_credentials"},
        )
        if r.status_code != 200:
            raise ValueError("FireEye Authentication failed")
        data = r.json()
        self.auth_token = data.get("access_token")

    def _search(self, stix_id, retry=False):
        time.sleep(3)
        self.helper.log_info("Searching for " + stix_id)
        headers = {
            "authorization": "Bearer " + self.auth_token,
            "accept": "application/vnd.oasis.stix+json; version=2.1",
            "x-app-name": "opencti-connector-4.3.0",
        }
        body = """
            {
                "queries": [
                    {
                        "type": "ENTITY_TYPE",
                        "query": "id = 'ENTITY_ID'"
                    }
                ],
                "include_connected_objects": false
            }
        """
        entity_type = stix_id.split("--")[0]
        if entity_type not in searchable_types:
            return None
        body = body.replace("ENTITY_TYPE",
                            entity_type).replace("ENTITY_ID", stix_id)
        r = requests.post(self.fireeye_api_url + "/collections/search",
                          data=body,
                          headers=headers)
        if r.status_code == 200:
            return r
        elif (r.status_code == 401 or r.status_code == 403) and not retry:
            self._get_token()
            return self._search(stix_id, True)
        elif r.status_code == 204 or r.status_code == 205:
            return None
        elif r.status_code == 401 or r.status_code == 403:
            raise ValueError("Query failed, permission denied")
        else:
            print(r)
            raise ValueError("An unknown error occurred")
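
    # Illustrative example (not part of the original code): for a stix_id of
    # "malware--<uuid>" the ENTITY_TYPE/ENTITY_ID placeholders above expand to
    #   {"queries": [{"type": "malware", "query": "id = 'malware--<uuid>'"}],
    #    "include_connected_objects": false}
    # and a 401/403 response triggers one token refresh before retrying.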

    def _query(self, url, retry=False):
        headers = {
            "authorization": "Bearer " + self.auth_token,
            "accept": "application/vnd.oasis.stix+json; version=2.1",
            "x-app-name": "opencti-connector-4.3.0",
        }
        r = requests.get(url, headers=headers)
        if r.status_code == 200:
            return r
        elif (r.status_code == 401 or r.status_code == 403) and not retry:
            self._get_token()
            return self._query(url, True)
        elif r.status_code == 401 or r.status_code == 403:
            raise ValueError("Query failed, permission denied")
        else:
            raise ValueError("An unknown error occurred")

    def _send_entity(self, bundle, work_id):
        if "objects" in bundle and len(bundle["objects"]) > 0:
            final_objects = []
            for stix_object in bundle["objects"]:
                if stix_object["type"] == "threat-actor":
                    stix_object["type"] = "intrusion-set"
                    stix_object["id"] = stix_object["id"].replace(
                        "threat-actor", "intrusion-set")
                if "created_by_ref" not in stix_object:
                    stix_object["created_by_ref"] = self.identity[
                        "standard_id"]
                if stix_object["type"] != "marking-definition":
                    stix_object["object_marking_refs"] = [
                        "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82"
                    ]
                    stix_object["object_marking_refs"].append(
                        self.marking["standard_id"])
                final_objects.append(stix_object)
            final_bundle = {"type": "bundle", "objects": final_objects}
            self.helper.send_stix2_bundle(
                json.dumps(final_bundle),
                update=self.update_existing_data,
                work_id=work_id,
            )

    def _import_collection(self,
                           collection,
                           last_id_modified_timestamp=None,
                           last_id=None,
                           work_id=None):
        have_next_page = True
        url = None
        last_object = None
        while have_next_page:
            if url is None:
                if last_id_modified_timestamp is not None:
                    url = (self.fireeye_api_url + "/collections/" +
                           collection + "/objects" + "?added_after=" +
                           str(self.added_after) + "&length=100" +
                           "&last_id_modified_timestamp=" +
                           str(last_id_modified_timestamp))
                else:
                    url = (self.fireeye_api_url + "/collections/" +
                           collection + "/objects" + "?added_after=" +
                           str(self.added_after) + "&length=100")
            result = self._query(url)
            parsed_result = json.loads(result.text)
            if ("objects" in parsed_result
                    and len(parsed_result["objects"]) > 0):
                last_object = parsed_result["objects"][-1]
                object_ids = [
                    stix_object["id"]
                    for stix_object in parsed_result["objects"]
                ]
                if last_object["id"] != last_id:
                    final_objects = []
                    for stix_object in parsed_result["objects"]:
                        if stix_object["type"] == "threat-actor":
                            stix_object["type"] = "intrusion-set"
                            stix_object["id"] = stix_object["id"].replace(
                                "threat-actor", "intrusion-set")
                        if stix_object["type"] == "relationship":
                            # If the source_ref is not in the current bundle
                            if stix_object["source_ref"] not in object_ids:
                                # Search entity in OpenCTI
                                opencti_entity = (
                                    self.helper.api.stix_domain_object.read(
                                        id=stix_object["source_ref"]))
                                # If the entity is not found
                                if opencti_entity is None:
                                    # Search the entity in FireEye
                                    fireeye_entity = self._search(
                                        stix_object["source_ref"])
                                    # If the entity is found
                                    if fireeye_entity is not None:
                                        fireeye_entity_decoded = json.loads(
                                            fireeye_entity.text)
                                        # Send the entity before this bundle
                                        self._send_entity(
                                            fireeye_entity_decoded, work_id)
                            stix_object["source_ref"] = stix_object[
                                "source_ref"].replace("threat-actor",
                                                      "intrusion-set")
                            # If the target_ref is not in the current bundle
                            if stix_object["target_ref"] not in object_ids:
                                opencti_entity = (
                                    self.helper.api.stix_domain_object.read(
                                        id=stix_object["target_ref"]))
                                if opencti_entity is None:
                                    fireeye_entity = self._search(
                                        stix_object["target_ref"])
                                    if fireeye_entity is not None:
                                        fireeye_entity_decoded = json.loads(
                                            fireeye_entity.text)
                                        self._send_entity(
                                            fireeye_entity_decoded, work_id)
                            stix_object["target_ref"] = stix_object[
                                "target_ref"].replace("threat-actor",
                                                      "intrusion-set")
                        if ("object_refs" in stix_object
                                and len(stix_object["object_refs"]) > 0):
                            for object_ref in stix_object["object_refs"]:
                                if object_ref not in object_ids:
                                    opencti_entity = (self.helper.api.
                                                      stix_domain_object.read(
                                                          id=object_ref))
                                    if opencti_entity is None:
                                        fireeye_entity = self._search(
                                            object_ref)
                                        if fireeye_entity is not None:
                                            fireeye_entity_decoded = json.loads(
                                                fireeye_entity.text)
                                            self._send_entity(
                                                fireeye_entity_decoded,
                                                work_id)
                        if "created_by_ref" not in stix_object:
                            stix_object["created_by_ref"] = self.identity[
                                "standard_id"]
                        if stix_object["type"] != "marking-definition":
                            stix_object["object_marking_refs"] = [
                                "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82"
                            ]
                            stix_object["object_marking_refs"].append(
                                self.marking["standard_id"])
                        final_objects.append(stix_object)
                    final_bundle = {"type": "bundle", "objects": final_objects}
                    self.helper.send_stix2_bundle(
                        json.dumps(final_bundle),
                        update=self.update_existing_data,
                        work_id=work_id,
                    )
                    headers = result.headers
                    if "Link" in headers:
                        have_next_page = True
                        link = headers["Link"].split(";")
                        url = link[0][1:-1]
                        last_id_modified_timestamp = parse_qs(
                            urlparse(
                                url).query)["last_id_modified_timestamp"][0]
                    else:
                        have_next_page = False
                else:
                    have_next_page = False
        return {
            "last_id_modified_timestamp": last_id_modified_timestamp,
            # last_object is None when the collection returned no objects
            "last_id": last_object["id"]
            if last_object is not None and "id" in last_object else None,
        }
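
    # Illustrative note (not part of the original code): pagination follows the
    # "Link" response header returned by the API, and the returned dict, e.g.
    # {"last_id_modified_timestamp": <timestamp>, "last_id": "<stix id>"},
    # is persisted via the helper state in run() so the next run resumes where
    # this one stopped.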

    def run(self):
        while True:
            try:
                self.helper.log_info("Synchronizing with FireEye API...")
                timestamp = int(time.time())
                now = datetime.datetime.utcfromtimestamp(timestamp)
                friendly_name = "FireEye run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                current_state = self.helper.get_state()
                if (current_state is None
                        or "last_id_modified_timestamp" not in current_state):
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators": None,
                            "reports": None,
                        },
                        "last_id": {
                            "indicators": None,
                            "reports": None,
                        },
                    })
                    current_state = self.helper.get_state()
                last_id_modified_timestamp = current_state[
                    "last_id_modified_timestamp"]
                last_id = current_state["last_id"]
                if "indicators" in self.fireeye_collections:
                    self.helper.log_info(
                        "Get indicators created after " +
                        str(last_id_modified_timestamp["indicators"]))
                    indicators_last = self._import_collection(
                        "indicators",
                        last_id_modified_timestamp["indicators"],
                        last_id["indicators"],
                        work_id,
                    )
                    current_state = self.helper.get_state()
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators":
                            indicators_last["last_id_modified_timestamp"],
                            "reports":
                            current_state["last_id_modified_timestamp"]
                            ["reports"],
                        },
                        "last_id": {
                            "indicators": indicators_last["last_id"],
                            "reports": current_state["last_id"]["reports"],
                        },
                    })
                if "reports" in self.fireeye_collections:
                    self.helper.log_info(
                        "Get reports created after " +
                        str(last_id_modified_timestamp["reports"]))
                    reports_last = self._import_collection(
                        "reports",
                        last_id_modified_timestamp["reports"],
                        last_id["reports"],
                        work_id,
                    )
                    current_state = self.helper.get_state()
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators":
                            current_state["last_id_modified_timestamp"]
                            ["indicators"],
                            "reports":
                            reports_last["last_id_modified_timestamp"],
                        },
                        "last_id": {
                            "indicators":
                            current_state["last_id"]["indicators"],
                            "reports": reports_last["last_id"],
                        },
                    })
                message = "End of synchronization"
                self.helper.api.work.to_processed(work_id, message)
                self.helper.log_info(message)
                time.sleep(self.get_interval())
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
Example #11
class Malpedia:
    """OpenCTI Malpedia main class"""

    _STATE_LAST_RUN = "state_last_run"
    _MALPEDIA_LAST_VERSION = "malpedia_last_version"

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/../config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        # Extra config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.AUTH_KEY = get_config_variable(
            "MALPEDIA_AUTH_KEY", ["malpedia", "auth_key"], config
        )
        self.INTERVAL_SEC = get_config_variable(
            "MALPEDIA_INTERVAL_SEC", ["malpedia", "interval_sec"], config
        )
        self.import_intrusion_sets = get_config_variable(
            "MALPEDIA_IMPORT_INTRUSION_SETS",
            ["malpedia", "import_intrusion_sets"],
            config,
        )
        self.import_yara = get_config_variable(
            "MALPEDIA_IMPORT_YARA", ["malpedia", "import_yara"], config
        )
        self.create_indicators = get_config_variable(
            "MALPEDIA_CREATE_INDICATORS", ["malpedia", "create_indicators"], config
        )
        self.create_observables = get_config_variable(
            "MALPEDIA_CREATE_OBSERVABLES", ["malpedia", "create_observables"], config
        )

        self.helper = OpenCTIConnectorHelper(config)
        self.helper.log_info(f"loaded malpedia config: {config}")

        # Create Malpedia client and importers
        self.client = MalpediaClient(self.AUTH_KEY)

        # If we run without an API key we can assume all data is TLP:WHITE;
        # otherwise we default to TLP:AMBER to be safe.
        if self.client.unauthenticated:
            self.default_marking = self.helper.api.marking_definition.read(
                id=TLP_WHITE["id"]
            )
        else:
            self.default_marking = self.helper.api.marking_definition.read(
                id=TLP_AMBER["id"]
            )

        self.knowledge_importer = KnowledgeImporter(
            self.helper,
            self.client,
            self.confidence_level,
            self.update_existing_data,
            self.import_intrusion_sets,
            self.import_yara,
            self.create_indicators,
            self.create_observables,
            self.default_marking,
        )

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= int(self.INTERVAL_SEC)
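
    # Illustrative behaviour (not part of the original code): assuming
    # INTERVAL_SEC = 3600, _is_scheduled(None, now) is True on the first run,
    # and _is_scheduled(now - 7200, now) is True because 7200 >= 3600.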

    def _check_version(self, last_version: Optional[int], current_version: int) -> bool:
        if last_version is None:
            return True
        return current_version > last_version

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(datetime.utcnow().timestamp())

    def _get_interval(self):
        return int(self.INTERVAL_SEC)

    def run(self):
        self.helper.log_info("starting Malpedia connector...")
        while True:
            try:
                current_malpedia_version = self.client.current_version()
                self.helper.log_info(
                    f"current Malpedia version: {current_malpedia_version}"
                )
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()

                self.helper.log_info(f"loaded state: {current_state}")

                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)

                last_malpedia_version = self._get_state_value(
                    current_state, self._MALPEDIA_LAST_VERSION
                )

                # Only run the connector if:
                #  1. It is scheduled to run per interval
                #  2. The global Malpedia version from the API is newer than our
                #     last stored version.
                if self._is_scheduled(last_run, timestamp) and self._check_version(
                    last_malpedia_version, current_malpedia_version
                ):
                    self.helper.log_info("running importers")

                    knowledge_importer_state = self._run_knowledge_importer(
                        current_state
                    )
                    self.helper.log_info("done with running importers")

                    new_state = current_state.copy()
                    new_state.update(knowledge_importer_state)
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()
                    new_state[self._MALPEDIA_LAST_VERSION] = current_malpedia_version

                    self.helper.log_info(f"storing new state: {new_state}")

                    self.helper.set_state(new_state)

                    self.helper.log_info(
                        f"state stored, next run in: {self._get_interval()} seconds"
                    )
                else:
                    new_interval = self._get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        f"connector will not run, next run in: {new_interval} seconds"
                    )

                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                exit(0)

    def _run_knowledge_importer(
        self, current_state: Mapping[str, Any]
    ) -> Mapping[str, Any]:
        return self.knowledge_importer.run(current_state)
Example #12
class OpenCTI:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.opencti_sectors_file_url = get_config_variable(
            "CONFIG_SECTORS_FILE_URL", ["config", "sectors_file_url"], config
        )
        self.opencti_geography_file_url = get_config_variable(
            "CONFIG_GEOGRAPHY_FILE_URL", ["config", "geography_file_url"], config
        )
        self.opencti_interval = get_config_variable(
            "CONFIG_INTERVAL", ["config", "interval"], config, True
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        return int(self.opencti_interval) * 60 * 60 * 24

    def run(self):
        self.helper.log_info("Fetching OpenCTI datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: "
                        + datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # Run again if the last run was more than (interval - 1) days ago
                if last_run is None or (
                    (timestamp - last_run)
                    > ((int(self.opencti_interval) - 1) * 60 * 60 * 24)
                ):
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "OpenCTI datasets run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S"
                    )
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name
                    )
                    try:
                        sectors_data = urllib.request.urlopen(
                            self.opencti_sectors_file_url
                        ).read()
                        self.helper.send_stix2_bundle(
                            sectors_data.decode("utf-8"),
                            entities_types=self.helper.connect_scope,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                    except Exception as e:
                        self.helper.log_error(str(e))
                    try:
                        geography_data = urllib.request.urlopen(
                            self.opencti_geography_file_url
                        ).read()
                        self.helper.send_stix2_bundle(
                            geography_data.decode("utf-8"),
                            entities_types=self.helper.connect_scope,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                    except Exception as e:
                        self.helper.log_error(str(e))
                    # Store the current timestamp as a last run
                    message = "Connector successfully run, storing last_run as " + str(
                        timestamp
                    )
                    self.helper.log_info(message)
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(
                        "Last_run stored, next run in: "
                        + str(round(self.get_interval() / 60 / 60 / 24, 2))
                        + " days"
                    )
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: "
                        + str(round(new_interval / 60 / 60 / 24, 2))
                        + " days"
                    )
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
Example #13
class ExportReportPdf:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

        # ExportReportPdf specific config settings
        self.primary_color = get_config_variable(
            "EXPORT_REPORT_PDF_PRIMARY_COLOR",
            ["export_report_pdf", "primary_color"],
            config,
        )
        self.secondary_color = get_config_variable(
            "EXPORT_REPORT_PDF_SECONDARY_COLOR",
            ["export_report_pdf", "secondary_color"],
            config,
        )
        self.set_colors()
        self.company_address_line_1 = get_config_variable(
            "EXPORT_REPORT_PDF_COMPANY_ADDRESS_LINE_1",
            ["export_report_pdf", "company_address_line_1"],
            config,
        )
        self.company_address_line_2 = get_config_variable(
            "EXPORT_REPORT_PDF_COMPANY_ADDRESS_LINE_2",
            ["export_report_pdf", "company_address_line_2"],
            config,
        )
        self.company_address_line_3 = get_config_variable(
            "EXPORT_REPORT_PDF_COMPANY_ADDRESS_LINE_3",
            ["export_report_pdf", "company_address_line_3"],
            config,
        )
        self.company_phone_number = get_config_variable(
            "EXPORT_REPORT_PDF_COMPANY_PHONE_NUMBER",
            ["export_report_pdf", "company_phone_number"],
            config,
        )
        self.company_email = get_config_variable(
            "EXPORT_REPORT_PDF_COMPANY_EMAIL",
            ["export_report_pdf", "company_email"],
            config,
        )
        self.company_website = get_config_variable(
            "EXPORT_REPORT_PDF_COMPANY_WEBSITE",
            ["export_report_pdf", "company_website"],
            config,
        )
        self.indicators_only = get_config_variable(
            "EXPORT_REPORT_PDF_INDICATORS_ONLY",
            ["export_report_pdf", "indicators_only"],
            config,
        )
        self.defang_urls = get_config_variable(
            "EXPORT_REPORT_PDF_DEFANG_URLS",
            ["export_report_pdf", "defang_urls"],
            config,
        )

    def _process_message(self, data):
        file_name = data["file_name"]
        # TODO this can be implemented to filter every entity and observable
        # max_marking = data["max_marking"]
        entity_type = data["entity_type"]

        if entity_type != "Report":
            raise ValueError(
                f'This Connector can only process entities of type "Report" and not of type "{entity_type}".'
            )

        # Get the Report
        report_dict = self.helper.api.report.read(id=data["entity_id"])

        # Extract values for inclusion in output pdf
        report_marking = report_dict.get("objectMarking", None)
        if report_marking:
            report_marking = report_marking[-1]["definition"]
        report_name = report_dict["name"]
        report_description = report_dict.get("description",
                                             "No description available.")
        report_confidence = report_dict["confidence"]
        report_id = report_dict["id"]
        report_external_refs = [
            external_ref_dict["url"]
            for external_ref_dict in report_dict["externalReferences"]
        ]
        report_objs = report_dict["objects"]
        report_date = datetime.datetime.now().strftime("%b %d %Y")

        context = {
            "report_name": report_name,
            "report_description": report_description,
            "report_marking": report_marking,
            "report_confidence": report_confidence,
            "report_external_refs": report_external_refs,
            "report_date": report_date,
            "company_address_line_1": self.company_address_line_1,
            "company_address_line_2": self.company_address_line_2,
            "company_address_line_3": self.company_address_line_3,
            "company_phone_number": self.company_phone_number,
            "company_email": self.company_email,
            "company_website": self.company_website,
            "entities": {},
            "observables": {},
        }

        # Process each STIX Object
        for report_obj in report_objs:
            obj_entity_type = report_obj["entity_type"]
            obj_id = report_obj["standard_id"]

            # Handle StixCyberObservables entities
            if obj_entity_type == "StixFile" or StixCyberObservableTypes.has_value(
                    obj_entity_type):
                observable_dict = self.helper.api.stix_cyber_observable.read(
                    id=obj_id)

                # If only indicators should be included and the observable
                # doesn't have an associated indicator, skip it
                if self.indicators_only and not observable_dict["indicators"]:
                    self.helper.log_info(
                        f"Skipping {obj_entity_type} observable with value {observable_dict['observable_value']} as it was not an Indicator."
                    )
                    continue

                if obj_entity_type not in context["observables"]:
                    context["observables"][obj_entity_type] = []

                # Defang urls
                if self.defang_urls and obj_entity_type == "Url":
                    observable_dict["observable_value"] = observable_dict[
                        "observable_value"].replace("http", "hxxp", 1)

                context["observables"][obj_entity_type].append(observable_dict)

            # Handle all other entities
            else:
                reader_func = self.get_reader(obj_entity_type)
                if reader_func is None:
                    self.helper.log_error(
                        f'Could not find a function to read entity with type "{obj_entity_type}"'
                    )
                    continue
                entity_dict = reader_func(id=obj_id)

                if obj_entity_type not in context["entities"]:
                    context["entities"][obj_entity_type] = []

                context["entities"][obj_entity_type].append(entity_dict)

        # Render html with input variables
        env = Environment(
            loader=FileSystemLoader(os.path.abspath(os.getcwd())))
        template = env.get_template("src/resources/report.html")
        html_string = template.render(context)

        # Generate pdf from html string
        pdf_contents = HTML(string=html_string,
                            base_url="src/resources").write_pdf()

        # Upload the output pdf
        self.helper.log_info(f"Uploading: {file_name}")
        self.helper.api.stix_domain_object.add_file(
            id=report_id,
            file_name=file_name,
            data=pdf_contents,
            mime_type="application/pdf",
        )
        return "Export done"

    def set_colors(self):
        with open("src/resources/report.css.template", "r") as f:
            new_css = f.read()
            new_css = new_css.replace("<primary_color>", self.primary_color)
            new_css = new_css.replace("<secondary_color>",
                                      self.secondary_color)

        with open("src/resources/report.css", "w") as f:
            f.write(new_css)
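
    # Illustrative example (not part of the original code): with a hypothetical
    # primary_color of "#ff8c00", every "<primary_color>" placeholder in
    # src/resources/report.css.template is replaced before the stylesheet is
    # written back to src/resources/report.css.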

    def get_reader(self, entity_type):
        """
        Returns the function used to call the OpenCTI API to read data
        for a particular entity type.

        entity_type: a str representing the entity type, e.g. "Indicator"

        returns: a function or None if entity type is not supported
        """
        reader = {
            "Stix-Domain-Object": self.helper.api.stix_domain_object.read,
            "Attack-Pattern": self.helper.api.attack_pattern.read,
            "Campaign": self.helper.api.campaign.read,
            "Note": self.helper.api.note.read,
            "Observed-Data": self.helper.api.observed_data.read,
            "Organization": self.helper.api.identity.read,
            "Opinion": self.helper.api.opinion.read,
            "Report": self.helper.api.report.read,
            "Sector": self.helper.api.identity.read,
            "System": self.helper.api.identity.read,
            "Course-Of-Action": self.helper.api.course_of_action.read,
            "Identity": self.helper.api.identity.read,
            "Indicator": self.helper.api.indicator.read,
            "Individual": self.helper.api.identity.read,
            "Infrastructure": self.helper.api.infrastructure.read,
            "Intrusion-Set": self.helper.api.intrusion_set.read,
            "Malware": self.helper.api.malware.read,
            "Threat-Actor": self.helper.api.threat_actor.read,
            "Tool": self.helper.api.tool.read,
            "Vulnerability": self.helper.api.vulnerability.read,
            "Incident": self.helper.api.incident.read,
            "City": self.helper.api.location.read,
            "Country": self.helper.api.location.read,
            "Region": self.helper.api.location.read,
            "Position": self.helper.api.location.read,
            "Location": self.helper.api.location.read,
        }
        return reader.get(entity_type, None)
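
    # Illustrative usage (not part of the original code):
    #   self.get_reader("Intrusion-Set")  -> self.helper.api.intrusion_set.read
    #   self.get_reader("Unknown-Type")   -> None (the caller logs an error
    #                                       and skips the entity)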

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
Example #14
class Cybercrimetracker:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = "{}/config.yml".format(
            os.path.dirname(os.path.abspath(__file__)))

        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

        # Connector Config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

        # CYBERCRiME-TRACKER.NET Config
        self.feed_url = get_config_variable(
            "CYBERCRIMETRACKER_FEED_URL",
            ["cybercrimetracker", "feed_url"],
            config,
        )
        self.connector_tlp = get_config_variable(
            "CYBERCRIMETRACKER_TLP",
            ["cybercrimetracker", "tlp"],
            config,
        )
        self.interval = get_config_variable(
            "CYBERCRIMETRACKER_INTERVAL",
            ["cybercrimetracker", "interval"],
            config,
            isNumber=True,
        )

    @staticmethod
    def _time_to_datetime(input_date: time.struct_time) -> str:
        return datetime(
            input_date.tm_year,
            input_date.tm_mon,
            input_date.tm_mday,
            input_date.tm_hour,
            input_date.tm_min,
            input_date.tm_sec,
            tzinfo=timezone.utc,
        ).isoformat()
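
    # Illustrative example (not part of the original code): a struct_time for
    # 2021-01-02 03:04:05 UTC is converted to "2021-01-02T03:04:05+00:00".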

    def parse_feed_entry(self, entry):
        """
        Parses an entry from the feed and returns a dict with:

        date: date in iso format
        type: name of the malware associated with the C2 server
        url: the url of the C2
        ip: the IP address of the C2
        ext_link: An external link to CYBERCRiME-TRACKER.NET with details

        Note: CYBERCRiME-TRACKER.NET does not provide the protocol in the URL,
        so we always assume 'http'.
        """
        parsed_entry = {}

        pattern = (
            r"(?:\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type}" +
            r"\s+-%{GREEDYDATA}:\s+%{IP:ip}|" +
            r"\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type})")

        entry_summary = Grok(pattern).match(entry["summary"])

        if entry_summary:
            parsed_entry["date"] = self._time_to_datetime(
                entry["published_parsed"])
            parsed_entry["type"] = entry_summary["type"]
            parsed_entry["ext_link"] = entry["link"]
            parsed_entry["url"] = "http://{}".format(quote(entry["title"]))
            hostname = urlparse(parsed_entry["url"]).hostname

            if entry_summary["ip"] is None:
                parsed_entry["ip"] = hostname
            else:
                parsed_entry["ip"] = entry_summary["ip"]
                parsed_entry["domain"] = hostname

            self.helper.log_info("Parsed entry: {}".format(entry["title"]))

            return parsed_entry
        else:
            self.helper.log_error("Could not parse: {}".format(entry["title"]))
            return False
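
    # Illustrative result (not part of the original code; values are made up):
    #   {"date": "2021-01-02T03:04:05+00:00", "type": "Pony",
    #    "ext_link": "<feed entry link>",
    #    "url": "http://panel.example.com/gate.php",
    #    "ip": "198.51.100.1", "domain": "panel.example.com"}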

    def gen_indicator_pattern(self, parsed_entry):

        if "domain" in parsed_entry.keys():
            indicator_pattern = (
                "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"]) +
                "AND [url:value='{}'] ".format(parsed_entry["url"]) +
                "AND [domain:value='{}']".format(parsed_entry["domain"]))
        else:
            indicator_pattern = "[ipv4-addr:value='{}'] ".format(
                parsed_entry["ip"]) + "AND [url:value='{}']".format(
                    parsed_entry["url"])

        return indicator_pattern
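
    # Illustrative output (not part of the original code), using the made-up
    # entry above (which has a "domain" key):
    #   [ipv4-addr:value='198.51.100.1'] AND
    #   [url:value='http://panel.example.com/gate.php'] AND
    #   [domain:value='panel.example.com']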

    def run(self):

        self.helper.log_info("Fetching data CYBERCRiME-TRACKER.NET...")

        tag = self.helper.api.tag.create(
            tag_type="C2-Type",
            value="C2 Server",
            color="#fc236b",
        )
        tlp = self.helper.api.marking_definition.read(
            filters=[{
                "key": "definition",
                "values": "TLP:{}".format(self.connector_tlp)
            }])

        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()

                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: {}".format(
                        datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S")))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")

                # Run if it is the first time or we are past the interval

                if last_run is None or (
                    (timestamp - last_run) > self.interval):
                    self.helper.log_info("Connector will run!")

                    # Get Feed Content
                    feed = feedparser.parse(self.feed_url)

                    self.helper.log_info("Found: {} entries.".format(
                        len(feed["entries"])))

                    self.feed_summary = {
                        "Source": feed["feed"]["title"],
                        "Date": self._time_to_datetime(
                            feed["feed"]["published_parsed"]),
                        "Details": feed["feed"]["subtitle"],
                        "Link": feed["feed"]["link"],
                    }

                    # Create entity for the feed.
                    organization = self.helper.api.identity.create(
                        type="Organization",
                        name="CYBERCRiME-TRACKER.NET",
                        description="Tracker collecting and sharing daily "
                        "updates of C2 IPs/Urls. http://cybercrime-tracker.net",
                    )

                    for entry in feed["entries"]:

                        parsed_entry = self.parse_feed_entry(entry)
                        # Skip entries that could not be parsed
                        if not parsed_entry:
                            continue

                        ext_reference = self.helper.api.external_reference.create(
                            source_name="{}".format(
                                self.feed_summary["Source"], ),
                            url=parsed_entry["ext_link"],
                        )

                        indicator_pattern = self.gen_indicator_pattern(
                            parsed_entry)

                        # Add malware related to indicator
                        malware = self.helper.api.malware.create(
                            name=parsed_entry["type"],
                            description="{} malware.".format(
                                parsed_entry["type"]),
                        )

                        # Add indicator
                        indicator = self.helper.api.indicator.create(
                            name=parsed_entry["url"],
                            description="C2 URL for: {}".format(
                                parsed_entry["type"]),
                            pattern_type="stix",
                            indicator_pattern=indicator_pattern,
                            main_observable_type="URL",
                            valid_from=parsed_entry["date"],
                            created=parsed_entry["date"],
                            modified=parsed_entry["date"],
                            createdByRef=organization["id"],
                            markingDefinitions=[tlp["id"]],
                            update=self.update_data,
                        )

                        # Add tag
                        self.helper.api.stix_entity.add_tag(
                            id=indicator["id"],
                            tag_id=tag["id"],
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        # Add relationship with malware
                        relation = self.helper.api.stix_relation.create(
                            fromType="Indicator",
                            fromId=indicator["id"],
                            toType="Malware",
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self._time_to_datetime(
                                entry["published_parsed"]),
                            last_seen=self._time_to_datetime(
                                entry["published_parsed"]),
                            description="URLs associated to: " +
                            parsed_entry["type"],
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            created=parsed_entry["date"],
                            modified=parsed_entry["date"],
                            update=self.update_data,
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        # Create Observables and link them to Indicator
                        observable_url = self.helper.api.stix_observable.create(
                            type="URL",
                            observable_value=parsed_entry["url"],
                            createdByRef=organization["id"],
                            markingDefinitions=[tlp["id"]],
                            update=self.update_data,
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=observable_url["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        self.helper.api.indicator.add_stix_observable(
                            id=indicator["id"],
                            stix_observable_id=observable_url["id"],
                        )

                        observable_ip = self.helper.api.stix_observable.create(
                            type="IPv4-Addr",
                            observable_value=parsed_entry["ip"],
                            createdByRef=organization["id"],
                            markingDefinitions=[tlp["id"]],
                            update=self.update_data,
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=observable_ip["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        self.helper.api.indicator.add_stix_observable(
                            id=indicator["id"],
                            stix_observable_id=observable_ip["id"],
                        )

                        if "domain" in parsed_entry.keys():
                            observable_domain = self.helper.api.stix_observable.create(
                                type="Domain",
                                observable_value=parsed_entry["domain"],
                                createdByRef=organization["id"],
                                markingDefinitions=[tlp["id"]],
                                update=self.update_data,
                            )

                            self.helper.api.stix_entity.add_external_reference(
                                id=observable_domain["id"],
                                external_reference_id=ext_reference["id"],
                            )

                            self.helper.api.indicator.add_stix_observable(
                                id=indicator["id"],
                                stix_observable_id=observable_domain["id"],
                            )
                            self.helper.api.stix_relation.create(
                                fromType="Domain",
                                fromId=observable_domain["id"],
                                toType="IPv4-Addr",
                                toId=observable_ip["id"],
                                relationship_type="resolves",
                                last_seen=self._time_to_datetime(
                                    entry["published_parsed"]),
                                weight=self.confidence_level,
                                createdByRef=organization["id"],
                                created=parsed_entry["date"],
                                modified=parsed_entry["date"],
                                update=self.update_data,
                            )

                    # Store the current timestamp as the last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as: {}".format(
                            str(timestamp)))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: {} seconds.".format(
                            str(round(self.interval, 2))))
                    time.sleep(60)
                else:
                    new_interval = self.interval - (timestamp - last_run)
                    self.helper.log_info("Connector will not run. \
                            Next run in: {} seconds.".format(
                        str(round(new_interval, 2))))
                    time.sleep(60)

            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
Beispiel #15
0
class HygieneConnector:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        self.warninglists = WarningLists()

        # Create Hygiene Tag
        self.tag_hygiene = self.helper.api.tag.create(
            tag_type="Hygiene",
            value="Hygiene",
            color="#fc0341",
        )

    def _process_observable(self, observable):
        # Extract IPv4, IPv6 and Domain from entity data
        observable_value = observable["observable_value"]

        # Search in warninglist
        result = self.warninglists.search(observable_value)

        # Iterate over the hits
        if result:
            self.helper.log_info("Hit found for %s in warninglists" %
                                 (observable_value))

            for hit in result:
                self.helper.log_info(
                    "Type: %s | Name: %s | Version: %s | Descr: %s" %
                    (hit.type, hit.name, hit.version, hit.description))

                self.helper.api.stix_entity.add_tag(
                    id=observable["id"], tag_id=self.tag_hygiene["id"])

                # Create external references
                external_reference_id = self.helper.api.external_reference.create(
                    source_name="misp-warninglist",
                    url="https://github.com/MISP/misp-warninglists/tree/master"
                    + LIST_MAPPING[hit.name],
                    external_id=hit.name,
                    description=hit.description,
                )

                self.helper.api.stix_entity.add_external_reference(
                    id=observable["id"],
                    external_reference_id=external_reference_id["id"],
                )

            return [
                "observable value found on warninglist and tagged accordingly"
            ]

    def _process_message(self, data):
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_observable.read(id=entity_id)
        return self._process_observable(observable)

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
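Each connector in these examples is wired up the same way: instantiate the class, then call start(), which hands _process_message to helper.listen(). A minimal launcher sketch for the Hygiene connector above (not part of the original snippet; it assumes the class is defined in, or importable from, the current module):

if __name__ == "__main__":
    import sys
    import time

    try:
        connector = HygieneConnector()
        connector.start()
    except Exception as e:
        # Log to stdout and give the container a moment before exiting,
        # so orchestrators do not restart it in a tight loop.
        print(e)
        time.sleep(10)
        sys.exit(0)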
Beispiel #16
0
class ExportFileCsv:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader
                           ) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)

    def export_dict_list_to_csv(self, data):
        output = io.StringIO()
        headers = sorted(set().union(*(d.keys() for d in data)))
        csv_data = [headers]
        for d in data:
            row = []
            for h in headers:
                if h not in d:
                    row.append('')
                elif isinstance(d[h], str):
                    row.append(d[h])
                elif isinstance(d[h], list):
                    if len(d[h]) > 0 and isinstance(d[h][0], str):
                        row.append(','.join(d[h]))
                    elif len(d[h]) > 0 and isinstance(d[h][0], dict):
                        rrow = []
                        for r in d[h]:
                            if 'name' in r:
                                rrow.append(r['name'])
                            elif 'definition' in r:
                                rrow.append(r['definition'])
                        row.append(','.join(rrow))
                    else:
                        row.append('')
                elif isinstance(d[h], dict):
                    if 'name' in d[h]:
                        row.append(d[h]['name'])
                    else:
                        row.append('')
                else:
                    row.append('')
            csv_data.append(row)
        writer = csv.writer(output,
                            delimiter=';',
                            quotechar='"',
                            quoting=csv.QUOTE_ALL)
        writer.writerows(csv_data)
        return output.getvalue()

    def _process_message(self, data):
        entity_id = data['entity_id']
        entity_type = data['entity_type']
        file_name = data['file_name']
        file_context = data['file_context']
        export_type = data['export_type']
        list_args = data['list_args']
        max_marking_definition = data['max_marking_definition']
        if entity_id is not None:
            self.helper.log_info('Exporting: ' + entity_type + '/' +
                                 export_type + '(' + entity_id + ') to ' +
                                 file_name)
            entity_data = self.helper.api.stix_domain_entity.read(id=entity_id)
            entities_list = [entity_data]
            if 'objectRefsIds' in entity_data:
                for id in entity_data['objectRefsIds']:
                    entity = self.helper.api.stix_domain_entity.read(id=id)
                    entities_list.append(entity)
            csv_data = self.export_dict_list_to_csv(entities_list)
            self.helper.log_info('Uploading: ' + entity_type + '/' +
                                 export_type + '(' + entity_id + ') to ' +
                                 file_name)
            self.helper.api.stix_domain_entity.push_entity_export(
                entity_id, file_name, csv_data)
            self.helper.log_info('Export done: ' + entity_type + '/' +
                                 export_type + '(' + entity_id + ') to ' +
                                 file_name)
        else:
            self.helper.log_info('Exporting list: ' + entity_type + '/' +
                                 export_type + ' to ' + file_name)
            max_marking_definition_entity = self.helper.api.marking_definition.read(
                id=max_marking_definition
            ) if max_marking_definition is not None else None

            if IdentityTypes.has_value(entity_type):
                if list_args['filters'] is not None:
                    list_args['filters'].append({
                        'key': 'entity_type',
                        'values': [entity_type]
                    })
                else:
                    list_args['filters'] = [{
                        'key': 'entity_type',
                        'values': [entity_type]
                    }]
                entity_type = 'identity'

            # List
            lister = {
                'identity': self.helper.api.identity.list,
                'threat-actor': self.helper.api.threat_actor.list,
                'intrusion-set': self.helper.api.intrusion_set.list,
                'campaign': self.helper.api.campaign.list,
                'incident': self.helper.api.incident.list,
                'malware': self.helper.api.malware.list,
                'tool': self.helper.api.tool.list,
                'vulnerability': self.helper.api.vulnerability.list,
                'attack-pattern': self.helper.api.attack_pattern.list,
                'course-of-action': self.helper.api.course_of_action.list,
                'report': self.helper.api.report.list,
                'indicator': self.helper.api.indicator.list
            }
            do_list = lister.get(
                entity_type.lower(), lambda **kwargs: self.helper.
                log_error('Unknown object type "' + entity_type +
                          '", doing nothing...'))
            entities_list = do_list(search=list_args['search'],
                                    filters=list_args['filters'],
                                    orderBy=list_args['orderBy'],
                                    orderMode=list_args['orderMode'],
                                    getAll=True)
            csv_data = self.export_dict_list_to_csv(entities_list)
            self.helper.log_info('Uploading: ' + entity_type + '/' +
                                 export_type + ' to ' + file_name)
            self.helper.api.stix_domain_entity.push_list_export(
                entity_type, file_name, csv_data, file_context,
                json.dumps(list_args))
            self.helper.log_info('Export done: ' + entity_type + '/' +
                                 export_type + ' to ' + file_name)
        return ['Export done']

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
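The export logic above flattens arbitrary entity dictionaries into CSV rows: the header is the union of all keys, lists of strings are comma-joined, and nested objects are reduced to their "name" (or "definition") field. Below is a standalone sketch of those flattening rules on hypothetical sample data (simplified: nested objects are reduced to "name" only), runnable without an OpenCTI instance:

import csv
import io


def flatten_to_csv(entities):
    output = io.StringIO()
    # Header row is the union of all keys across the entities
    headers = sorted(set().union(*(d.keys() for d in entities)))
    rows = [headers]
    for entity in entities:
        row = []
        for header in headers:
            value = entity.get(header, "")
            if isinstance(value, list):
                # Lists of strings are joined; lists of objects keep their name
                value = ",".join(
                    v if isinstance(v, str) else v.get("name", "") for v in value)
            elif isinstance(value, dict):
                value = value.get("name", "")
            elif not isinstance(value, str):
                value = ""
            row.append(value)
        rows.append(row)
    writer = csv.writer(output, delimiter=";", quotechar='"', quoting=csv.QUOTE_ALL)
    writer.writerows(rows)
    return output.getvalue()


print(flatten_to_csv([
    {"name": "Sample intrusion set", "alias": ["APT-Example"],
     "createdByRef": {"name": "Example Org"}},
    {"name": "Sample report", "description": "Demo entity"},
]))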
Beispiel #17
0
class Misp:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.misp_url = get_config_variable('MISP_URL', ['misp', 'url'], config)
        self.misp_key = get_config_variable('MISP_KEY', ['misp', 'key'], config)
        self.misp_ssl_verify = get_config_variable('MISP_SSL_VERIFY', ['misp', 'ssl_verify'], config)
        self.misp_create_report = get_config_variable('MISP_CREATE_REPORTS', ['misp', 'create_reports'], config)
        self.misp_report_class = get_config_variable(
            'MISP_REPORT_CLASS',
            ['misp', 'report_class'],
            config
        ) or 'MISP Event'
        self.misp_import_from_date = get_config_variable('MISP_IMPORT_FROM_DATE', ['misp', 'import_from_date'], config)
        self.misp_import_tags = get_config_variable('MISP_IMPORT_TAGS', ['misp', 'import_tags'], config)
        self.misp_interval = get_config_variable('MISP_INTERVAL', ['misp', 'interval'], config, True)
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA',
            ['connector', 'update_existing_data'],
            config
        )

        # Initialize MISP
        self.misp = ExpandedPyMISP(url=self.misp_url, key=self.misp_key, ssl=self.misp_ssl_verify, debug=False)

    def get_interval(self):
        return int(self.misp_interval) * 60

    def run(self):
        while True:
            timestamp = int(time.time())
            # Get the last_run datetime
            current_state = self.helper.get_state()
            if current_state is not None and 'last_run' in current_state:
                last_run = datetime.utcfromtimestamp(current_state['last_run']).strftime('%Y-%m-%d %H:%M:%S')
                self.helper.log_info(
                    'Connector last run: ' + last_run)
            else:
                last_run = None
                self.helper.log_info('Connector has never run')

            # If import with tags
            complex_query_tag = None
            if self.misp_import_tags is not None:
                or_parameters = []
                for tag in self.misp_import_tags.split(','):
                    or_parameters.append(tag.strip())
                complex_query_tag = self.misp.build_complex_query(or_parameters=or_parameters)

            # If import from a specific date
            import_from_date = None
            if self.misp_import_from_date is not None:
                import_from_date = parse(self.misp_import_from_date).strftime('%Y-%m-%d %H:%M:%S')

            # Prepare the query
            kwargs = dict()
            if complex_query_tag is not None:
                kwargs['tags'] = complex_query_tag
            if last_run is not None:
                kwargs['timestamp'] = last_run
            elif import_from_date is not None:
                kwargs['date_from'] = import_from_date

            # Query with pagination of 100
            current_page = 1
            while True:
                kwargs['limit'] = 100
                kwargs['page'] = current_page
                self.helper.log_info('Fetching MISP events with args: ' + json.dumps(kwargs))
                events = self.misp.search('events', **kwargs)
                self.helper.log_info('MISP returned ' + str(len(events)) + ' events.')
                # Break if no more result
                if len(events) == 0:
                    break
                self.process_events(events)
                current_page += 1
            # Set the last_run timestamp
            self.helper.set_state({'last_run': timestamp})
            time.sleep(self.get_interval())

    def process_events(self, events):
        for event in events:
            ### Default variables
            added_markings = []
            added_entities = []
            added_object_refs = []

            ### Pre-process
            # Author
            author = Identity(name=event['Event']['Orgc']['name'], identity_class='organization')
            # Elements
            event_elements = self.prepare_elements(event['Event']['Galaxy'])
            # Markings
            if 'Tag' in event['Event']:
                event_markings = self.resolve_markings(event['Event']['Tag'])
            else:
                event_markings = [TLP_WHITE]
            # ExternalReference
            event_external_reference = ExternalReference(
                source_name=self.helper.connect_name,
                external_id=event['Event']['uuid'],
                url=self.misp_url + '/events/view/' + event['Event']['uuid'])

            ### Get indicators
            indicators = []
            # Get attributes
            for attribute in event['Event']['Attribute']:
                indicator = self.process_attribute(author, event_elements, event_markings, attribute)
                if indicator is not None:
                    indicators.append(indicator)
            # Get attributes of objects
            objects_relationships = []
            for object in event['Event']['Object']:
                object_attributes = []
                for attribute in object['Attribute']:
                    indicator = self.process_attribute(author, event_elements, event_markings, attribute)
                    if indicator is not None:
                        indicators.append(indicator)
                        if (object['meta-category'] == 'file' and
                                indicator['indicator'].x_opencti_observable_type in FILETYPES):
                            object_attributes.append(indicator)
                objects_relationships.extend(self.process_observable_relations(object_attributes, []))

            ### Prepare the bundle
            bundle_objects = [author]
            object_refs = []
            # Add event markings
            for event_marking in event_markings:
                if event_marking['id'] not in added_markings:
                    bundle_objects.append(event_marking)
                    added_markings.append(event_marking['id'])
            # Add event elements
            all_event_elements = \
                event_elements['intrusion_sets'] + \
                event_elements['malwares'] + \
                event_elements['tools'] + \
                event_elements['attack_patterns']
            for event_element in all_event_elements:
                if event_element['name'] not in added_object_refs:
                    object_refs.append(event_element)
                    added_object_refs.append(event_element['name'])
                if event_element['name'] not in added_entities:
                    bundle_objects.append(event_element)
                    added_entities.append(event_element['name'])
            # Add indicators
            for indicator in indicators:
                if indicator['indicator']['id'] not in added_object_refs:
                    object_refs.append(indicator['indicator'])
                    added_object_refs.append(indicator['indicator']['id'])
                if indicator['indicator']['id'] not in added_entities:
                    bundle_objects.append(indicator['indicator'])
                    added_entities.append(indicator['indicator']['id'])
                # Add attribute markings
                for attribute_marking in indicator['markings']:
                    if attribute_marking['id'] not in added_markings:
                        bundle_objects.append(attribute_marking)
                        added_markings.append(attribute_marking['id'])
                # Add attribute elements
                all_attribute_elements = \
                    indicator['attribute_elements']['intrusion_sets'] + \
                    indicator['attribute_elements']['malwares'] + \
                    indicator['attribute_elements']['tools'] + \
                    indicator['attribute_elements']['attack_patterns']
                for attribute_element in all_attribute_elements:
                    if attribute_element['name'] not in added_object_refs:
                        object_refs.append(attribute_element)
                        added_object_refs.append(attribute_element['name'])
                    if attribute_element['name'] not in added_entities:
                        bundle_objects.append(attribute_element)
                        added_entities.append(attribute_element['name'])
                # Add attribute relationships
                for relationship in indicator['relationships']:
                    object_refs.append(relationship)
                    bundle_objects.append(relationship)
            # Add object_relationships
            for object_relationship in objects_relationships:
                bundle_objects.append(object_relationship)

            ### Create the report if needed
            if self.misp_create_report and len(object_refs) > 0:
                report = Report(
                    name=event['Event']['info'],
                    description=event['Event']['info'],
                    published=parse(event['Event']['date']),
                    created_by_ref=author,
                    object_marking_refs=event_markings,
                    labels=['threat-report'],
                    object_refs=object_refs,
                    external_references=[event_external_reference],
                    custom_properties={
                        'x_opencti_report_class': self.misp_report_class,
                        'x_opencti_object_status': 2
                    }
                )
                bundle_objects.append(report)
            bundle = Bundle(objects=bundle_objects).serialize()
            self.helper.send_stix2_bundle(bundle, None, self.update_existing_data, False)

    def process_attribute(self, author, event_elements, event_markings, attribute):
        resolved_attributes = self.resolve_type(attribute['type'], attribute['value'])
        if resolved_attributes is None:
            return None

        for resolved_attribute in resolved_attributes:
            ### Pre-process
            # Elements
            attribute_elements = self.prepare_elements(attribute['Galaxy'])
            # Markings
            if 'Tag' in attribute:
                attribute_markings = self.resolve_markings(attribute['Tag'], with_default=False)
                if len(attribute_markings) == 0:
                    attribute_markings = event_markings
            else:
                attribute_markings = event_markings

            ### Create the indicator
            observable_type = resolved_attribute['type']
            observable_value = resolved_attribute['value']
            pattern_type = 'stix'
            if observable_type in PATTERNTYPES:
                pattern_type = observable_type
            elif observable_type not in OPENCTISTIX2:
                return None
            else:
                if 'transform' in OPENCTISTIX2[observable_type]:
                    if OPENCTISTIX2[observable_type]['transform']['operation'] == 'remove_string':
                        observable_value = observable_value.replace(OPENCTISTIX2[observable_type]['transform']['value'], '')
                lhs = ObjectPath(OPENCTISTIX2[observable_type]['type'], OPENCTISTIX2[observable_type]['path'])
                observable_value = ObservationExpression(EqualityComparisonExpression(lhs, observable_value))
            try:
                indicator = Indicator(
                    name=resolved_attribute['value'],
                    description=attribute['comment'],
                    pattern=str(observable_value),
                    valid_from=datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime('%Y-%m-%dT%H:%M:%SZ'),
                    labels=['malicious-activity'],
                    created_by_ref=author,
                    object_marking_refs=attribute_markings,
                    custom_properties={
                        'x_opencti_observable_type': resolved_attribute['type'],
                        'x_opencti_observable_value': resolved_attribute['value'],
                        'x_opencti_pattern_type': pattern_type
                    }
                )
            except:
                return None

            ### Create the relationships
            relationships = []
            # Event threats
            for threat in (event_elements['intrusion_sets'] + event_elements['malwares'] + event_elements['tools']):
                relationships.append(
                    Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref=threat.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level
                        }
                    )
                )
            # Attribute threats
            for threat in (attribute_elements['intrusion_sets'] +
                           attribute_elements['malwares'] +
                           attribute_elements['tools']):
                relationships.append(
                    Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref=threat.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level
                        }
                    )
                )
            # Event Attack Patterns
            for attack_pattern in event_elements['attack_patterns']:
                if len(event_elements['malwares']) > 0:
                    threats = event_elements['malwares']
                elif len(event_elements['intrusion_sets']) > 0:
                    threats = event_elements['intrusion_sets']
                else:
                    threats = []
                for threat in threats:
                    relationship_uses = Relationship(
                        relationship_type='uses',
                        created_by_ref=author,
                        source_ref=threat.id,
                        target_ref=attack_pattern.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_ignore_dates': True
                        }
                    )
                    relationships.append(relationship_uses)
                    relationship_indicates = Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref='malware--fa42a846-8d90-4e51-bc29-71d5b4802168',  # Fake
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_source_ref': indicator.id,
                            'x_opencti_target_ref': relationship_uses.id
                        }
                    )
                    relationships.append(relationship_indicates)
            # Attribute Attack Patterns
            for attack_pattern in attribute_elements['attack_patterns']:
                if len(attribute_elements['malwares']) > 0:
                    threats = attribute_elements['malwares']
                elif len(attribute_elements['intrusion_sets']) > 0:
                    threats = attribute_elements['intrusion_sets']
                else:
                    threats = []
                for threat in threats:
                    relationship_uses = Relationship(
                        relationship_type='uses',
                        created_by_ref=author,
                        source_ref=threat.id,
                        target_ref=attack_pattern.id,
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_ignore_dates': True
                        }
                    )
                    relationships.append(relationship_uses)
                    relationship_indicates = Relationship(
                        relationship_type='indicates',
                        created_by_ref=author,
                        source_ref=indicator.id,
                        target_ref='malware--fa42a846-8d90-4e51-bc29-71d5b4802168',  # Fake
                        description=attribute['comment'],
                        object_marking_refs=attribute_markings,
                        custom_properties={
                            'x_opencti_first_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_last_seen': datetime.utcfromtimestamp(int(attribute['timestamp'])).strftime(
                                '%Y-%m-%dT%H:%M:%SZ'),
                            'x_opencti_weight': self.helper.connect_confidence_level,
                            'x_opencti_source_ref': indicator.id,
                            'x_opencti_target_ref': relationship_uses.id,
                            'x_opencti_ignore_dates': True
                        }
                    )
                    relationships.append(relationship_indicates)

            return {
                'indicator': indicator,
                'relationships': relationships,
                'attribute_elements': attribute_elements,
                'markings': attribute_markings
            }

    def process_observable_relations(self, object_attributes, result_table, start_element=0):
        if start_element == 0:
            result_table = []
        if len(object_attributes) == 1:
            return []

        for x in range(start_element + 1, len(object_attributes)):
            result_table.append(
                Relationship(
                    relationship_type='corresponds',
                    source_ref=object_attributes[start_element]['indicator']['id'],
                    target_ref=object_attributes[x]['indicator']['id'],
                    description='Same file',
                    custom_properties={
                        'x_opencti_ignore_dates': True
                    }
                )
            )
        if start_element != len(object_attributes):
            return self.process_observable_relations(object_attributes, result_table, start_element + 1)
        else:
            return result_table

    def prepare_elements(self, galaxies):
        elements = {'intrusion_sets': [], 'malwares': [], 'tools': [], 'attack_patterns': []}
        added_names = []
        for galaxy in galaxies:
            # Get the linked intrusion sets
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Intrusion Set') or
                    (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Threat Actor') or
                    (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Microsoft Activity Group actor')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    if ' - G' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - G')[0]
                    elif 'APT ' in galaxy_entity['value']:
                        name = galaxy_entity['value'].replace('APT ', 'APT')
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['intrusion_sets'].append(IntrusionSet(
                            name=name,
                            labels=['intrusion-set'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_aliases': aliases
                            }
                        ))
                        added_names.append(name)
            # Get the linked malwares
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Malware') or
                    (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Tool') or
                    (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Ransomware') or
                    (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Android') or
                    (galaxy['namespace'] == 'misp' and galaxy['name'] == 'Malpedia')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    if ' - S' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - S')[0]
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['malwares'].append(Malware(
                            name=name,
                            labels=['malware'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_aliases': aliases
                            }
                        ))
                        added_names.append(name)
            # Get the linked tools
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Tool')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    if ' - S' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - S')[0]
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['tools'].append(Tool(
                            name=name,
                            labels=['tool'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_aliases': aliases
                            }
                        ))
                        added_names.append(name)
            # Get the linked attack_patterns
            if (
                    (galaxy['namespace'] == 'mitre-attack' and galaxy['name'] == 'Attack Pattern')
            ):
                for galaxy_entity in galaxy['GalaxyCluster']:
                    if ' - T' in galaxy_entity['value']:
                        name = galaxy_entity['value'].split(' - T')[0]
                    else:
                        name = galaxy_entity['value']
                    if 'meta' in galaxy_entity and 'synonyms' in galaxy_entity['meta']:
                        aliases = galaxy_entity['meta']['synonyms']
                    else:
                        aliases = [name]
                    if name not in added_names:
                        elements['attack_patterns'].append(AttackPattern(
                            name=name,
                            labels=['attack-pattern'],
                            description=galaxy_entity['description'],
                            custom_properties={
                                'x_opencti_external_id': galaxy_entity['meta']['external_id'][0],
                                'x_opencti_aliases': aliases,
                            }
                        ))
                        added_names.append(name)
        return elements

    def resolve_type(self, type, value):
        types = {
            'md5': ['file-md5'],
            'sha1': ['file-sha1'],
            'sha256': ['file-sha256'],
            'filename': ['file-name'],
            'pdb': ['pdb-path'],
            'filename|md5': ['file-name', 'file-md5'],
            'filename|sha1': ['file-name', 'file-sha1'],
            'filename|sha256': ['file-name', 'file-sha256'],
            'ip-src': ['ipv4-addr'],
            'ip-dst': ['ipv4-addr'],
            'hostname': ['domain'],
            'domain': ['domain'],
            'domain|ip': ['domain', 'ipv4-addr'],
            'url': ['url'],
            'windows-service-name': ['windows-service-name'],
            'windows-service-displayname': ['windows-service-display-name'],
            'windows-scheduled-task': ['windows-scheduled-task']
        }
        if type in types:
            resolved_types = types[type]
            if len(resolved_types) == 2:
                values = value.split('|')
                if resolved_types[0] == 'ipv4-addr':
                    type_0 = self.detect_ip_version(values[0])
                else:
                    type_0 = resolved_types[0]
                if resolved_types[1] == 'ipv4-addr':
                    type_1 = self.detect_ip_version(values[1])
                else:
                    type_1 = resolved_types[1]
                return [{'type': type_0, 'value': values[0]}, {'type': type_1, 'value': values[1]}]
            else:
                if resolved_types[0] == 'ipv4-addr':
                    type_0 = self.detect_ip_version(value)
                else:
                    type_0 = resolved_types[0]
                return [{'type': type_0, 'value': value}]

    def detect_ip_version(self, value):
        if len(value) > 16:
            return 'ipv6-addr'
        else:
            return 'ipv4-addr'

    def resolve_markings(self, tags, with_default=True):
        markings = []
        for tag in tags:
            if tag['name'] == 'tlp:white':
                markings.append(TLP_WHITE)
            if tag['name'] == 'tlp:green':
                markings.append(TLP_GREEN)
            if tag['name'] == 'tlp:amber':
                markings.append(TLP_AMBER)
            if tag['name'] == 'tlp:red':
                markings.append(TLP_RED)
        if len(markings) == 0 and with_default:
            markings.append(TLP_WHITE)
        return markings
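One detail worth noting in the MISP connector above: detect_ip_version classifies by string length (more than 16 characters means IPv6), which misclassifies compact IPv6 notations such as "2001:db8::1". A more robust alternative, shown as a standalone sketch using the standard library (the fallback to "ipv4-addr" on unparsable input is an assumption chosen to mirror the original default):

import ipaddress


def detect_ip_version(value):
    try:
        address = ipaddress.ip_address(value)
    except ValueError:
        return "ipv4-addr"  # non-IP input: keep the original default
    return "ipv6-addr" if address.version == 6 else "ipv4-addr"


print(detect_ip_version("198.51.100.7"))  # ipv4-addr
print(detect_ip_version("2001:db8::1"))   # ipv6-addr, despite being under 16 chars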
Beispiel #18
0
class Sekoia(object):

    limit = 200

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

        self._cache = {}
        # Extra config
        self.base_url = self.get_config("base_url", config,
                                        "https://api.sekoia.io")
        self.start_date: str = self.get_config("start_date", config, None)
        self.collection = self.get_config(
            "collection", config, "d6092c37-d8d7-45c3-8aff-c4dc26030608")
        self.create_observables = self.get_config("create_observables", config,
                                                  True)

        self.helper.log_info("Setting up api key")
        self.api_key = self.get_config("api_key", config)
        if not self.api_key:
            self.helper.log_error("API key is Missing")
            raise ValueError("API key is Missing")

        self._load_data_sets()
        self.helper.log_info("All datasets has been loaded")

    def run(self):
        self.helper.log_info("Starting SEKOIA.IO connector")
        state = self.helper.get_state() or {}
        cursor = state.get("last_cursor", self.generate_first_cursor())
        self.helper.log_info(f"Starting with {cursor}")
        while True:
            friendly_name = "SEKOIA run @ " + datetime.utcnow().strftime(
                "%Y-%m-%d %H:%M:%S")
            work_id = self.helper.api.work.initiate_work(
                self.helper.connect_id, friendly_name)
            try:
                cursor = self._run(cursor, work_id)
                message = f"Connector successfully run, cursor updated to {cursor}"
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                self.helper.api.work.to_processed(work_id,
                                                  "Connector is stopping")
                exit(0)
            except Exception as ex:
                # In case of error try to get the last updated cursor
                # since `_run` updates it after every successful request
                state = self.helper.get_state() or {}
                cursor = state.get("last_cursor", cursor)
                self.helper.log_error(str(ex))
                message = f"Connector encountered an error, cursor updated to {cursor}"
                self.helper.api.work.to_processed(work_id, message)

            time.sleep(60)

    @staticmethod
    def get_config(name: str, config, default: Any = None):
        env_name = f"SEKOIA_{name.upper()}"
        result = get_config_variable(env_name, ["sekoia", name], config)
        return result or default

    def get_collection_url(self):
        return urljoin(self.base_url, "v2/inthreat/collections",
                       self.collection, "objects")

    def get_object_url(self, ids: Iterable):
        return urljoin(self.base_url, "v2/inthreat/objects", ",".join(ids))

    def get_relationship_url(self, ids: Iterable):
        return urljoin(self.base_url, "v2/inthreat/relationships",
                       ",".join(ids))

    def get_file_url(self, item_id: str, file_hash: str):
        return urljoin(self.base_url, "v2/inthreat/objects", item_id, "files",
                       file_hash)

    def generate_first_cursor(self) -> str:
        """
        Generate the first cursor used to query the API,
        so that we do not start from the very beginning.
        """
        start = f"{(datetime.utcnow() - timedelta(hours=1)).isoformat()}Z"
        if self.start_date:
            try:
                start = f"{parse(self.start_date).isoformat()}Z"
            except ParserError:
                pass

        return base64.b64encode(start.encode("utf-8")).decode("utf-8")

    @staticmethod
    def chunks(items, chunk_size):
        """
        Yield successive chunks of at most chunk_size items.
        """
        for i in range(0, len(items), chunk_size):
            yield items[i:i + chunk_size]

    def _run(self, cursor, work_id):
        while True:
            params = {"limit": self.limit, "cursor": cursor}

            data = self._send_request(self.get_collection_url(), params)
            if not data:
                return cursor

            cursor = data["next_cursor"] or cursor  # In case next_cursor is None
            items = data["items"]
            if not items:
                return cursor

            items = self._retrieve_references(items)
            self._add_main_observable_type_to_indicators(items)
            if self.create_observables:
                self._add_create_observables_to_indicators(items)
            items = self._clean_ic_fields(items)
            self._add_files_to_items(items)
            bundle = self.helper.stix2_create_bundle(items)
            try:
                self.helper.send_stix2_bundle(bundle,
                                              update=True,
                                              work_id=work_id)
            except RecursionError:
                self.helper.send_stix2_bundle(bundle,
                                              update=True,
                                              work_id=work_id,
                                              bypass_split=True)

            self.helper.set_state({"last_cursor": cursor})
            if len(items) < self.limit:
                # We got the last results
                return cursor

    def _clean_ic_fields(self, items: List[Dict]) -> List[Dict]:
        """
        Remove fields specific to the Intelligence Center
        that will not add value in OpenCTI
        """
        return [{
            field: value
            for field, value in item.items()
            if not self._field_to_ignore(field)
        } for item in items]

    @staticmethod
    def _field_to_ignore(field: str) -> bool:
        to_ignore = [
            "x_ic_impacted_locations",
            "x_ic_impacted_sectors",
        ]
        return ((field.startswith("x_ic") or field.startswith("x_inthreat"))
                and (field.endswith("ref")
                     or field.endswith("refs"))) or field in to_ignore

    @staticmethod
    def _add_create_observables_to_indicators(items: List[Dict]):
        for item in items:
            if item.get("type") == "indicator":
                item["x_opencti_create_observables"] = True

    @staticmethod
    def _add_main_observable_type_to_indicators(items: List[Dict]):
        for item in items:
            if (item.get("type") == "indicator"
                    and item.get("x_ic_observable_types") is not None
                    and len(item.get("x_ic_observable_types")) > 0):
                stix_type = item.get("x_ic_observable_types")[0]
                item["x_opencti_main_observable_type"] = (
                    OpenCTIStix2Utils.stix_observable_opencti_type(stix_type))

    def _retrieve_references(self,
                             items: List[Dict],
                             current_depth: int = 0) -> List[Dict]:
        """
        Retrieve the references that appear in the given items.

        To avoid infinite recursion, a safeguard has been implemented.
        """
        if current_depth == 5:
            # Safeguard to avoid infinite recursion (e.g. if an object was not found)
            return items

        items = self._update_mapped_refs(items)
        to_fetch = self._get_missing_refs(items)
        for ref in list(to_fetch):
            if ref in self._cache:
                items.append(self._cache[ref])
                to_fetch.remove(ref)
        if not to_fetch:
            return items

        objects_to_fetch = [
            i for i in to_fetch if not i.startswith("relationship--")
        ]
        items += self._retrieve_by_ids(objects_to_fetch, self.get_object_url)

        relationships_to_fetch = [
            i for i in to_fetch if i.startswith("relationship--")
        ]
        items += self._retrieve_by_ids(relationships_to_fetch,
                                       self.get_relationship_url)
        return self._retrieve_references(items, current_depth + 1)

    def _get_missing_refs(self, items: List[Dict]) -> Set:
        """
        Get the objects' references that are missing from the given items.
        """
        ids = {item["id"] for item in items}
        refs = set()
        for item in items:
            refs.update(item.get("object_marking_refs", []))
            if item.get("created_by_ref"):
                refs.add(item["created_by_ref"])
            if item["type"] == "report":
                object_refs = [
                    ref for ref in item.get("object_refs", [])
                    if not self._is_mapped_ref(ref)
                ]
                refs.update(object_refs)
            if item["type"] == "relationship":
                if not self._is_mapped_ref(item["source_ref"]):
                    refs.add(item["source_ref"])
                if not self._is_mapped_ref(item["target_ref"]):
                    refs.add(item["target_ref"])
        return refs - ids

    def _is_mapped_ref(self, ref: str) -> bool:
        """
        Whether or not the reference is a mapped one.
        """
        return (ref in self._geography_mapping.values()
                or ref in self._sectors_mapping.values())

    def _update_mapped_refs(self, items: List[Dict]):
        """
        Update references that are mapped between SEKOIA and OpenCTI.

        This way we will be able to create links with OpenCTI's own sectors and locations.
        """
        for item in items:
            if item.get("object_marking_refs"):
                item["object_marking_refs"] = self._replace_mapped_refs(
                    item["object_marking_refs"])
            if item.get("object_refs"):
                item["object_refs"] = self._replace_mapped_refs(
                    item["object_refs"])
            if item.get("source_ref"):
                item["source_ref"] = self._get_mapped_ref(item["source_ref"])
            if item.get("target_ref"):
                item["target_ref"] = self._get_mapped_ref(item["target_ref"])
        return items

    def _replace_mapped_refs(self, refs: List):
        for i, ref in enumerate(refs):
            refs[i] = self._get_mapped_ref(ref)
        return refs

    def _get_mapped_ref(self, ref: str):
        if ref in self._geography_mapping:
            return self._geography_mapping[ref]
        if ref in self._sectors_mapping:
            return self._sectors_mapping[ref]
        return ref

    def _retrieve_by_ids(self, ids, url_callback):
        """
        Fetch the items for the given ids.
        """
        items = []
        for chunk in self.chunks(ids, 40):
            url = url_callback(chunk)
            res = self._send_request(url)
            if not res:
                continue
            if "items" in res:
                items.extend(res["items"])
                for item in res["items"]:
                    self._clean_and_add_to_cache(item)
            if "data" in res:
                items.append(res["data"])
                self._clean_and_add_to_cache(res["data"])
        return items

    def _clean_and_add_to_cache(self, item):
        """
        Add item to the cache only if it is an identity or a marking definition
        """
        if item["id"].startswith(
                "marking-definition--") or item["id"].startswith("identity--"):
            if item["id"].startswith("marking-definition--"):
                item.pop("object_marking_refs", None)
            self._cache[item["id"]] = item

    def _send_request(self, url, params=None, binary=False):
        """
        Send the HTTP request and handle errors.
        """
        try:
            headers = {"Authorization": f"Bearer {self.api_key}"}
            res = requests.get(url, params=params, headers=headers)
            res.raise_for_status()
            if binary:
                return res.content
            return res.json()
        except RequestException as ex:
            # `ex.response` is falsy for HTTP error statuses, so compare with None
            if ex.response is not None:
                error = f"Request failed with status: {ex.response.status_code}"
                self.helper.log_error(error)
            else:
                self.helper.log_error(str(ex))
            return None

    def _load_data_sets(self):
        # Mapping between SEKOIA sectors/locations and OpenCTI ones
        self.helper.log_info("Loading locations mapping")
        with open("./data/geography_mapping.json") as fp:
            self._geography_mapping: Dict = json.load(fp)

        self.helper.log_info("Loading sectors mapping")
        with open("./data/sectors_mapping.json") as fp:
            self._sectors_mapping: Dict = json.load(fp)

        # Adds OpenCTI sectors/locations to cache
        self.helper.log_info("Loading OpenCTI sectors")
        with open("./data/sectors.json") as fp:
            objects = json.load(fp)["objects"]
            for sector in objects:
                self._clean_and_add_to_cache(sector)

        self.helper.log_info("Loading OpenCTI locations")
        with open("./data/geography.json") as fp:
            for geography in json.load(fp)["objects"]:
                self._clean_and_add_to_cache(geography)

    def _add_files_to_items(self, items: List[Dict]):
        for item in items:
            if not item.get("x_inthreat_uploaded_files"):
                continue
            item["x_opencti_files"] = []
            for file in item.get("x_inthreat_uploaded_files", []):
                url = self.get_file_url(item["id"], file["sha256"])
                data = self._send_request(url, binary=True)
                if data:
                    item["x_opencti_files"].append({
                        "name":
                        file["file_name"],
                        "data":
                        base64.b64encode(data).decode("utf-8"),
                        "mime_type":
                        file.get("mime_type", "text/plain"),
                    })
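The SEKOIA connector pages through its collection with cursors; generate_first_cursor above shows that the first cursor it builds locally is simply a base64-encoded ISO-8601 UTC timestamp with a trailing "Z". A short sketch of that encoding, runnable on its own:

import base64
from datetime import datetime, timedelta

start = f"{(datetime.utcnow() - timedelta(hours=1)).isoformat()}Z"
cursor = base64.b64encode(start.encode("utf-8")).decode("utf-8")
print(cursor)                                    # opaque-looking cursor value
print(base64.b64decode(cursor).decode("utf-8"))  # round-trips to the timestamp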
Beispiel #19
0
class CrowdStrike:
    """CrowdStrike connector."""

    _CONFIG_NAMESPACE = "crowdstrike"

    _CONFIG_BASE_URL = f"{_CONFIG_NAMESPACE}.base_url"
    _CONFIG_CLIENT_ID = f"{_CONFIG_NAMESPACE}.client_id"
    _CONFIG_CLIENT_SECRET = f"{_CONFIG_NAMESPACE}.client_secret"
    _CONFIG_INTERVAL_SEC = f"{_CONFIG_NAMESPACE}.interval_sec"
    _CONFIG_SCOPES = f"{_CONFIG_NAMESPACE}.scopes"
    _CONFIG_TLP = f"{_CONFIG_NAMESPACE}.tlp"
    _CONFIG_CREATE_OBSERVABLES = f"{_CONFIG_NAMESPACE}.create_observables"
    _CONFIG_CREATE_INDICATORS = f"{_CONFIG_NAMESPACE}.create_indicators"
    _CONFIG_ACTOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.actor_start_timestamp"
    _CONFIG_REPORT_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.report_start_timestamp"
    _CONFIG_REPORT_INCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.report_include_types"
    _CONFIG_REPORT_STATUS = f"{_CONFIG_NAMESPACE}.report_status"
    _CONFIG_REPORT_TYPE = f"{_CONFIG_NAMESPACE}.report_type"
    _CONFIG_REPORT_GUESS_MALWARE = f"{_CONFIG_NAMESPACE}.report_guess_malware"
    _CONFIG_INDICATOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.indicator_start_timestamp"
    _CONFIG_INDICATOR_EXCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.indicator_exclude_types"
    _CONFIG_INDICATOR_LOW_SCORE = f"{_CONFIG_NAMESPACE}.indicator_low_score"
    _CONFIG_INDICATOR_LOW_SCORE_LABELS = (
        f"{_CONFIG_NAMESPACE}.indicator_low_score_labels")

    _CONFIG_UPDATE_EXISTING_DATA = "connector.update_existing_data"

    _CONFIG_SCOPE_ACTOR = "actor"
    _CONFIG_SCOPE_REPORT = "report"
    _CONFIG_SCOPE_INDICATOR = "indicator"
    _CONFIG_SCOPE_YARA_MASTER = "yara_master"

    _CONFIG_REPORT_STATUS_MAPPING = {
        "new": 0,
        "in progress": 1,
        "analyzed": 2,
        "closed": 3,
    }

    _DEFAULT_CREATE_OBSERVABLES = True
    _DEFAULT_CREATE_INDICATORS = True
    _DEFAULT_REPORT_TYPE = "threat-report"
    _DEFAULT_INDICATOR_LOW_SCORE = 40

    _CONNECTOR_RUN_INTERVAL_SEC = 60

    _STATE_LAST_RUN = "last_run"

    def __init__(self) -> None:
        """Initialize CrowdStrike connector."""
        config = self._read_configuration()

        # CrowdStrike connector configuration
        base_url = self._get_configuration(config, self._CONFIG_BASE_URL)
        client_id = self._get_configuration(config, self._CONFIG_CLIENT_ID)
        client_secret = self._get_configuration(config,
                                                self._CONFIG_CLIENT_SECRET)

        self.interval_sec = self._get_configuration(config,
                                                    self._CONFIG_INTERVAL_SEC,
                                                    is_number=True)

        scopes_str = self._get_configuration(config, self._CONFIG_SCOPES)
        scopes = set()
        if scopes_str is not None:
            scopes = set(convert_comma_separated_str_to_list(scopes_str))

        tlp = self._get_configuration(config, self._CONFIG_TLP)
        tlp_marking = self._convert_tlp_to_marking_definition(tlp)

        create_observables = self._get_configuration(
            config, self._CONFIG_CREATE_OBSERVABLES)
        if create_observables is None:
            create_observables = self._DEFAULT_CREATE_OBSERVABLES
        else:
            create_observables = bool(create_observables)

        create_indicators = self._get_configuration(
            config, self._CONFIG_CREATE_INDICATORS)
        if create_indicators is None:
            create_indicators = self._DEFAULT_CREATE_INDICATORS
        else:
            create_indicators = bool(create_indicators)

        actor_start_timestamp = self._get_configuration(
            config, self._CONFIG_ACTOR_START_TIMESTAMP, is_number=True)
        if is_timestamp_in_future(actor_start_timestamp):
            raise ValueError("Actor start timestamp is in the future")

        report_start_timestamp = self._get_configuration(
            config, self._CONFIG_REPORT_START_TIMESTAMP, is_number=True)
        if is_timestamp_in_future(report_start_timestamp):
            raise ValueError("Report start timestamp is in the future")

        report_status_str = self._get_configuration(config,
                                                    self._CONFIG_REPORT_STATUS)
        report_status = self._convert_report_status_str_to_report_status_int(
            report_status_str)

        report_type = self._get_configuration(config, self._CONFIG_REPORT_TYPE)
        if not report_type:
            report_type = self._DEFAULT_REPORT_TYPE

        report_include_types_str = self._get_configuration(
            config, self._CONFIG_REPORT_INCLUDE_TYPES)
        report_include_types = []
        if report_include_types_str is not None:
            report_include_types = convert_comma_separated_str_to_list(
                report_include_types_str)

        report_guess_malware = bool(
            self._get_configuration(config, self._CONFIG_REPORT_GUESS_MALWARE))

        indicator_start_timestamp = self._get_configuration(
            config, self._CONFIG_INDICATOR_START_TIMESTAMP, is_number=True)
        if is_timestamp_in_future(indicator_start_timestamp):
            raise ValueError("Indicator start timestamp is in the future")

        indicator_exclude_types_str = self._get_configuration(
            config, self._CONFIG_INDICATOR_EXCLUDE_TYPES)
        indicator_exclude_types = []
        if indicator_exclude_types_str is not None:
            indicator_exclude_types = convert_comma_separated_str_to_list(
                indicator_exclude_types_str)

        indicator_low_score = self._get_configuration(
            config, self._CONFIG_INDICATOR_LOW_SCORE, is_number=True)
        if indicator_low_score is None:
            indicator_low_score = self._DEFAULT_INDICATOR_LOW_SCORE

        indicator_low_score_labels_str = self._get_configuration(
            config, self._CONFIG_INDICATOR_LOW_SCORE_LABELS)
        indicator_low_score_labels = []
        if indicator_low_score_labels_str is not None:
            indicator_low_score_labels = convert_comma_separated_str_to_list(
                indicator_low_score_labels_str)

        update_existing_data = bool(
            self._get_configuration(config, self._CONFIG_UPDATE_EXISTING_DATA))

        author = self._create_author()

        # Create OpenCTI connector helper.
        self.helper = OpenCTIConnectorHelper(config)

        # Create CrowdStrike client and importers.
        client = CrowdStrikeClient(base_url, client_id, client_secret)

        # Create importers.
        importers: List[BaseImporter] = []

        if self._CONFIG_SCOPE_ACTOR in scopes:
            actor_importer = ActorImporter(
                self.helper,
                client.intel_api.actors,
                update_existing_data,
                author,
                actor_start_timestamp,
                tlp_marking,
            )

            importers.append(actor_importer)

        if self._CONFIG_SCOPE_REPORT in scopes:
            report_importer = ReportImporter(
                self.helper,
                client.intel_api.reports,
                update_existing_data,
                author,
                report_start_timestamp,
                tlp_marking,
                report_include_types,
                report_status,
                report_type,
                report_guess_malware,
            )

            importers.append(report_importer)

        if self._CONFIG_SCOPE_INDICATOR in scopes:
            indicator_importer_config = IndicatorImporterConfig(
                helper=self.helper,
                indicators_api=client.intel_api.indicators,
                reports_api=client.intel_api.reports,
                update_existing_data=update_existing_data,
                author=author,
                default_latest_timestamp=indicator_start_timestamp,
                tlp_marking=tlp_marking,
                create_observables=create_observables,
                create_indicators=create_indicators,
                exclude_types=indicator_exclude_types,
                report_status=report_status,
                report_type=report_type,
                indicator_low_score=indicator_low_score,
                indicator_low_score_labels=set(indicator_low_score_labels),
            )

            indicator_importer = IndicatorImporter(indicator_importer_config)
            importers.append(indicator_importer)

        if self._CONFIG_SCOPE_YARA_MASTER in scopes:
            yara_master_importer = YaraMasterImporter(
                self.helper,
                client.intel_api.rules,
                client.intel_api.reports,
                author,
                tlp_marking,
                update_existing_data,
                report_status,
                report_type,
            )

            importers.append(yara_master_importer)

        self.importers = importers

    @staticmethod
    def _read_configuration() -> Dict[str, str]:
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/../config.yml"
        if not os.path.isfile(config_file_path):
            return {}
        return yaml.load(open(config_file_path), Loader=yaml.FullLoader)

    @staticmethod
    def _create_author() -> Identity:
        return create_organization("CrowdStrike")

    @staticmethod
    def _get_yaml_path(config_name: str) -> List[str]:
        return config_name.split(".")

    @staticmethod
    def _get_environment_variable_name(yaml_path: List[str]) -> str:
        return "_".join(yaml_path).upper()

    @classmethod
    def _get_configuration(cls,
                           config: Dict[str, Any],
                           config_name: str,
                           is_number: bool = False) -> Any:
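        # Resolve the dotted config name both as a YAML path and as an environment
        # variable name (e.g. "crowdstrike.base_url" -> CROWDSTRIKE_BASE_URL).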
        yaml_path = cls._get_yaml_path(config_name)
        env_var_name = cls._get_environment_variable_name(yaml_path)
        config_value = get_config_variable(env_var_name,
                                           yaml_path,
                                           config,
                                           isNumber=is_number)
        return config_value

    @classmethod
    def _convert_tlp_to_marking_definition(
            cls, tlp_value: Optional[str]) -> MarkingDefinition:
        if tlp_value is None:
            return DEFAULT_TLP_MARKING_DEFINITION
        return get_tlp_string_marking_definition(tlp_value)

    @classmethod
    def _convert_report_status_str_to_report_status_int(
            cls, report_status: str) -> int:
        return cls._CONFIG_REPORT_STATUS_MAPPING[report_status.lower()]

    def _load_state(self) -> Dict[str, Any]:
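        # Return the state stored by the connector helper, or an empty dict on first run.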
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    @staticmethod
    def _get_state_value(state: Optional[Mapping[str, Any]],
                         key: str,
                         default: Optional[Any] = None) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    @classmethod
    def _sleep(cls, delay_sec: Optional[int] = None) -> None:
        sleep_delay = (delay_sec if delay_sec is not None else
                       cls._CONNECTOR_RUN_INTERVAL_SEC)
        time.sleep(sleep_delay)

    def _is_scheduled(self, last_run: Optional[int],
                      current_time: int) -> bool:
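        # Run immediately on a clean state, otherwise only when the configured
        # interval has elapsed since the last run.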
        if last_run is None:
            self._info("CrowdStrike connector clean run")
            return True

        time_diff = current_time - last_run
        return time_diff >= self._get_interval()

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        """Run CrowdStrike connector."""
        self._info("Starting CrowdStrike connector...")

        if not self.importers:
            self._error("Scope(s) not configured.")
            return

        while True:
            self._info("Running CrowdStrike connector...")
            run_interval = self._CONNECTOR_RUN_INTERVAL_SEC

            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()

                self.helper.log_info(f"Loaded state: {current_state}")

                last_run = self._get_state_value(current_state,
                                                 self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    work_id = self._initiate_work(timestamp)

                    new_state = current_state.copy()

                    for importer in self.importers:
                        importer_state = importer.start(work_id, new_state)
                        new_state.update(importer_state)

                        self._info("Storing updated new state: {0}", new_state)
                        self.helper.set_state(new_state)

                    new_state[
                        self._STATE_LAST_RUN] = self._current_unix_timestamp()

                    self._info("Storing new state: {0}", new_state)
                    self.helper.set_state(new_state)

                    message = (
                        f"State stored, next run in: {self._get_interval()} seconds"
                    )

                    self._info(message)

                    self._complete_work(work_id, message)
                else:
                    next_run = self._get_interval() - (timestamp - last_run)
                    run_interval = min(run_interval, next_run)

                    self._info(
                        "Connector will not run, next run in: {0} seconds",
                        next_run)

                self._sleep(delay_sec=run_interval)
            except (KeyboardInterrupt, SystemExit):
                self._info("CrowdStrike connector stopping...")
                exit(0)
            except Exception as e:  # noqa: B902
                self._error("CrowdStrike connector internal error: {0}",
                            str(e))
                self._sleep()

    def _initiate_work(self, timestamp: int) -> str:
        datetime_str = timestamp_to_datetime(timestamp)
        friendly_name = f"{self.helper.connect_name} @ {datetime_str}"
        work_id = self.helper.api.work.initiate_work(self.helper.connect_id,
                                                     friendly_name)

        self._info("New work '{0}' initiated", work_id)

        return work_id

    def _complete_work(self, work_id: str, message: str) -> None:
        self.helper.api.work.to_processed(work_id, message)

    def _get_interval(self) -> int:
        return int(self.interval_sec)

    def _info(self, msg: str, *args: Any) -> None:
        fmt_msg = msg.format(*args)
        self.helper.log_info(fmt_msg)

    def _error(self, msg: str, *args: Any) -> None:
        fmt_msg = msg.format(*args)
        self.helper.log_error(fmt_msg)
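
# A minimal, hypothetical entry point for the connector above (not part of the
# original example): OpenCTI connectors are commonly started from a __main__
# guard like this, assuming the module-level imports used above (e.g. time).
if __name__ == "__main__":
    try:
        crowdstrike = CrowdStrike()
        crowdstrike.run()
    except Exception as e:
        print(e)
        time.sleep(10)
        exit(0)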
Example #20
class TaniumConnector:
    def __init__(self):
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.tanium_url = get_config_variable("TANIUM_URL", ["tanium", "url"], config)
        self.tanium_ssl_verify = get_config_variable(
            "TANIUM_SSL_VERIFY", ["tanium", "ssl_verify"], config, False, True
        )
        self.tanium_login = get_config_variable(
            "TANIUM_LOGIN", ["tanium", "login"], config
        )
        self.tanium_password = get_config_variable(
            "TANIUM_PASSWORD", ["tanium", "password"], config
        )
        self.tanium_indicator_types = get_config_variable(
            "TANIUM_INDICATOR_TYPES", ["tanium", "indicator_types"], config
        ).split(",")
        self.tanium_observable_types = get_config_variable(
            "TANIUM_OBSERVABLE_TYPES", ["tanium", "observable_types"], config
        ).split(",")
        self.tanium_import_label = get_config_variable(
            "TANIUM_IMPORT_LABEL", ["tanium", "import_label"], config, False, ""
        )
        self.tanium_import_from_date = get_config_variable(
            "TANIUM_IMPORT_FROM_DATE", ["tanium", "import_from_date"], config
        )
        self.tanium_reputation_blacklist_label = get_config_variable(
            "TANIUM_REPUTATION_BLACKLIST_LABEL",
            ["tanium", "reputation_blacklist_label"],
            config,
            False,
            "",
        )
        self.tanium_auto_quickscan = get_config_variable(
            "TANIUM_AUTO_QUICKSCAN", ["tanium", "auto_quickscan"], config, False, False
        )
        self.tanium_computer_groups = get_config_variable(
            "TANIUM_COMPUTER_GROUPS", ["tanium", "computer_groups"], config, False, ""
        ).split(",")

        # Variables
        self.session = None

        # Open a session
        self._get_session()

        # Create the state
        if self.tanium_import_from_date:
            timestamp = (
                parse(self.tanium_import_from_date).timestamp() * 1000
                if self.tanium_import_from_date != "now"
                else int(round(time.time() * 1000)) - 1000
            )
            current_state = self.helper.get_state()
            if current_state is None:
                self.helper.set_state({"connectorLastEventId": timestamp})

        # Create the source if not exist
        self.source_id = None
        sources = self._query("get", "/plugin/products/detect3/api/v1/sources")
        for source in sources:
            if source["name"] == "OpenCTI":
                self.source_id = source["id"]
        if self.source_id is None:
            source = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources",
                {
                    "type": "api-client",
                    "name": "OpenCTI",
                    "description": "Cyber Threat Intelligence knowledge imported from OpenCTI.",
                    "canAutoQuickScan": True,
                },
            )
            self.source_id = source["id"]

    def _get_session(self):
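        # Authenticate against the Tanium API and keep the returned session token
        # for subsequent requests.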
        payload = {
            "username": self.tanium_login,
            "password": self.tanium_password,
        }
        r = requests.post(
            self.tanium_url + "/api/v2/session/login",
            json=payload,
            verify=self.tanium_ssl_verify,
        )
        if r.status_code == 200:
            result = r.json()
            self.session = result["data"]["session"]
        else:
            raise ValueError("Cannot login to the Tanium API")

    def _query(
        self,
        method,
        uri,
        payload=None,
        content_type="application/json",
        type=None,
        retry=False,
    ):
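        # Generic HTTP wrapper around the Tanium API: builds the session headers,
        # dispatches on the HTTP method and content type, and retries once with a
        # fresh session when the API returns a 401.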
        self.helper.log_info("Query " + method + " on " + uri)
        headers = {"session": self.session}
        if method != "upload":
            headers["content-type"] = content_type
        if type is not None:
            headers["type"] = type
        if content_type == "application/octet-stream":
            headers["content-disposition"] = (
                "attachment; filename=" + payload["filename"]
            )
            if "name" in payload:
                headers["name"] = payload["name"]
            if "description" in payload:
                headers["description"] = payload["description"]
        if method == "get":
            r = requests.get(
                self.tanium_url + uri,
                headers=headers,
                params=payload,
                verify=self.tanium_ssl_verify,
            )
        elif method == "post":
            if content_type == "application/octet-stream":
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    data=payload["document"],
                    verify=self.tanium_ssl_verify,
                )
            elif type is not None:
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    data=payload["intelDoc"],
                    verify=self.tanium_ssl_verify,
                )
            else:
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    json=payload,
                    verify=self.tanium_ssl_verify,
                )
        elif method == "upload":
            with open(payload["filename"], "w") as f:
                f.write(payload["content"])
            files = {"hash": open(payload["filename"], "rb")}
            r = requests.post(
                self.tanium_url + uri,
                headers=headers,
                files=files,
                verify=self.tanium_ssl_verify,
            )
        elif method == "put":
            if content_type == "application/xml":
                r = requests.put(
                    self.tanium_url + uri,
                    headers=headers,
                    data=payload,
                    verify=self.tanium_ssl_verify,
                )
            else:
                r = requests.put(
                    self.tanium_url + uri,
                    headers=headers,
                    json=payload,
                    verify=self.tanium_ssl_verify,
                )
        elif method == "patch":
            r = requests.patch(
                self.tanium_url + uri,
                headers=headers,
                json=payload,
                verify=self.tanium_ssl_verify,
            )
        elif method == "delete":
            r = requests.delete(
                self.tanium_url + uri, headers=headers, verify=self.tanium_ssl_verify
            )
        else:
            raise ValueError("Unspported method")
        if r.status_code == 200:
            try:
                return r.json()
            except ValueError:
                return r.text
        elif r.status_code == 401 and not retry:
            self._get_session()
            return self._query(method, uri, payload, content_type, type, True)
        elif r.status_code == 401:
            raise ValueError("Query failed, permission denied")
        else:
            self.helper.log_info(r.text)

    def _get_labels(self, labels):
        # List labels
        tanium_labels = self._query(
            "get", "/plugin/products/detect3/api/v1/labels", {"limit": 500}
        )
        tanium_labels_dict = {}
        for tanium_label in tanium_labels:
            tanium_labels_dict[tanium_label["name"].lower()] = tanium_label
        final_labels = []
        for label in labels:
            # Label already exists
            if label["value"] in tanium_labels_dict:
                final_labels.append(tanium_labels_dict[label["value"]])
            # Create the label
            else:
                created_label = self._query(
                    "post",
                    "/plugin/products/detect3/api/v1/labels",
                    {
                        "name": label["value"],
                        "description": "Label imported from OpenCTI",
                    },
                )
                final_labels.append(created_label)
        return final_labels

    def _get_by_id(self, internal_id, yara=False):
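        # Look up the intel document previously pushed for this OpenCTI id; YARA
        # documents are stored under the name "<id>.yara", others by description.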
        if yara:
            response = self._query(
                "get",
                "/plugin/products/detect3/api/v1/intels",
                {"name": internal_id + ".yara"},
            )
        else:
            response = self._query(
                "get",
                "/plugin/products/detect3/api/v1/intels",
                {"description": internal_id},
            )
        if response and len(response) > 0:
            return response[0]
        else:
            return None

    def _get_reputation_by_hash(self, hash):
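        # Search the custom reputation list for an entry matching the given hash.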
        response = self._query(
            "get",
            "/plugin/products/reputation/v3/reputations/custom",
            {"search": hash},
        )
        if response["data"] and len(response["data"]) > 0:
            return response["data"][0]
        else:
            return None

    def _create_indicator_stix(self, entity, original_intel_document=None):
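        # Export the entity as a STIX 2 bundle, convert it to STIX XML with
        # slide_string, rewrite the description to carry the OpenCTI id, and
        # create or update the corresponding Tanium intel document.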
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"])
            if intel_document is not None:
                return intel_document
        stix2_bundle = self.helper.api.stix2.export_entity(
            entity["entity_type"],
            entity["id"],
            "simple",
            None,
            True,
            True,
        )
        initialize_options()
        stix_indicator = slide_string(stix2_bundle)
        stix_indicator = re.sub(
            r"<indicator:Description>(.*?)<\/indicator:Description>",
            r"<indicator:Description>" + entity["id"] + "</indicator:Description>",
            stix_indicator,
        )
        stix_indicator = re.sub(
            r"<indicator:Description ordinality=\"1\">(.*?)<\/indicator:Description>",
            r'<indicator:Description ordinality="1">'
            + entity["id"]
            + "</indicator:Description>",
            stix_indicator,
        )
        payload = {"intelDoc": stix_indicator}
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/"
                + str(original_intel_document["id"]),
                stix_indicator,
                "application/xml",
                "stix",
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/"
                + str(self.source_id)
                + "/intels",
                payload,
                "application/xml",
                "stix",
            )
        return intel_document

    def _create_indicator_yara(self, entity, original_intel_document=None):
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"], True)
            if intel_document is not None:
                return intel_document

        filename = entity["id"] + ".yara"
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/"
                + str(original_intel_document["id"]),
                {
                    "filename": filename,
                    "document": entity["pattern"],
                    "name": entity["name"],
                    "description": entity["id"],
                },
                "application/octet-stream",
                "yara",
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/"
                + str(self.source_id)
                + "/intels",
                {
                    "filename": filename,
                    "document": entity["pattern"],
                    "name": entity["name"],
                    "description": entity["id"],
                },
                "application/octet-stream",
                "yara",
            )
        return intel_document

    def _create_tanium_signal(self, entity, original_intel_document=None):
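        # Push the entity pattern as a native Tanium Signal, mapping the MITRE
        # platforms to the values Tanium expects (linux, windows, mac).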
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"])
            if intel_document is not None:
                return intel_document

        platforms = []
        if "x_mitre_platforms" in entity and len(entity["x_mitre_platforms"]) > 0:
            for x_mitre_platform in entity["x_mitre_platforms"]:
                if x_mitre_platform in ["Linux", "Windows", "macOS"]:
                    platforms.append(
                        x_mitre_platform.lower()
                        if x_mitre_platform != "macOS"
                        else "mac"
                    )
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/"
                + str(original_intel_document["id"]),
                {
                    "name": entity["name"],
                    "description": entity["id"],
                    "platforms": platforms,
                    "contents": entity["pattern"],
                },
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/"
                + str(self.source_id)
                + "/intels",
                {
                    "name": entity["name"],
                    "description": entity["id"],
                    "platforms": platforms,
                    "contents": entity["pattern"],
                },
            )
        return intel_document

    def _create_observable(self, entity, original_intel_document=None):
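        # Convert the observable into an OpenIOC quick-add document (file hashes
        # or IP/host values) and create or update the intel document in Tanium.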
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"])
            if intel_document is not None:
                return intel_document

        intel_type = None
        value = None
        name = None
        if entity["entity_type"] == "StixFile":
            intel_type = "file_hash"
            if "hashes" in entity:
                for hash in entity["hashes"]:
                    value = (
                        value + hash["hash"] + "\n"
                        if value is not None
                        else hash["hash"] + "\n"
                    )
                    name = hash["hash"]

        elif entity["entity_type"] in [
            "IPv4-Addr",
            "IPv6-Addr",
            "Domain-Name",
            "X-OpenCTI-Hostname",
        ]:
            intel_type = "ip_or_host"
            value = entity["value"]
            name = entity["value"]
        if intel_type is None or value is None:
            return None

        openioc = self._query(
            "post",
            "/plugin/products/detect3/api/v1/intels/quick-add",
            {
                "exact": True,
                "name": name,
                "description": entity["id"],
                "type": intel_type,
                "text": value,
            },
        )
        openioc = re.sub(
            r"<description>(.*?)<\/description>",
            r"<description>" + entity["id"] + "</description>",
            openioc,
        )
        payload = {"intelDoc": openioc}
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/"
                + str(original_intel_document["id"]),
                payload,
                "application/xml",
                "openioc",
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/"
                + str(self.source_id)
                + "/intels",
                payload,
                "application/xml",
                "openioc",
            )

        return intel_document

    def _post_operations(self, entity, intel_document):
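        # After an intel document has been created: optionally trigger quick scans
        # on the configured computer groups, add an external reference back to the
        # OpenCTI entity, and synchronize its labels to Tanium.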
        if intel_document is not None and entity is not None:
            if self.tanium_auto_quickscan:
                for computer_group in self.tanium_computer_groups:
                    self._query(
                        "post",
                        "/plugin/products/detect3/api/v1/quick-scans",
                        {
                            "computerGroupId": int(computer_group),
                            "intelDocId": intel_document["id"],
                        },
                    )

            external_reference = self.helper.api.external_reference.create(
                source_name="Tanium",
                url=self.tanium_url
                + "/#/thr_workbench/intel/"
                + str(intel_document["id"]),
                external_id=str(intel_document["id"]),
                description="Intel document within the Tanium platform.",
            )
            if entity["entity_type"] == "Indicator":
                self.helper.api.stix_domain_object.add_external_reference(
                    id=entity["id"], external_reference_id=external_reference["id"]
                )
            else:
                self.helper.api.stix_cyber_observable.add_external_reference(
                    id=entity["id"], external_reference_id=external_reference["id"]
                )
            if len(entity["objectLabel"]) > 0:
                labels = self._get_labels(entity["objectLabel"])
                for label in labels:
                    if label is not None:
                        self._query(
                            "put",
                            "/plugin/products/detect3/api/v1/intels/"
                            + str(intel_document["id"])
                            + "/labels",
                            {"id": label["id"]},
                        )

    def _process_intel(self, entity_type, data, original_intel_document=None):
        entity = None
        intel_document = None
        if entity_type == "indicator":
            entity = self.helper.api.indicator.read(id=data["data"]["x_opencti_id"])
            if (
                entity is None
                or entity["revoked"]
                or entity["pattern_type"] not in self.tanium_indicator_types
            ):
                return {"entity": entity, "intel_document": intel_document}
            if entity["pattern_type"] == "stix":
                intel_document = self._create_indicator_stix(
                    entity, original_intel_document
                )
            elif entity["pattern_type"] == "yara":
                intel_document = self._create_indicator_yara(
                    entity, original_intel_document
                )
            elif entity["pattern_type"] == "tanium-signal":
                intel_document = self._create_tanium_signal(
                    entity, original_intel_document
                )
        elif (
            StixCyberObservableTypes.has_value(entity_type)
            and entity_type.lower() in self.tanium_observable_types
        ):
            entity = self.helper.api.stix_cyber_observable.read(
                id=data["data"]["x_opencti_id"]
            )
            if entity is None:
                return {"entity": entity, "intel_document": intel_document}
            intel_document = self._create_observable(entity, original_intel_document)
        return {"entity": entity, "intel_document": intel_document}

    def _process_message(self, msg):
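        # Stream event handler: dispatch create/update/delete events from the
        # OpenCTI live stream to the corresponding Tanium operations.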
        data = json.loads(msg.data)
        entity_type = data["data"]["type"]
        # Skip events that are neither an indicator nor a configured observable
        # type and that do not carry the reputation blacklist label
        if (
            entity_type != "indicator"
            and entity_type not in self.tanium_observable_types
            and (
                "labels" in data["data"]
                and self.tanium_reputation_blacklist_label not in data["data"]["labels"]
            )
            and self.tanium_reputation_blacklist_label != "*"
        ):
            self.helper.log_info(
                "Not an indicator and not an observable to import, doing nothing"
            )
            return
        # Handle creation
        if msg.event == "create":
            # No label
            if (
                "labels" not in data["data"]
                and self.tanium_import_label != "*"
                and self.tanium_reputation_blacklist_label != "*"
            ):
                self.helper.log_info("No label marked as import, doing nothing")
                return
            # Import or blacklist labels are not in the given labels
            elif (
                (
                    "labels" in data["data"]
                    and self.tanium_import_label not in data["data"]["labels"]
                )
                and self.tanium_import_label != "*"
                and self.tanium_reputation_blacklist_label not in data["data"]["labels"]
                and self.tanium_reputation_blacklist_label != "*"
            ):
                self.helper.log_info(
                    "No label marked as import or no global label, doing nothing"
                )
                return
            # Revoked is true
            elif "revoked" in data["data"] and data["data"]["revoked"]:
                return
            if (
                "labels" in data["data"]
                and self.tanium_import_label in data["data"]["labels"]
            ) or self.tanium_import_label == "*":
                # Process intel
                processed_intel = self._process_intel(entity_type, data)
                intel_document = processed_intel["intel_document"]
                entity = processed_intel["entity"]
                # Create external reference and add object labels
                self._post_operations(entity, intel_document)
            if (
                "labels" in data["data"]
                and self.tanium_reputation_blacklist_label in data["data"]["labels"]
            ) or self.tanium_reputation_blacklist_label == "*":
                if "hashes" in data["data"]:
                    entry = {"list": "blacklist"}
                    if "MD5" in data["data"]["hashes"]:
                        entry["md5"] = data["data"]["hashes"]["MD5"]
                        entry["uploadedHash"] = data["data"]["hashes"]["MD5"]
                    else:
                        entry["md5"] = ""
                    if "SHA-1" in data["data"]["hashes"]:
                        entry["sha1"] = data["data"]["hashes"]["SHA-1"]
                        entry["uploadedHash"] = data["data"]["hashes"]["SHA-1"]
                    else:
                        entry["sha1"] = ""
                    if "SHA-256" in data["data"]["hashes"]:
                        entry["sha256"] = data["data"]["hashes"]["SHA-256"]
                        entry["uploadedHash"] = data["data"]["hashes"]["SHA-256"]
                    else:
                        entry["sha256"] = ""
                    entry["notes"] = ",".join(data["data"]["labels"])
                    self._query(
                        "post",
                        "/plugin/products/reputation/v3/reputations/custom/upload?append=true",
                        [entry],
                    )
        elif msg.event == "update":
            if (
                "x_data_update" in data["data"]
                and "add" in data["data"]["x_data_update"]
                and "labels" in data["data"]["x_data_update"]["add"]
            ):
                if self.tanium_reputation_blacklist_label in data["data"][
                    "x_data_update"
                ]["add"]["labels"] and StixCyberObservableTypes.has_value(
                    data["data"]["type"]
                ):
                    observable = self.helper.api.stix_cyber_observable.read(
                        id=data["data"]["id"]
                    )
                    observable = self.helper.api.stix2.generate_export(observable)
                    if "hashes" in observable:
                        entry = {"list": "blacklist"}
                        if "MD5" in observable["hashes"]:
                            entry["md5"] = observable["hashes"]["MD5"]
                            entry["uploadedHash"] = observable["hashes"]["MD5"]
                        else:
                            entry["md5"] = ""
                        if "SHA-1" in observable["hashes"]:
                            entry["sha1"] = observable["hashes"]["SHA-1"]
                            entry["uploadedHash"] = observable["hashes"]["SHA-1"]
                        else:
                            entry["sha1"] = ""
                        if "SHA-256" in observable["hashes"]:
                            entry["sha256"] = observable["hashes"]["SHA-256"]
                            entry["uploadedHash"] = observable["hashes"]["SHA-256"]
                        else:
                            entry["sha256"] = ""
                        entry["notes"] = ",".join(observable["labels"])
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/upload?append=true",
                            [entry],
                        )
                if (
                    self.tanium_import_label
                    in data["data"]["x_data_update"]["add"]["labels"]
                ):
                    # Process intel
                    processed_intel = self._process_intel(entity_type, data)
                    intel_document = processed_intel["intel_document"]
                    entity = processed_intel["entity"]
                    # Create external reference and add object labels
                    self._post_operations(entity, intel_document)
                else:
                    entity = self.helper.api.indicator.read(
                        id=data["data"]["x_opencti_id"],
                        customAttributes="""
                        pattern_type
                    """,
                    )
                    intel_document = self._get_by_id(
                        data["data"]["x_opencti_id"],
                        yara=True
                        if entity is not None and entity["pattern_type"] == "yara"
                        else False,
                    )
                    if intel_document:
                        new_labels = []
                        for label in data["data"]["x_data_update"]["add"]["labels"]:
                            new_labels.append({"value": label})
                        labels = self._get_labels(new_labels)
                        for label in labels:
                            self._query(
                                "put",
                                "/plugin/products/detect3/api/v1/intels/"
                                + str(intel_document["id"])
                                + "/labels",
                                {"id": label["id"]},
                            )
            elif (
                "x_data_update" in data["data"]
                and "remove" in data["data"]["x_data_update"]
                and "labels" in data["data"]["x_data_update"]["remove"]
            ):
                if (
                    self.tanium_reputation_blacklist_label
                    in data["data"]["x_data_update"]["remove"]["labels"]
                ):
                    if "hashes" in data["data"]:
                        if "SHA-256" in data["data"]["hashes"]:
                            self._query(
                                "post",
                                "/plugin/products/reputation/v3/reputations/custom/delete",
                                [data["data"]["hashes"]["SHA-256"]],
                            )
                        if "SHA-1" in data["data"]["hashes"]:
                            self._query(
                                "post",
                                "/plugin/products/reputation/v3/reputations/custom/delete",
                                [data["data"]["hashes"]["SHA-1"]],
                            )
                        if "MD5" in data["data"]["hashes"]:
                            self._query(
                                "post",
                                "/plugin/products/reputation/v3/reputations/custom/delete",
                                [data["data"]["hashes"]["MD5"]],
                            )
                if (
                    self.tanium_import_label
                    in data["data"]["x_data_update"]["remove"]["labels"]
                ):
                    # Import label has been removed
                    intel_document = self._get_by_id(data["data"]["x_opencti_id"])
                    if intel_document is not None:
                        self._query(
                            "delete",
                            "/plugin/products/detect3/api/v1/intels/"
                            + str(intel_document["id"]),
                        )
                    # Remove external references
                    if entity_type == "indicator":
                        entity = self.helper.api.indicator.read(
                            id=data["data"]["x_opencti_id"]
                        )
                    else:
                        entity = self.helper.api.stix_cyber_observable.read(
                            id=data["data"]["x_opencti_id"]
                        )
                    if (
                        entity
                        and "externalReferences" in entity
                        and len(entity["externalReferences"]) > 0
                    ):
                        for external_reference in entity["externalReferences"]:
                            if external_reference["source_name"] == "Tanium":
                                self.helper.api.external_reference.delete(
                                    external_reference["id"]
                                )
                else:
                    intel_document = self._get_by_id(data["data"]["x_opencti_id"])
                    if intel_document:
                        new_labels = []
                        for label in data["data"]["x_data_update"]["remove"]["labels"]:
                            new_labels.append({"value": label})
                        labels = self._get_labels(new_labels)
                        for label in labels:
                            self._query(
                                "delete",
                                "/plugin/products/detect3/api/v1/intels/"
                                + str(intel_document["id"])
                                + "/labels/"
                                + str(label["id"]),
                            )
            elif (
                "x_data_update" in data["data"]
                and "replace" in data["data"]["x_data_update"]
            ):
                if entity_type == "indicator":
                    if "pattern" in data["data"]["x_data_update"]["replace"]:
                        intel_document = self._get_by_id(data["data"]["x_opencti_id"])
                        if intel_document is not None:
                            self._process_intel(entity_type, data, intel_document)
                    elif (
                        "value" in data["data"]["x_data_update"]["replace"]
                        or "hashes" in data["data"]["x_data_update"]["replace"]
                    ):
                        intel_document = self._get_by_id(data["data"]["x_opencti_id"])
                        if intel_document is not None:
                            self._process_intel(entity_type, data, intel_document)
                    elif (
                        "revoked" in data["data"]["x_data_update"]["replace"]
                        and data["data"]["x_data_update"]["replace"]["revoked"] == True
                    ):
                        intel_document = self._get_by_id(data["data"]["x_opencti_id"])
                        if intel_document is not None:
                            self._query(
                                "delete",
                                "/plugin/products/detect3/api/v1/intels/"
                                + str(intel_document["id"]),
                            )
                            # Remove external references
                            if entity_type == "indicator":
                                entity = self.helper.api.indicator.read(
                                    id=data["data"]["x_opencti_id"]
                                )
                            else:
                                entity = self.helper.api.stix_cyber_observable.read(
                                    id=data["data"]["x_opencti_id"]
                                )
                            if (
                                entity
                                and "externalReferences" in entity
                                and len(entity["externalReferences"]) > 0
                            ):
                                for external_reference in entity["externalReferences"]:
                                    if external_reference["source_name"] == "Tanium":
                                        self.helper.api.external_reference.delete(
                                            external_reference["id"]
                                        )
        elif msg.event == "delete":
            intel_document = self._get_by_id(data["data"]["x_opencti_id"])
            if intel_document is not None:
                self._query(
                    "delete",
                    "/plugin/products/detect3/api/v1/intels/"
                    + str(intel_document["id"]),
                )
            if data["data"]["type"] == "file":
                if "hashes" in data["data"]:
                    if "SHA-256" in data["data"]["hashes"]:
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/delete",
                            [data["data"]["hashes"]["SHA-256"]],
                        )
                    if "SHA-1" in data["data"]["hashes"]:
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/delete",
                            [data["data"]["hashes"]["SHA-1"]],
                        )
                    if "MD5" in data["data"]["hashes"]:
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/delete",
                            [data["data"]["hashes"]["MD5"]],
                        )

    def start(self):
        self.alerts_gatherer = TaniumConnectorAlertsGatherer(
            self.helper,
            self.tanium_url,
            self.tanium_login,
            self.tanium_password,
            self.tanium_ssl_verify,
        )
        self.alerts_gatherer.start()
        self.helper.listen_stream(self._process_message)
Example #21
class Cve:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader
                           ) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_nvd_data_feed = get_config_variable('CVE_NVD_DATA_FEED',
                                                     ['cve', 'nvd_data_feed'],
                                                     config)
        self.cve_interval = get_config_variable('CVE_INTERVAL',
                                                ['cve', 'interval'], config,
                                                True)
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA',
            ['connector', 'update_existing_data'], config)

    def get_interval(self):
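        # The configured interval is expressed in days; convert it to seconds.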
        return int(self.cve_interval) * 60 * 60 * 24

    def run(self):
        self.helper.log_info('Fetching CVE knowledge...')
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and 'last_run' in current_state:
                    last_run = current_state['last_run']
                    self.helper.log_info('Connector last run: ' +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime('%Y-%m-%d %H:%M:%S'))
                else:
                    last_run = None
                    self.helper.log_info('Connector has never run')
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.cve_interval) - 1) * 60 * 60 * 24)):
                    # Downloading json.gz file
                    self.helper.log_info('Requesting the file')
                    data_dir = os.path.dirname(os.path.abspath(__file__))
                    urllib.request.urlretrieve(self.cve_nvd_data_feed,
                                               data_dir + '/data.json.gz')
                    # Unzipping the file (in the same directory as the download)
                    self.helper.log_info('Unzipping the file')
                    with gzip.open(data_dir + '/data.json.gz', 'rb') as f_in:
                        with open(data_dir + '/data.json', 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)
                    # Converting the file to stix2
                    self.helper.log_info('Converting the file')
                    convert(data_dir + '/data.json',
                            data_dir + '/data-stix2.json')
                    with open(data_dir + '/data-stix2.json') as stix_json:
                        contents = stix_json.read()
                        self.helper.send_stix2_bundle(
                            contents, self.helper.connect_scope,
                            self.update_existing_data)

                    # Remove files
                    os.remove(data_dir + '/data.json')
                    os.remove(data_dir + '/data.json.gz')
                    os.remove(data_dir + '/data-stix2.json')
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        'Connector successfully run, storing last_run as ' +
                        str(timestamp))
                    self.helper.set_state({'last_run': timestamp})
                    self.helper.log_info(
                        'Last_run stored, next run in: ' +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        ' days')
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        'Connector will not run, next run in: ' +
                        str(round(new_interval / 60 / 60 / 24, 2)) + ' days')
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info('Connector stop')
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
Example #22
class ImportFilePdfObservables:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        self.create_indicator = get_config_variable(
            "PDF_OBSERVABLES_CREATE_INDICATOR",
            ["pdf_observables", "create_indicator"],
            config,
        )

    def _process_message(self, data):
        file_path = data["file_path"]
        file_name = os.path.basename(file_path)
        work_context = data["work_context"]
        file_uri = self.helper.opencti_url + file_path
        self.helper.log_info("Importing the file " + file_uri)
        # Get the file
        file_content = self.helper.api.fetch_opencti_file(file_uri, True)
        # Write the file
        path = "/tmp/" + file_name
        with open(path, "wb") as f:
            f.write(file_content)
        # Parse
        bundle = {
            "type": "bundle",
            "id": "bundle--" + str(uuid.uuid4()),
            "spec_version": "2.0",
            "objects": [],
        }
        observed_data = {
            "id": "observed-data--" + str(uuid.uuid4()),
            "type": "observed-data",
            "x_opencti_indicator_create": self.create_indicator,
            "objects": {},
        }
        i = 0
        parser = iocp.IOC_Parser(None, "pdf", True, "pdfminer", "json")
        parsed = parser.parse(path)
        os.remove(path)
        if parsed:
            for file in parsed:
                if file is not None:
                    for page in file:
                        if page:
                            for match in page:
                                resolved_match = self.resolve_match(match)
                                if resolved_match:
                                    observable = {
                                        "type": resolved_match["type"],
                                        "x_opencti_observable_type": resolved_match["type"],
                                        "x_opencti_observable_value": resolved_match["value"],
                                        "x_opencti_indicator_create": self.create_indicator,
                                    }
                                    observed_data["objects"][i] = observable
                                    i += 1
        else:
            self.helper.log_error("Could not parse the report!")

        # Get context
        if len(observed_data["objects"]) > 0:
            bundle["objects"].append(observed_data)
            if work_context is not None and len(work_context) > 0:
                report = self.helper.api.report.read(id=work_context)
                if report is not None:
                    report_stix = {
                        "type": "report",
                        "id": report["stix_id_key"],
                        "name": report["name"],
                        "description": report["description"],
                        "published": self.helper.api.stix2.format_date(
                            report["published"]),
                        "object_refs": [],
                    }
                    report_stix["object_refs"].append(observed_data["id"])
                    bundle["objects"].append(report_stix)
            bundles_sent = self.helper.send_stix2_bundle(
                json.dumps(bundle), None, False, False)
            return [
                "Sent " + str(len(bundles_sent)) +
                " stix bundle(s) for worker import"
            ]

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)

    def resolve_match(self, match):
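        # Map the types reported by the IOC parser onto OpenCTI observable types;
        # IP matches are further split into IPv4/IPv6.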
        types = {
            "MD5": ["File-MD5"],
            "SHA1": ["File-SHA1"],
            "SHA256": ["File-SHA256"],
            "Filename": ["File-Name"],
            "IP": ["IPv4-Addr"],
            "Host": ["Domain"],
            "Filepath": ["File-Name"],
            "URL": ["URL"],
            "Email": ["Email-Address"],
        }
        match_type = match["type"]
        value = match["match"]
        if match_type in types:
            resolved_types = types[match_type]
            if resolved_types[0] == "IPv4-Addr":
                type_0 = self.detect_ip_version(value)
            else:
                type_0 = resolved_types[0]
            return {"type": type_0, "value": value}
        else:
            return False

    def detect_ip_version(self, value):
        # IPv6 addresses always contain ":" while IPv4 addresses never do;
        # this is more reliable than checking the string length
        if ":" in value:
            return "IPv6-Addr"
        return "IPv4-Addr"
Beispiel #23
0
class Cryptolaemus:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.interval = 1  # 1 Day interval between each scraping
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.data = {}

    def get_interval(self):
        return int(self.interval) * 60 * 60 * 24

    def next_run(self, seconds):
        # Not implemented: the scheduling check is performed directly in run()
        return

    def run(self):
        self.helper.log_info("Fetching Cryptolaemus Emotet's datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: " +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime("%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # Run if the last run was more than (interval - 1) days ago
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.interval) - 1) * 60 * 60 * 24)):
                    self.helper.log_info("Connector will run!")

                    ## CORE ##

                    # get feed content
                    feed = feedparser.parse(
                        "https://paste.cryptolaemus.com/feed.xml")
                    # variables
                    Epoch1C2 = []  # List of C2 of Epoch1 Botnet
                    Epoch2C2 = []  # List of C2 of Epoch2 Botnet
                    Epoch3C2 = []  # List of C2 of Epoch3 Botnet
                    # Only the most recent feed item is processed
                    source = feed["items"][0][
                        "id"]  # Source of the data (the "id" field of the RSS item)
                    date = feed["items"][0][
                        "updated"]  # Date of the data (the "updated" field of the RSS item)
                    soup = BeautifulSoup(
                        feed["items"][0]["content"][0]["value"], "lxml"
                    )  # HTML content of the most recent RSS item
                    # The IP:port pairs sit in plain <code> blocks with no id and no
                    # distinctive parent node, so each Epoch list is selected by its
                    # position in the find_all("code") results
                    list1 = soup.find_all("code")[0].text.split("\n")
                    list2 = soup.find_all("code")[3].text.split("\n")
                    list3 = soup.find_all("code")[6].text.split("\n")
                    # Parse the IP:port pairs, skipping blank lines so that
                    # the port (ip[1]) is always present
                    for line in list1:
                        if line.strip():
                            Epoch1C2.append(line.split(":"))
                    for line in list2:
                        if line.strip():
                            Epoch2C2.append(line.split(":"))
                    for line in list3:
                        if line.strip():
                            Epoch3C2.append(line.split(":"))
                    # Aggregate
                    self.data = {
                        "Source": source,
                        "Date": date,
                        "Epoch1C2": Epoch1C2,
                        "Epoch2C2": Epoch2C2,
                        "Epoch3C2": Epoch3C2,
                    }

                    # Create the Cryptolaemus Team organization
                    organization = self.helper.api.identity.create(
                        type="Organization",
                        name="Cryptolaemus Team",
                        description="Team of experts collecting and sharing daily "
                        "updates of the C2 IPs of Emotet's Epoch botnets.",
                    )
                    external_reference = self.helper.api.external_reference.create(
                        source_name="Cryptolaemus Team's Emotet C2 update of "
                        + self.data["Date"],
                        url=self.data["Source"],
                    )
                    malware = self.helper.api.malware.create(
                        name="Emotet",
                        description=
                        "Emotet is a modular malware variant which is primarily used as a downloader for other malware variants such as TrickBot and IcedID. Emotet first emerged in June 2014 and has been primarily used to target the banking sector. (Citation: Trend Micro Banking Malware Jan 2019)",
                    )

                    # Create and link indicators for the Epoch1 C2 IPs
                    for ip in self.data["Epoch1C2"]:
                        indicator = self.helper.api.indicator.create(
                            name=ip[0],
                            description="Botnet Epoch1 C2 IP Adress. Port: " +
                            ip[1],
                            pattern_type="stix",
                            indicator_pattern="[ipv4-addr:value = '" + ip[0] +
                            "']",
                            main_observable_type="IPv4-Addr",
                            valid_from=self.data["Date"],
                        )
                        if "observableRefsIds" in indicator:
                            for observable_id in indicator[
                                    "observableRefsIds"]:
                                self.helper.api.stix_entity.add_external_reference(
                                    id=observable_id,
                                    external_reference_id=external_reference[
                                        "id"],
                                )

                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=external_reference["id"],
                        )
                        relation = self.helper.api.stix_relation.create(
                            fromId=indicator["id"],
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self.data["Date"],
                            last_seen=self.data["Date"],
                            description=
                            "IP Address associated with the Emotet Epoch1 botnet",
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            ignore_dates=True,
                            update=True,
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=external_reference["id"],
                        )

                    # Create and link indicators for the Epoch2 C2 IPs
                    for ip in self.data["Epoch2C2"]:
                        indicator = self.helper.api.indicator.create(
                            name=ip[0],
                            description="Botnet Epoch2 C2 IP Adress. Port: " +
                            ip[1],
                            pattern_type="stix",
                            indicator_pattern="[ipv4-addr:value = '" + ip[0] +
                            "']",
                            main_observable_type="IPv4-Addr",
                            valid_from=self.data["Date"],
                            createdByRef=organization["id"],
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=external_reference["id"],
                        )
                        relation = self.helper.api.stix_relation.create(
                            fromType="Indicator",
                            fromId=indicator["id"],
                            toType="Malware",
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self.data["Date"],
                            last_seen=self.data["Date"],
                            description=
                            "IP Address associated with the Emotet Epoch2 botnet.",
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            ignore_dates=True,
                            update=True,
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=external_reference["id"],
                        )

                    # Create and link indicators for the Epoch3 C2 IPs
                    for ip in self.data["Epoch3C2"]:
                        indicator = self.helper.api.indicator.create(
                            name=ip[0],
                            description="Botnet Epoch3 C2 IP Adress. Port: " +
                            ip[1],
                            pattern_type="stix",
                            indicator_pattern="[ipv4-addr:value = '" + ip[0] +
                            "']",
                            main_observable_type="IPv4-Addr",
                            valid_from=self.data["Date"],
                            createdByRef=organization["id"],
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=external_reference["id"],
                        )
                        relation = self.helper.api.stix_relation.create(
                            fromType="Indicator",
                            fromId=indicator["id"],
                            toType="Malware",
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self.data["Date"],
                            last_seen=self.data["Date"],
                            description=
                            "IP Address associated with the Emotet Epoch3 botnet.",
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            ignore_dates=True,
                            update=True,
                        )
                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=external_reference["id"],
                        )

                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as " +
                        str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
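
The Cryptolaemus connector relies on the position of the <code> blocks in the feed's HTML. A self-contained sketch of that extraction, using a made-up miniature of the post body (requires beautifulsoup4 and lxml, which the connector already uses):

from bs4 import BeautifulSoup

# Hypothetical miniature of the paste.cryptolaemus.com post body: each Epoch's
# C2 list sits in its own <code> block, one IP:port pair per line.
SAMPLE_HTML = """
<article>
  <code>192.0.2.10:8080
198.51.100.7:443</code>
  <code>203.0.113.5:80</code>
</article>
"""

soup = BeautifulSoup(SAMPLE_HTML, "lxml")
epoch1_c2 = [
    line.split(":")
    for line in soup.find_all("code")[0].text.split("\n")
    if line.strip()  # skip blank lines so the port (ip[1]) always exists
]
print(epoch1_c2)  # [['192.0.2.10', '8080'], ['198.51.100.7', '443']]
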
Beispiel #24
0
class HygieneConnector:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        self.warninglists = WarningLists()

        # Create Hygiene Tag
        self.label_hygiene = self.helper.api.label.create(
            value="Hygiene", color="#fc0341"
        )

    def _process_observable(self, observable) -> str:
        # Extract IPv4, IPv6 and Domain from entity data
        observable_value = observable["observable_value"]

        # Search in warninglist
        result = self.warninglists.search(observable_value)

        # Iterate over the hits
        if result:
            self.helper.log_info(
                "Hit found for %s in warninglists" % (observable_value)
            )

            for hit in result:
                self.helper.log_info(
                    "Type: %s | Name: %s | Version: %s | Descr: %s"
                    % (hit.type, hit.name, hit.version, hit.description)
                )

                # Set the score from the number of warninglist hits: the more
                # lists the value appears on, the more likely it is benign,
                # so the lower the x_opencti_score
                if len(result) >= 5:
                    score = "5"
                elif len(result) >= 3:
                    score = "10"
                elif len(result) == 1:
                    score = "15"
                else:
                    score = "20"

                self.helper.log_info(
                    f"number of hits ({len(result)}) setting score to {score}"
                )
                self.helper.api.stix_cyber_observable.add_label(
                    id=observable["id"], label_id=self.label_hygiene["id"]
                )
                self.helper.api.stix_cyber_observable.update_field(
                    id=observable["id"], key="x_opencti_score", value=score
                )
                for indicator_id in observable["indicatorsIds"]:
                    self.helper.api.stix_domain_object.add_label(
                        id=indicator_id, label_id=self.label_hygiene["id"]
                    )
                    self.helper.api.stix_domain_object.update_field(
                        id=indicator_id, key="x_opencti_score", value=score
                    )

                # Create external references
                external_reference_id = self.helper.api.external_reference.create(
                    source_name="misp-warninglist",
                    url="https://github.com/MISP/misp-warninglists/tree/main/"
                    + LIST_MAPPING[hit.name],
                    external_id=hit.name,
                    description=hit.description,
                )
                self.helper.api.stix_cyber_observable.add_external_reference(
                    id=observable["id"],
                    external_reference_id=external_reference_id["id"],
                )

            return "Observable value found on warninglist and tagged accordingly"

    def _process_message(self, data) -> str:
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_cyber_observable.read(id=entity_id)
        return self._process_observable(observable)

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
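
The score thresholds above can be read as a small lookup table; this standalone helper (the function name is illustrative) reproduces the branch logic of the Hygiene connector exactly:

def hygiene_score(hit_count: int) -> str:
    # Same thresholds as the connector above: the more warninglists a value
    # appears on, the more likely it is benign, hence the lower the score.
    if hit_count >= 5:
        return "5"
    if hit_count >= 3:
        return "10"
    if hit_count == 1:
        return "15"
    return "20"  # note: exactly 2 hits also lands here, as in the code above

print({n: hygiene_score(n) for n in (1, 2, 3, 4, 5, 7)})
# {1: '15', 2: '20', 3: '10', 4: '10', 5: '5', 7: '5'}
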
Beispiel #25
0
class CyberThreatCoalition:

    _OBSERVABLE_PATH = {
        "Domain-Name": ["value"],
        "IPv4-Addr": ["value"],
        "File_sha256": ["hashes", "SHA-256"],
        "File_sha1": ["hashes", "SHA-1"],
        "File_md5": ["hashes", "MD5"],
        "Url": ["value"],
    }

    _INDICATOR_PATTERN = {
        "Domain-Name": "[domain-name:value = '{}']",
        "IPv4-Addr": "[ipv4-addr:value = '{}']",
        "File_sha256": "[file:hashes.SHA-256 = '{}']",
        "File_sha1": "[file:hashes.SHA-1 = '{}']",
        "File_md5": "[file:hashes.MD5 = '{}']",
        "Url": "[url:value = '{}']",
    }
    _STATE_LAST_RUN = "last_run"

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cyber_threat_coalition_interval = get_config_variable(
            "CYBER_THREAT_COALITION_INTERVAL",
            ["cyber-threat-coalition", "interval_sec"],
            config,
            True,
        )
        self.cyber_threat_coalition_base_url = get_config_variable(
            "CYBER_THREAT_COALITION_BASE_URL",
            ["cyber-threat-coalition", "base_url"],
            config,
            False,
        )
        self.cyber_threat_coalition_create_indicators = get_config_variable(
            "CYBER_THREAT_COALITION_CREATE_INDICATORS",
            ["cyber-threat-coalition", "create_indicators"],
            config,
        )
        self.cyber_threat_coalition_create_observables = get_config_variable(
            "CYBER_THREAT_COALITION_CREATE_OBSERVABLES",
            ["cyber-threat-coalition", "create_observables"],
            config,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self) -> int:
        return int(self.cyber_threat_coalition_interval)

    @staticmethod
    def get_hash_type(hash_value):
        if re.match(r"^[0-9a-fA-F]{32}$", hash_value):
            return "File_md5"
        elif re.match(r"^[0-9a-fA-F]{40}$", hash_value):
            return "File_sha1"
        elif re.match(r"^[0-9a-fA-F]{64}$", hash_value):
            return "File_sha256"

    def fetch_and_send(self):
        timestamp = int(time.time())
        now = datetime.utcfromtimestamp(timestamp)
        friendly_name = "Cyber Threat Coalition run @ " + now.strftime(
            "%Y-%m-%d %H:%M:%S"
        )
        work_id = self.helper.api.work.initiate_work(
            self.helper.connect_id, friendly_name
        )
        bundle_objects = list()

        # create an identity for the coalition team
        organization = stix2.Identity(
            id=OpenCTIStix2Utils.generate_random_stix_id("identity"),
            name="Cyber Threat Coalition Team",
            identity_class="organization",
            description="Team of Experts collecting and sharing pandemic related "
            "cyber threat intelligence during the COVID-19 crisis time",
        )

        # add organization in bundle
        bundle_objects.append(organization)
        report_object_refs = list()

        for collection in ["domain", "ip", "url", "hash"]:
            # fetch blacklist
            url = self.cyber_threat_coalition_base_url + "/" + str(collection) + ".txt"
            response = requests.get(url=url)
            if response.status_code != 200:
                raise Exception(
                    "Unable to fetch {0} blacklist, server returned status: {1}".format(
                        collection, response.status_code
                    )
                )
            pattern_type = "stix"
            labels = ["COVID-19", "malicious-activity"]
            # parse content
            for data in response.iter_lines(decode_unicode=True):
                observable_type = None
                observable_resolver = None
                if data and not data.startswith("#"):
                    if collection == "domain":
                        observable_resolver = "Domain-Name"
                        observable_type = "Domain-Name"
                    elif collection == "ip":
                        observable_resolver = "IPv4-Addr"
                        observable_type = "IPv4-Addr"
                    elif collection == "url":
                        observable_resolver = "Url"
                        observable_type = "Url"
                        data = urllib.parse.quote(data, "/:")
                    elif collection == "hash":
                        observable_resolver = self.get_hash_type(data)
                        observable_type = "File"
                    indicator = None
                    if observable_resolver is None or observable_type is None:
                        # unrecognised line (e.g. unknown hash format): skip it
                        continue
                    if self.cyber_threat_coalition_create_indicators:
                        indicator = stix2.Indicator(
                            id=OpenCTIStix2Utils.generate_random_stix_id("indicator"),
                            name=data,
                            pattern_type=pattern_type,
                            pattern=self._INDICATOR_PATTERN[observable_resolver].format(
                                data
                            ),
                            labels=labels,
                            created_by_ref=organization,
                            object_marking_refs=[stix2.TLP_WHITE],
                            custom_properties={
                                "x_opencti_main_observable_type": observable_type,
                            },
                        )
                        bundle_objects.append(indicator)
                        report_object_refs.append(indicator["id"])
                    if self.cyber_threat_coalition_create_observables:
                        observable = SimpleObservable(
                            id=OpenCTIStix2Utils.generate_random_stix_id(
                                "x-opencti-simple-observable"
                            ),
                            key=observable_type
                            + "."
                            + ".".join(self._OBSERVABLE_PATH[observable_resolver]),
                            value=data,
                            labels=labels,
                            created_by_ref=organization,
                            object_marking_refs=[stix2.TLP_WHITE],
                        )
                        bundle_objects.append(observable)
                        report_object_refs.append(observable["id"])
                        if indicator is not None:
                            relationship = stix2.Relationship(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "relationship"
                                ),
                                relationship_type="based-on",
                                created_by_ref=organization,
                                source_ref=indicator.id,
                                target_ref=observable.id,
                            )
                            bundle_objects.append(relationship)
                            report_object_refs.append(relationship["id"])

        # create a global threat report
        report_uuid = "report--552b3ae6-8522-409d-8b72-a739bc1926aa"
        report_external_reference = stix2.ExternalReference(
            source_name="Cyber Threat Coalition",
            url="https://www.cyberthreatcoalition.org",
            external_id="COVID19-CTC",
        )
        if report_object_refs:
            stix_report = stix2.Report(
                id=report_uuid,
                name="COVID-19 Cyber Threat Coalition (CTC) BlackList",
                type="report",
                description="This report represents the whole COVID-19 CTC blacklist.",
                published=datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
                created_by_ref=organization,
                object_marking_refs=[stix2.TLP_WHITE],
                labels=labels,
                external_references=[report_external_reference],
                object_refs=report_object_refs,
            )
            # add report in bundle
            bundle_objects.append(stix_report)

        # create stix bundle
        bundle = stix2.Bundle(objects=bundle_objects)

        # send data
        self.helper.send_stix2_bundle(
            bundle=bundle.serialize(), update=self.update_existing_data, work_id=work_id
        )
        return work_id

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    def _is_scheduled(self, last_run: Optional[int], current_time: int) -> bool:
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= self.get_interval()

    @staticmethod
    def _get_state_value(
        state: Optional[Mapping[str, Any]], key: str, default: Optional[Any] = None
    ) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        self.helper.log_info("Fetching Cyber Threat Coalition vetted blacklists...")
        while True:
            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()
                self.helper.log_info(f"Loaded state: {current_state}")

                last_run = self._get_state_value(current_state, self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    # fetch data and send as stix bundle
                    work_id = self.fetch_and_send()
                    new_state = current_state.copy()
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()
                    message = f"Run done. Storing new state: {new_state}"
                    self.helper.log_info(message)
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.set_state(new_state)
                    self.helper.log_info(
                        f"State stored, next run in: {self.get_interval()} seconds"
                    )
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        f"Connector will not run, next run in: {new_interval} seconds"
                    )
                time.sleep(60)

            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as ex:
                self.helper.log_error(str(ex))
                time.sleep(60)
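
The class-level _INDICATOR_PATTERN templates above expand into STIX patterns with str.format; a quick illustration with made-up values:

# Illustrative subset of the _INDICATOR_PATTERN table defined on the class.
INDICATOR_PATTERN = {
    "Domain-Name": "[domain-name:value = '{}']",
    "IPv4-Addr": "[ipv4-addr:value = '{}']",
    "Url": "[url:value = '{}']",
}

samples = [
    ("Domain-Name", "bad.example.org"),
    ("IPv4-Addr", "192.0.2.15"),
    ("Url", "http://bad.example.org/payload"),
]

for resolver, value in samples:
    print(INDICATOR_PATTERN[resolver].format(value))
# [domain-name:value = 'bad.example.org']
# [ipv4-addr:value = '192.0.2.15']
# [url:value = 'http://bad.example.org/payload']
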
Beispiel #26
0
class ReportImporter:
    def __init__(self) -> None:
        # Instantiate the connector helper from config
        base_path = os.path.dirname(os.path.abspath(__file__))
        config_file_path = base_path + "/../config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})

        self.helper = OpenCTIConnectorHelper(config)
        self.create_indicator = get_config_variable(
            "IMPORT_DOCUMENT_CREATE_INDICATOR",
            ["import_document", "create_indicator"],
            config,
        )

        # Load Entity and Observable configs
        observable_config_file = base_path + "/config/observable_config.ini"
        entity_config_file = base_path + "/config/entity_config.ini"

        if os.path.isfile(observable_config_file):
            self.observable_config = self._parse_config(
                observable_config_file, Observable)
        else:
            raise FileNotFoundError(f"{observable_config_file} was not found")

        if os.path.isfile(entity_config_file):
            self.entity_config = self._parse_config(entity_config_file,
                                                    EntityConfig)
        else:
            raise FileNotFoundError(f"{entity_config_file} was not found")

    def _process_message(self, data: Dict) -> str:
        self.helper.log_info("Processing new message")
        file_name = self._download_import_file(data)
        entity_id = data.get("entity_id", None)
        bypass_validation = data.get("bypass_validation", False)
        entity = (self.helper.api.stix_domain_object.read(
            id=entity_id) if entity_id is not None else None)
        if self.helper.get_only_contextual() and entity is None:
            return "Connector is only contextual and entity is not defined. Nothing was imported"

        # Retrieve entity set from OpenCTI
        entity_indicators = self._collect_stix_objects(self.entity_config)

        # Parse report
        parser = ReportParser(self.helper, entity_indicators,
                              self.observable_config)
        parsed = parser.run_parser(file_name, data["file_mime"])
        os.remove(file_name)

        if not parsed:
            return "No information extracted from report"

        # Process parsing results
        self.helper.log_debug("Results: {}".format(parsed))
        observables, entities = self._process_parsing_results(parsed, entity)
        # Send results to OpenCTI
        observable_cnt = self._process_parsed_objects(entity, observables,
                                                      entities,
                                                      bypass_validation,
                                                      file_name)
        entity_cnt = len(entities)

        if self.helper.get_validate_before_import() and not bypass_validation:
            return "Generated bundle sent for validation"
        else:
            return (
                f"Sent {observable_cnt} observables, 1 report update and {entity_cnt} entity connections as stix "
                f"bundle for worker import ")

    def start(self) -> None:
        self.helper.listen(self._process_message)

    def _download_import_file(self, data: Dict) -> str:
        file_fetch = data["file_fetch"]
        file_uri = self.helper.opencti_url + file_fetch

        # Downloading and saving file to connector
        self.helper.log_info("Importing the file " + file_uri)
        file_name = os.path.basename(file_fetch)
        file_content = self.helper.api.fetch_opencti_file(file_uri, True)

        with open(file_name, "wb") as f:
            f.write(file_content)

        return file_name

    def _collect_stix_objects(
            self, entity_config_list: List[EntityConfig]) -> List[Entity]:
        base_func = self.helper.api
        entity_list = []
        for entity_config in entity_config_list:
            func_format = entity_config.stix_class
            try:
                custom_function = getattr(base_func, func_format)
                entries = custom_function.list(
                    getAll=True,
                    filters=entity_config.filter,
                    customAttributes=entity_config.custom_attributes,
                )
                entity_list += entity_config.convert_to_entity(
                    entries, self.helper)
            except AttributeError:
                e = "Selected parser format is not supported: {}".format(
                    func_format)
                raise NotImplementedError(e)

        return entity_list

    @staticmethod
    def _parse_config(config_file: str,
                      file_class: Callable) -> List[BaseModel]:
        config = MyConfigParser()
        config.read(config_file)

        config_list = []
        for section, content in config.as_dict().items():
            content["name"] = section
            config_object = file_class(**content)
            config_list.append(config_object)

        return config_list

    def _process_parsing_results(
            self, parsed: List[Dict],
            context_entity: Dict) -> (List[SimpleObservable], List[str]):
        observables = []
        entities = []
        if context_entity is not None:
            object_markings = [
                x["standard_id"]
                for x in context_entity.get("objectMarking", [])
            ]
            # external_references = [x['standard_id'] for x in report.get('externalReferences', [])]
            # labels = [x['standard_id'] for x in report.get('objectLabel', [])]
            author = context_entity.get("createdBy")
        else:
            object_markings = []
            author = None
        if author is not None:
            author = author.get("standard_id", None)
        for match in parsed:
            if match[RESULT_FORMAT_TYPE] == OBSERVABLE_CLASS:
                if match[RESULT_FORMAT_CATEGORY] == "Vulnerability.name":
                    entity = self.helper.api.vulnerability.read(
                        filters={
                            "key": "name",
                            "values": [match[RESULT_FORMAT_MATCH]]
                        })
                    if entity is None:
                        self.helper.log_info(
                            f"Vulnerability with name '{match[RESULT_FORMAT_MATCH]}' could not be "
                            f"found. Is the CVE Connector activated?")
                        continue

                    entities.append(entity["standard_id"])
                elif match[
                        RESULT_FORMAT_CATEGORY] == "Attack-Pattern.x_mitre_id":
                    entity = self.helper.api.attack_pattern.read(
                        filters={
                            "key": "x_mitre_id",
                            "values": [match[RESULT_FORMAT_MATCH]],
                        })
                    if entity is None:
                        self.helper.log_info(
                            f"AttackPattern with MITRE ID '{match[RESULT_FORMAT_MATCH]}' could not be "
                            f"found. Is the MITRE Connector activated?")
                        continue

                    entities.append(entity["standard_id"])
                else:
                    observable = SimpleObservable(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "x-opencti-simple-observable"),
                        key=match[RESULT_FORMAT_CATEGORY],
                        value=match[RESULT_FORMAT_MATCH],
                        x_opencti_create_indicator=self.create_indicator,
                        object_marking_refs=object_markings,
                        created_by_ref=author,
                        # labels=labels,
                        # external_references=external_references
                    )
                    observables.append(observable)

            elif match[RESULT_FORMAT_TYPE] == ENTITY_CLASS:
                entities.append(match[RESULT_FORMAT_MATCH])
            else:
                self.helper.log_info("Odd data received: {}".format(match))

        return observables, entities

    def _process_parsed_objects(
        self,
        entity: Dict,
        observables: List,
        entities: List,
        bypass_validation: bool,
        file_name: str,
    ) -> int:

        if len(observables) == 0 and len(entities) == 0:
            return 0

        if entity is not None and entity["entity_type"] == "Report":
            report = Report(
                id=entity["standard_id"],
                name=entity["name"],
                description=entity["description"],
                published=self.helper.api.stix2.format_date(entity["created"]),
                report_types=entity["report_types"],
                object_refs=observables + entities,
                allow_custom=True,
            )
            observables.append(report)
        elif entity is not None:
            # TODO, relate all object to the entity
            entity_stix_bundle = self.helper.api.stix2.export_entity(
                entity["entity_type"], entity["id"])
            observables = observables + entity_stix_bundle["objects"]
        else:
            timestamp = int(time.time())
            now = datetime.utcfromtimestamp(timestamp)
            report = Report(
                name=file_name,
                description="Automatic import",
                published=now,
                report_types=["threat-report"],
                object_refs=observables + entities,
                allow_custom=True,
            )
            observables.append(report)
        bundles_sent = []
        if len(observables) > 0:
            bundle = Bundle(objects=observables, allow_custom=True).serialize()
            bundles_sent = self.helper.send_stix2_bundle(
                bundle=bundle,
                update=True,
                bypass_validation=bypass_validation,
                file_name=file_name + ".json",
                entity_id=entity["id"] if entity is not None else None,
            )

        # len() - 1 because the report update increases the count by one
        return len(bundles_sent) - 1
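
Connectors of this kind are usually launched from a small __main__ guard; a hedged sketch for the ReportImporter above (the retry delay and error handling are illustrative, and the module's existing time import is assumed):

if __name__ == "__main__":
    try:
        connector = ReportImporter()
        connector.start()
    except Exception as e:
        print(e)
        time.sleep(10)
        exit(0)
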
Beispiel #27
0
class HybridAnalysis:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        self.api_key = get_config_variable("HYBRID_ANALYSIS_TOKEN",
                                           ["hybrid_analysis", "api_key"],
                                           config)
        self.environment_id = get_config_variable(
            "HYBRID_ANALYSIS_ENVIRONMENT_ID",
            ["hybrid_analysis", "environment_id"],
            config,
            True,
            110,
        )
        self.max_tlp = get_config_variable("HYBRID_ANALYSIS_MAX_TLP",
                                           ["hybrid_analysis", "max_tlp"],
                                           config)
        self.api_url = "https://www.hybrid-analysis.com/api/v2"
        self.headers = {
            "api-key": self.api_key,
            "user-agent": "OpenCTI Hybrid Analysis Connector - Version 4.5.5",
            "accept": "application/json",
        }
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="Hybrid Analysis",
            description="Hybrid Analysis Sandbox.",
        )["standard_id"]
        self._CONNECTOR_RUN_INTERVAL_SEC = 60 * 60

    def _send_knowledge(self, observable, report):
        bundle_objects = []
        final_observable = observable
        if observable["entity_type"] in ["StixFile", "Artifact"]:
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="hashes.MD5",
                value=report["md5"])
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="hashes.SHA-1",
                value=report["sha1"])
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="hashes.SHA-256",
                value=report["sha256"],
            )
            if "name" not in final_observable or final_observable[
                    "name"] is None:
                self.helper.api.stix_cyber_observable.update_field(
                    id=final_observable["id"],
                    key="x_opencti_additional_names",
                    value=report["submit_name"],
                    operation="add",
                )
            if final_observable["entity_type"] == "StixFile":
                self.helper.api.stix_cyber_observable.update_field(
                    id=final_observable["id"],
                    key="size",
                    value=str(report["size"]),
                )
        self.helper.api.stix_cyber_observable.update_field(
            id=final_observable["id"],
            key="x_opencti_score",
            value=str(report["threat_score"]),
        )
        # Create external reference
        external_reference = self.helper.api.external_reference.create(
            source_name="Hybrid Analysis",
            url="https://www.hybrid-analysis.com/sample/" + report["sha256"],
            description="Hybrid Analysis Report",
        )
        self.helper.api.stix_cyber_observable.add_external_reference(
            id=final_observable["id"],
            external_reference_id=external_reference["id"],
        )
        # Create tags
        for tag in report["type_short"]:
            tag_ha = self.helper.api.label.create(value=tag, color="#0059f7")
            self.helper.api.stix_cyber_observable.add_label(
                id=final_observable["id"], label_id=tag_ha["id"])
        # Attach the TTPs
        for tactic in report["mitre_attcks"]:
            if (tactic["malicious_identifiers_count"] > 0
                    or tactic["suspicious_identifiers_count"] > 0):
                attack_pattern = AttackPattern(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "attack-pattern"),
                    created_by_ref=self.identity,
                    name=tactic["technique"],
                    custom_properties={
                        "x_mitre_id": tactic["attck_id"],
                    },
                    object_marking_refs=[TLP_WHITE],
                )
                relationship = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="uses",
                    created_by_ref=self.identity,
                    source_ref=final_observable["standard_id"],
                    target_ref=attack_pattern.id,
                    object_marking_refs=[TLP_WHITE],
                )
                bundle_objects.append(attack_pattern)
                bundle_objects.append(relationship)
        # Attach the domains
        for domain in report["domains"]:
            domain_stix = SimpleObservable(
                id=OpenCTIStix2Utils.generate_random_stix_id(
                    "x-opencti-simple-observable"),
                key="Domain-Name.value",
                value=domain,
                created_by_ref=self.identity,
                object_marking_refs=[TLP_WHITE],
            )
            relationship = Relationship(
                id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                relationship_type="communicates-with",
                created_by_ref=self.identity,
                source_ref=final_observable["standard_id"],
                target_ref=domain_stix.id,
                object_marking_refs=[TLP_WHITE],
            )
            bundle_objects.append(domain_stix)
            bundle_objects.append(relationship)
        # Attach the IP addresses
        for host in report["hosts"]:
            host_stix = SimpleObservable(
                id=OpenCTIStix2Utils.generate_random_stix_id(
                    "x-opencti-simple-observable"),
                key=self.detect_ip_version(host) + ".value",
                value=host,
                created_by_ref=self.identity,
                object_marking_refs=[TLP_WHITE],
            )
            relationship = Relationship(
                id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                relationship_type="communicates-with",
                created_by_ref=self.identity,
                source_ref=final_observable["standard_id"],
                target_ref=host_stix.id,
                object_marking_refs=[TLP_WHITE],
            )
            bundle_objects.append(host_stix)
            bundle_objects.append(relationship)
        # Attach other files
        for file in report["extracted_files"]:
            if file["threat_level"] > 0:
                file_stix = File(
                    id=OpenCTIStix2Utils.generate_random_stix_id("file"),
                    hashes={
                        "MD5": file["md5"],
                        "SHA-1": file["sha1"],
                        "SHA-256": file["sha256"],
                    },
                    size=file["size"],
                    name=file["name"],
                    custom_properties={"x_opencti_labels": file["type_tags"]},
                    created_by_ref=self.identity,
                    object_marking_refs=[TLP_WHITE],
                )
                relationship = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="drops",
                    created_by_ref=self.identity,
                    source_ref=final_observable["standard_id"],
                    target_ref=file_stix.id,
                )
                bundle_objects.append(file_stix)
                bundle_objects.append(relationship)
        if len(bundle_objects) > 0:
            bundle = Bundle(objects=bundle_objects).serialize()
            bundles_sent = self.helper.send_stix2_bundle(bundle)
            return ("Sent " + str(len(bundles_sent)) +
                    " stix bundle(s) for worker import")
        else:
            return "Nothing to attach"

    def _submit_url(self, observable):
        self.helper.log_info("Observable is a URL, triggering the sandbox...")
        values = {
            "url": observable["observable_value"],
            "environment_id": self.environment_id,
        }
        r = requests.post(
            self.api_url + "/submit/url",
            headers=self.headers,
            data=values,
        )
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        job_id = result["job_id"]
        state = "IN_QUEUE"
        self.helper.log_info("Analysis in progress...")
        while state == "IN_QUEUE" or state == "IN_PROGRESS":
            r = requests.get(
                self.api_url + "/report/" + job_id + "/state",
                headers=self.headers,
            )
            if r.status_code > 299:
                raise ValueError(r.text)
            result = r.json()
            state = result["state"]
            time.sleep(30)
        if state == "ERROR":
            raise ValueError(result["error"])
        r = requests.get(
            self.api_url + "/report/" + job_id + "/summary",
            headers=self.headers,
        )
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        self.helper.log_info("Analysis done, attaching knowledge...")
        return self._send_knowledge(observable, result)

    def _trigger_sandbox(self, observable):
        self.helper.log_info("File not found in HA, triggering the sandbox...")
        file_name = observable["importFiles"][0]["name"]
        file_uri = observable["importFiles"][0]["id"]
        file_content = self.helper.api.fetch_opencti_file(
            self.helper.opencti_url + file_uri, True)
        # Write the file locally, submit it to the sandbox, then clean up
        with open(file_name, "wb") as f:
            f.write(file_content)
        values = {"environment_id": self.environment_id}
        with open(file_name, "rb") as sample:
            r = requests.post(
                self.api_url + "/submit/file",
                headers=self.headers,
                files={"file": sample},
                data=values,
            )
        os.remove(file_name)
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        job_id = result["job_id"]
        state = "IN_QUEUE"
        self.helper.log_info("Analysis in progress...")
        while state == "IN_QUEUE" or state == "IN_PROGRESS":
            r = requests.get(
                self.api_url + "/report/" + job_id + "/state",
                headers=self.headers,
            )
            if r.status_code > 299:
                raise ValueError(r.text)
            result = r.json()
            state = result["state"]
            time.sleep(30)
        if state == "ERROR":
            raise ValueError(result["error"])
        r = requests.get(
            self.api_url + "/report/" + job_id + "/summary",
            headers=self.headers,
        )
        if r.status_code > 299:
            raise ValueError(r.text)
        result = r.json()
        self.helper.log_info("Analysis done, attaching knowledge...")
        return self._send_knowledge(observable, result)

    def _process_observable(self, observable):
        self.helper.log_info("Processing the observable " +
                             observable["observable_value"])
        # If File or Artifact
        result = []
        if observable["entity_type"] in ["StixFile", "Artifact"]:
            # First, check if the file is present is HA
            values = {"hash": observable["observable_value"]}
            r = requests.post(
                self.api_url + "/search/hash",
                headers=self.headers,
                data=values,
            )
            if r.status_code > 299:
                raise ValueError(r.text)
            result = r.json()
        if len(result) > 0:
            # One report is found
            self.helper.log_info("Already found in HA, attaching knowledge...")
            return self._send_knowledge(observable, result[0])
        # If URL
        if observable["entity_type"] in [
                "Url", "Domain-Name", "X-OpenCTI-Hostname"
        ]:
            return self._submit_url(observable)
        # If no file
        if "importFiles" not in observable or len(
                observable["importFiles"]) == 0:
            return "Observable not found and no file to upload in the sandbox"
        return self._trigger_sandbox(observable)

    def _process_message(self, data):
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_cyber_observable.read(id=entity_id)
        if observable is None:
            raise ValueError(
                "Observable not found "
                "(may be linked to data seggregation, check your group and permissions)"
            )
        # Extract TLP
        tlp = "TLP:WHITE"
        for marking_definition in observable["objectMarking"]:
            if marking_definition["definition_type"] == "TLP":
                tlp = marking_definition["definition"]
        if not OpenCTIConnectorHelper.check_max_tlp(tlp, self.max_tlp):
            raise ValueError(
                "Do not send any data, TLP of the observable is greater than MAX TLP"
            )
        return self._process_observable(observable)

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)

    def detect_ip_version(self, value):
        # IPv6 addresses always contain ":" while IPv4 addresses never do;
        # this is more reliable than checking the string length
        if ":" in value:
            return "IPv6-Addr"
        return "IPv4-Addr"
Beispiel #28
0
class VirusTotalConnector:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        self.token = get_config_variable("VIRUSTOTAL_TOKEN",
                                         ["virustotal", "token"], config)
        self.max_tlp = get_config_variable("VIRUSTOTAL_MAX_TLP",
                                           ["virustotal", "max_tlp"], config)
        self.api_url = "https://www.virustotal.com/api/v3"
        self.headers = {
            "x-apikey": self.token,
            "accept": "application/json",
            "content-type": "application/json",
        }
        self._CONNECTOR_RUN_INTERVAL_SEC = 60 * 60

    def _process_file(self, observable):
        response = requests.request(
            "GET",
            self.api_url + "/files/" + observable["observable_value"],
            headers=self.headers,
        )
        json_data = json.loads(response.text)
        if "error" in json_data:
            if json_data["error"]["message"] == "Quota exceeded":
                self.helper.log_info("Quota reached, waiting 1 hour.")
                sleep(self._CONNECTOR_RUN_INTERVAL_SEC)
            elif "not found" in json_data["error"]["message"]:
                self.helper.log_info("File not found on VirusTotal.")
                return "File not found on VirusTotal."
            else:
                raise ValueError(json_data["error"]["message"])
        if "data" in json_data:
            data = json_data["data"]
            attributes = data["attributes"]
            # Update the current observable
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=observable["id"], key="hashes.MD5", value=attributes["md5"])
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="hashes.SHA-1",
                value=attributes["sha1"])
            final_observable = self.helper.api.stix_cyber_observable.update_field(
                id=final_observable["id"],
                key="hashes.SHA-256",
                value=attributes["sha256"],
            )
            if observable["entity_type"] == "StixFile":
                self.helper.api.stix_cyber_observable.update_field(
                    id=final_observable["id"],
                    key="size",
                    value=str(attributes["size"]),
                )
            if observable["name"] is None and len(attributes["names"]) > 0:
                self.helper.api.stix_cyber_observable.update_field(
                    id=final_observable["id"],
                    key="name",
                    value=attributes["names"][0])
                del attributes["names"][0]
                if len(attributes["names"]) > 0:
                    self.helper.api.stix_cyber_observable.update_field(
                        id=final_observable["id"],
                        key="x_opencti_additional_names",
                        value=attributes["names"],
                    )

            # Create external reference
            external_reference = self.helper.api.external_reference.create(
                source_name="VirusTotal",
                url="https://www.virustotal.com/gui/file/" +
                attributes["sha256"],
                description=attributes["magic"],
            )

            # Create tags
            for tag in attributes["tags"]:
                tag_vt = self.helper.api.label.create(value=tag,
                                                      color="#0059f7")
                self.helper.api.stix_cyber_observable.add_label(
                    id=final_observable["id"], label_id=tag_vt["id"])

            self.helper.api.stix_cyber_observable.add_external_reference(
                id=final_observable["id"],
                external_reference_id=external_reference["id"],
            )

            return "File found on VirusTotal, knowledge attached."

    def _process_message(self, data):
        entity_id = data["entity_id"]
        observable = self.helper.api.stix_cyber_observable.read(id=entity_id)
        # Extract TLP
        tlp = "TLP:WHITE"
        for marking_definition in observable["objectMarking"]:
            if marking_definition["definition_type"] == "TLP":
                tlp = marking_definition["definition"]
        if not OpenCTIConnectorHelper.check_max_tlp(tlp, self.max_tlp):
            raise ValueError(
                "Do not send any data, TLP of the observable is greater than MAX TLP"
            )
        return self._process_file(observable)

    # Start the main loop
    def start(self):
        self.helper.listen(self._process_message)
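Aside: the enrichment above relies on the helper's configuration for the API URL and key. For reference, here is a self-contained sketch of the same VirusTotal v3 file lookup using plain requests; the API key and hash are placeholders, and the public https://www.virustotal.com/api/v3/files/{hash} endpoint is assumed rather than whatever api_url the connector is configured with.

# Standalone sketch of the VirusTotal v3 file lookup used by the connector above.
# VT_API_KEY is a placeholder; the hash may be an MD5, SHA-1 or SHA-256.
import requests

VT_API_KEY = "<your-api-key>"
FILE_HASH = "44d88612fea8a8f36de82e1278abb02f"  # EICAR test file (MD5)

response = requests.get(
    "https://www.virustotal.com/api/v3/files/" + FILE_HASH,
    headers={"x-apikey": VT_API_KEY, "accept": "application/json"},
)
json_data = response.json()
if "error" in json_data:
    print("Lookup failed:", json_data["error"]["message"])
else:
    attributes = json_data["data"]["attributes"]
    print(attributes["md5"], attributes["sha256"], attributes.get("names", []))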
Example #29
class TheHive:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.thehive_url = get_config_variable("THEHIVE_URL",
                                               ["thehive", "url"], config)
        self.thehive_api_key = get_config_variable("THEHIVE_API_KEY",
                                                   ["thehive", "api_key"],
                                                   config)
        self.thehive_check_ssl = get_config_variable("THEHIVE_CHECK_SSL",
                                                     ["thehive", "check_ssl"],
                                                     config, False, True)
        self.thehive_organization_name = get_config_variable(
            "THEHIVE_ORGANIZATION_NAME", ["thehive", "organization_name"],
            config)
        self.thehive_import_from_date = get_config_variable(
            "THEHIVE_IMPORT_FROM_DATE",
            ["thehive", "import_from_date"],
            config,
            False,
            datetime.utcfromtimestamp(int(
                time.time())).strftime("%Y-%m-%d %H:%M:%S"),
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name=self.thehive_organization_name,
            description=self.thehive_organization_name,
        )
        self.thehive_api = TheHiveApi(self.thehive_url,
                                      self.thehive_api_key,
                                      cert=self.thehive_check_ssl)

    def generate_case_bundle(self, case):
        markings = []
        if case["tlp"] == 0:
            markings.append(TLP_WHITE)
        if case["tlp"] == 1:
            markings.append(TLP_GREEN)
        if case["tlp"] == 2:
            markings.append(TLP_AMBER)
        if case["tlp"] == 3:
            markings.append(TLP_RED)
        if len(markings) == 0:
            markings.append(TLP_WHITE)
        bundle_objects = []
        incident = StixXOpenCTIIncident(
            id=OpenCTIStix2Utils.generate_random_stix_id("x-opencti-incident"),
            name=case["title"],
            description=case["description"],
            first_seen=datetime.utcfromtimestamp(
                int(case["createdAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"),
            last_seen=datetime.utcfromtimestamp(
                int(case["updatedAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"),
            object_marking_refs=markings,
            labels=case["tags"] if "tags" in case else [],
            created_by_ref=self.identity["standard_id"],
        )
        bundle_objects.append(incident)
        # Get observables
        observables = self.thehive_api.get_case_observables(
            case_id=case["id"]).json()
        for observable in observables:
            if observable["dataType"] == "hash":
                if len(observable["data"]) == 32:
                    data_type = "file_md5"
                elif len(observable["data"]) == 40:
                    data_type = "file_sha1"
                elif len(observable["data"]) == 64:
                    data_type = "file_sha256"
                else:
                    data_type = "unknown"
            else:
                data_type = observable["dataType"]
            observable_key = OBSERVABLES_MAPPING[data_type]
            if observable_key is not None:
                stix_observable = SimpleObservable(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "x-opencti-simple-observable"),
                    key=observable_key,
                    value=observable["data"],
                    description=observable["message"],
                    x_opencti_score=80 if observable["ioc"] else 50,
                    object_marking_refs=markings,
                    labels=observable["tags"] if "tags" in observable else [],
                    created_by_ref=self.identity["standard_id"],
                    x_opencti_create_indicator=observable["ioc"],
                )
                stix_observable_relation = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="related-to",
                    created_by_ref=self.identity["standard_id"],
                    source_ref=stix_observable.id,
                    target_ref=incident.id,
                    object_marking_refs=markings,
                )
                bundle_objects.append(stix_observable)
                bundle_objects.append(stix_observable_relation)
                if observable["sighted"]:
                    fake_indicator_id = (
                        "indicator--c1034564-a9fb-429b-a1c1-c80116cc8e1e")
                    stix_sighting = Sighting(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "sighting"),
                        first_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] /
                                1000)).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        last_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000 +
                                3600)).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        where_sighted_refs=[self.identity["standard_id"]],
                        sighting_of_ref=fake_indicator_id,
                        custom_properties={
                            "x_opencti_sighting_of_ref": stix_observable.id
                        },
                    )
                    bundle_objects.append(stix_sighting)
        bundle = Bundle(objects=bundle_objects).serialize()
        return bundle

    def run(self):
        self.helper.log_info("Starting TheHive Connector...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_case_date" in current_state:
                    last_case_date = current_state["last_case_date"]
                    self.helper.log_info(
                        "Connector last_case_date: " +
                        datetime.utcfromtimestamp(last_case_date).strftime(
                            "%Y-%m-%d %H:%M:%S"))
                else:
                    last_case_date = parse(
                        self.thehive_import_from_date).timestamp()
                    self.helper.log_info("Connector has no last_case_date")

                self.helper.log_info("Get cases since last run (" +
                                     datetime.utcfromtimestamp(last_case_date).
                                     strftime("%Y-%m-%d %H:%M:%S") + ")")
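                # Or / Gt / Child are query builders from thehive4py.query.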
                query = Or(
                    Gt("updatedAt", int(last_case_date * 1000)),
                    Child("case_task",
                          Gt("createdAt", int(last_case_date * 1000))),
                    Child("case_artifact",
                          Gt("createdAt", int(last_case_date * 1000))),
                )
                cases = self.thehive_api.find_cases(query=query,
                                                    sort="updatedAt",
                                                    range="0-100").json()
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "TheHive run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                try:
                    for case in cases:
                        stix_bundle = self.generate_case_bundle(case)
                        self.helper.send_stix2_bundle(
                            stix_bundle,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as a last run
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp)
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
                current_state = self.helper.get_state()
                if current_state is None:
                    current_state = {"last_case_date": timestamp}
                else:
                    current_state["last_case_date"] = timestamp
                self.helper.set_state(current_state)
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
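Note: generate_case_bundle() above indexes an OBSERVABLES_MAPPING dict that is defined elsewhere in the connector module. The sketch below shows what such a mapping might look like; the keys and values are assumptions for illustration and may not match the real connector, but they explain why unmapped data types resolve to None and are skipped.

# Illustrative sketch of a TheHive-dataType-to-OpenCTI-observable-key mapping.
# The actual OBSERVABLES_MAPPING shipped with the connector may differ.
OBSERVABLES_MAPPING = {
    "autonomous-system": "Autonomous-System.number",
    "domain": "Domain-Name.value",
    "file_md5": "File.hashes.MD5",
    "file_sha1": "File.hashes.SHA-1",
    "file_sha256": "File.hashes.SHA-256",
    "filename": "File.name",
    "fqdn": "Domain-Name.value",
    "ip": "IPv4-Addr.value",
    "mail": "Email-Addr.value",
    "mail_subject": "Email-Message.subject",
    "registry": "Windows-Registry-Key.key",
    "url": "Url.value",
    "user-agent": None,   # no OpenCTI key: observable is skipped
    "other": None,
    "unknown": None,      # unrecognised hash lengths end up here
}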
Example #30
class Cve:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_import_history = get_config_variable(
            "CVE_IMPORT_HISTORY", ["cve", "import_history"], config, False)
        self.cve_nvd_data_feed = get_config_variable("CVE_NVD_DATA_FEED",
                                                     ["cve", "nvd_data_feed"],
                                                     config)
        self.cve_history_data_feed = get_config_variable(
            "CVE_HISTORY_DATA_FEED", ["cve", "history_data_feed"], config)
        self.cve_interval = get_config_variable("CVE_INTERVAL",
                                                ["cve", "interval"], config,
                                                True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        return int(self.cve_interval) * 60 * 60 * 24

    def delete_files(self):
        if os.path.exists("data.json"):
            os.remove("data.json")
        if os.path.exists("data.json.gz"):
            os.remove("data.json.gz")
        if os.path.exists("data-stix2.json"):
            os.remove("data-stix2.json")

    def convert_and_send(self, url, work_id):
        try:
            # Downloading json.gz file
            self.helper.log_info("Requesting the file " + url)
            response = urllib.request.urlopen(
                url,
                context=ssl.create_default_context(cafile=certifi.where()))
            image = response.read()
            # Write into the working directory so that the gzip.open("data.json.gz")
            # and delete_files() calls below find the file under the same path.
            with open("data.json.gz", "wb") as file:
                file.write(image)
            # Unzipping the file
            self.helper.log_info("Unzipping the file")
            with gzip.open("data.json.gz", "rb") as f_in:
                with open("data.json", "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            # Converting the file to stix2
            self.helper.log_info("Converting the file")
            convert("data.json", "data-stix2.json")
            with open("data-stix2.json") as stix_json:
                contents = stix_json.read()
                self.helper.send_stix2_bundle(
                    contents,
                    entities_types=self.helper.connect_scope,
                    update=self.update_existing_data,
                    work_id=work_id,
                )
            # Remove files
            self.delete_files()
        except Exception as e:
            self.delete_files()
            self.helper.log_error(str(e))
            time.sleep(60)

    def run(self):
        self.helper.log_info("Fetching CVE knowledge...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: " +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime("%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.cve_interval) - 1) * 60 * 60 * 24)):
                    timestamp = int(time.time())
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "CVE run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name)
                    self.convert_and_send(self.cve_nvd_data_feed, work_id)
                    # If import history and never run
                    if last_run is None and self.cve_import_history:
                        now = datetime.now()
                        years = list(range(2002, now.year + 1))
                        for year in years:
                            self.convert_and_send(
                                f"{self.cve_history_data_feed}nvdcve-1.1-{year}.json.gz",
                                work_id,
                            )

                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as " +
                        str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    message = (
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(message)
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
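The scheduling logic in run() only triggers a new import when last_run is unset or older than the configured interval minus one day. A minimal standalone sketch of that check is shown below, with illustrative function and variable names that are not part of the connector.

import time

# Mirrors the check in Cve.run(): re-import when there is no recorded
# last_run, or when more than (interval_days - 1) days have elapsed.
def should_run(last_run, interval_days, now=None):
    now = int(time.time()) if now is None else now
    if last_run is None:
        return True
    return (now - last_run) > (interval_days - 1) * 60 * 60 * 24

# Example with a 7-day interval: a run recorded 5 days ago is skipped,
# because fewer than the required 6 full days have elapsed.
five_days_ago = int(time.time()) - 5 * 24 * 60 * 60
print(should_run(None, 7))           # True
print(should_run(five_days_ago, 7))  # False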