Example #1
class Malpedia:
    """OpenCTI Malpedia main class"""

    _STATE_LAST_RUN = "state_last_run"
    _MALPEDIA_LAST_VERSION = "malpedia_last_version"

    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/../config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        # Extra config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.AUTH_KEY = get_config_variable("MALPEDIA_AUTH_KEY",
                                            ["malpedia", "auth_key"], config)
        self.INTERVAL_SEC = get_config_variable("MALPEDIA_INTERVAL_SEC",
                                                ["malpedia", "interval_sec"],
                                                config)
        self.import_intrusion_sets = get_config_variable(
            "MALPEDIA_IMPORT_INTRUSION_SETS",
            ["malpedia", "import_intrusion_sets"],
            config,
        )
        self.import_yara = get_config_variable("MALPEDIA_IMPORT_YARA",
                                               ["malpedia", "import_yara"],
                                               config)
        self.create_indicators = get_config_variable(
            "MALPEDIA_CREATE_INDICATORS", ["malpedia", "create_indicators"],
            config)
        self.create_observables = get_config_variable(
            "MALPEDIA_CREATE_OBSERVABLES", ["malpedia", "create_observables"],
            config)

        self.helper = OpenCTIConnectorHelper(config)
        self.helper.log_info(f"loaded malpedia config: {config}")

        # Create Malpedia client and importers
        self.client = MalpediaClient(self.AUTH_KEY)

        # If we run without an API key, we can assume all data is TLP:WHITE;
        # otherwise we default to TLP:AMBER to be safe.
        if self.client.unauthenticated:
            self.default_marking = self.helper.api.marking_definition.read(
                id=TLP_WHITE["id"])
        else:
            self.default_marking = self.helper.api.marking_definition.read(
                id=TLP_AMBER["id"])

        self.knowledge_importer = KnowledgeImporter(
            self.helper,
            self.client,
            self.confidence_level,
            self.update_existing_data,
            self.import_intrusion_sets,
            self.import_yara,
            self.create_indicators,
            self.create_observables,
            self.default_marking,
        )

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    @staticmethod
    def _get_state_value(state: Optional[Mapping[str, Any]],
                         key: str,
                         default: Optional[Any] = None) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    def _is_scheduled(self, last_run: Optional[int],
                      current_time: int) -> bool:
        if last_run is None:
            return True
        time_diff = current_time - last_run
        return time_diff >= int(self.INTERVAL_SEC)

    def _check_version(self, last_version: Optional[int],
                       current_version: int) -> bool:
        if last_version is None:
            return True
        return current_version > last_version

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(datetime.utcnow().timestamp())

    def _get_interval(self):
        return int(self.INTERVAL_SEC)

    def run(self):
        self.helper.log_info("starting Malpedia connector...")
        while True:
            try:
                current_malpedia_version = self.client.current_version()
                self.helper.log_info(
                    f"current Malpedia version: {current_malpedia_version}")
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()

                self.helper.log_info(f"loaded state: {current_state}")

                last_run = self._get_state_value(current_state,
                                                 self._STATE_LAST_RUN)

                last_malpedia_version = self._get_state_value(
                    current_state, self._MALPEDIA_LAST_VERSION)

                # Only run the connector if:
                #  1. It is scheduled to run per interval
                #  2. The global Malpedia version from the API is newer than our
                #     last stored version.
                if self._is_scheduled(
                        last_run, timestamp) and self._check_version(
                            last_malpedia_version, current_malpedia_version):
                    self.helper.log_info("running importers")

                    knowledge_importer_state = self._run_knowledge_importer(
                        current_state)
                    self.helper.log_info("done with running importers")

                    new_state = current_state.copy()
                    new_state.update(knowledge_importer_state)
                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()
                    new_state[self._MALPEDIA_LAST_VERSION] = current_malpedia_version

                    self.helper.log_info(f"storing new state: {new_state}")

                    self.helper.set_state(new_state)

                    self.helper.log_info(
                        f"state stored, next run in: {self._get_interval()} seconds"
                    )
                else:
                    new_interval = self._get_interval() - (timestamp -
                                                           last_run)
                    self.helper.log_info(
                        f"connector will not run, next run in: {new_interval} seconds"
                    )

                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                exit(0)

    def _run_knowledge_importer(
            self, current_state: Mapping[str, Any]) -> Mapping[str, Any]:
        return self.knowledge_importer.run(current_state)
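
For context, connectors like the one above are usually launched from a small __main__ block that instantiates the class and calls run(). The sketch below is illustrative only and is not taken from the Malpedia connector; the fallback error handling (stderr print, short sleep, non-zero exit) is an assumption.

# Hypothetical launcher sketch (not part of the Malpedia connector itself):
# instantiate the connector class defined above and hand control to run().
import sys
import time

if __name__ == "__main__":
    try:
        connector = Malpedia()
        connector.run()
    except Exception as err:
        # The helper may not be initialized yet, so fall back to stderr.
        print(err, file=sys.stderr)
        time.sleep(10)
        sys.exit(1)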
Example #2
class Cve:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.cve_import_history = get_config_variable(
            "CVE_IMPORT_HISTORY", ["cve", "import_history"], config, False)
        self.cve_nvd_data_feed = get_config_variable("CVE_NVD_DATA_FEED",
                                                     ["cve", "nvd_data_feed"],
                                                     config)
        self.cve_history_data_feed = get_config_variable(
            "CVE_HISTORY_DATA_FEED", ["cve", "history_data_feed"], config)
        self.cve_interval = get_config_variable("CVE_INTERVAL",
                                                ["cve", "interval"], config,
                                                True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        return int(self.cve_interval) * 60 * 60 * 24

    def convert_and_send(self, url):
        try:
            # Downloading json.gz file
            self.helper.log_info("Requesting the file " + url)
            urllib.request.urlretrieve(url, "data.json.gz")
            # Unzipping the file
            self.helper.log_info("Unzipping the file")
            with gzip.open("data.json.gz", "rb") as f_in:
                with open("data.json", "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            # Converting the file to stix2
            self.helper.log_info("Converting the file")
            convert("data.json", "data-stix2.json")
            with open("data-stix2.json") as stix_json:
                contents = stix_json.read()
                self.helper.send_stix2_bundle(contents,
                                              self.helper.connect_scope,
                                              self.update_existing_data)
            # Remove files
            os.remove("data.json")
            os.remove("data.json.gz")
            os.remove("data-stix2.json")
        except Exception as e:
            self.helper.log_error(str(e))
            time.sleep(60)

    def run(self):
        self.helper.log_info("Fetching CVE knowledge...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: " +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime("%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # Run if the connector has never run or the last run was more
                # than (interval - 1) days ago
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.cve_interval) - 1) * 60 * 60 * 24)):
                    self.convert_and_send(self.cve_nvd_data_feed)
                    # Import the full history on the first run or when history
                    # import is enabled
                    if last_run is None or self.cve_import_history:
                        now = datetime.now()
                        years = list(range(2002, now.year + 1))
                        for year in years:
                            self.convert_and_send(
                                f"{self.cve_history_data_feed}nvdcve-1.1-{year}.json.gz"
                            )

                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as " +
                        str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
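
The scheduling logic in run() above gates on whole days. Here is a minimal standalone sketch of the same check; the helper name is_cve_run_due is hypothetical and not part of the connector.

# Standalone sketch of the day-based scheduling check used in Cve.run()
# (the helper name is hypothetical).
import time


def is_cve_run_due(last_run, interval_days, now=None):
    """Return True when the connector has never run or the last run is
    older than (interval_days - 1) days, mirroring the condition in run()."""
    if last_run is None:
        return True
    now = int(time.time()) if now is None else now
    return (now - last_run) > (int(interval_days) - 1) * 60 * 60 * 24


# With a 7-day interval, a run 6.5 days ago is already due again.
assert is_cve_run_due(int(time.time()) - int(6.5 * 86400), 7)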
class Cybercrimetracker:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = "{}/config.yml".format(
            os.path.dirname(os.path.abspath(__file__)))

        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

        # Connector Config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

        # CYBERCRIME-TRACKER.NET Config
        self.feed_url = get_config_variable("CYBERCRIMET_RACKER_FEED_URL",
                                            ["cybercrime-tracker", "feed_url"],
                                            config)
        self.connector_tlp = get_config_variable("CYBERCRIME_TRACKER_TLP",
                                                 ["cybercrime-tracker", "tlp"],
                                                 config)
        self.create_indicators = get_config_variable(
            "CYBERCRIME_TRACKER_CREATE_INDICATORS",
            ["cybercrime-tracker", "create_indicators"],
            config,
        )
        self.create_observables = get_config_variable(
            "CYBERCRIME_TRACKER_CREATE_OBSERVABLES",
            ["cybercrime-tracker", "create_observables"],
            config,
        )
        self.interval = get_config_variable(
            "CYBERCRIMETRACKER_INTERVAL",
            ["cybercrime-tracker", "interval"],
            config,
            isNumber=True,
        )

    @staticmethod
    def _time_to_datetime(input_date: time.struct_time) -> datetime.datetime:
        return datetime.datetime(
            input_date.tm_year,
            input_date.tm_mon,
            input_date.tm_mday,
            input_date.tm_hour,
            input_date.tm_min,
            input_date.tm_sec,
            tzinfo=datetime.timezone.utc,
        )

    def parse_feed_entry(self, entry):
        """
        Parses an entry from the feed and returns a dict with:

        date: date in iso format
        type: name of the malware associated with the C2 server
        url: the url of the C2
        ip: the IP address of the C2
        ext_link: An external link to CYBERCRIME-TRACKER.NET with details

        Note: CYBERCRIME-TRACKER.NET does not provide the protocol in the URL,
        so we always assume 'http'.
        """
        parsed_entry = {}

        pattern = (
            r"(?:\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type}" +
            r"\s+-%{GREEDYDATA}:\s+%{IP:ip}|" +
            r"\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type})")

        entry_summary = Grok(pattern).match(entry["summary"])

        if entry_summary:
            parsed_entry["date"] = self._time_to_datetime(
                entry["published_parsed"])
            parsed_entry["type"] = entry_summary["type"]
            parsed_entry["ext_link"] = entry["link"]
            parsed_entry["url"] = "http://{}".format(quote(entry["title"]))
            hostname = urlparse(parsed_entry["url"]).hostname

            if entry_summary["ip"] is None:
                parsed_entry["ip"] = hostname
            else:
                parsed_entry["ip"] = entry_summary["ip"]
                parsed_entry["domain"] = hostname

            self.helper.log_info("Parsed entry: {}".format(entry["title"]))

            return parsed_entry
        else:
            self.helper.log_error("Could not parse: {}".format(entry["title"]))
            return False

    def gen_indicator_pattern(self, parsed_entry):

        if "domain" in parsed_entry.keys():
            indicator_pattern = (
                "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"]) +
                "AND [url:value='{}'] ".format(parsed_entry["url"]) +
                "AND [domain-name:value='{}']".format(parsed_entry["domain"]))
        else:
            indicator_pattern = "[ipv4-addr:value='{}'] ".format(
                parsed_entry["ip"]) + "AND [url:value='{}']".format(
                    parsed_entry["url"])

        return indicator_pattern

    def run(self):
        self.helper.log_info("Fetching data CYBERCRIME-TRACKER.NET...")
        tlp = self.helper.api.marking_definition.read(
            filters=[{
                "key": "definition",
                "values": "TLP:{}".format(self.connector_tlp)
            }])
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()

                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: {}".format(
                        datetime.datetime.utcfromtimestamp(last_run).strftime(
                            "%Y-%m-%d %H:%M:%S")))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")

                # Run if it is the first time or we are past the interval

                if last_run is None or (
                    (timestamp - last_run) > self.interval):
                    self.helper.log_info("Connector will run!")
                    now = datetime.datetime.utcfromtimestamp(timestamp)
                    friendly_name = "Cybercrime-Tracker run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name)

                    # Get Feed Content
                    feed = feedparser.parse(self.feed_url)

                    self.helper.log_info("Found: {} entries.".format(
                        len(feed["entries"])))

                    self.feed_summary = {
                        "Source": feed["feed"]["title"],
                        "Date": self._time_to_datetime(
                            feed["feed"]["published_parsed"]),
                        "Details": feed["feed"]["subtitle"],
                        "Link": feed["feed"]["link"],
                    }

                    # Create the bundle
                    bundle_objects = list()

                    organization = stix2.Identity(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "identity"),
                        name="CYBERCRIME-TRACKER.NET",
                        identity_class="organization",
                        description="Tracker collecting and sharing daily updates of C2 IPs/Urls. http://cybercrime-tracker.net",
                    )
                    bundle_objects.append(organization)
                    for entry in feed["entries"]:
                        parsed_entry = self.parse_feed_entry(entry)
                        # Skip entries that could not be parsed
                        if not parsed_entry:
                            continue
                        external_reference = stix2.ExternalReference(
                            source_name="{}".format(
                                self.feed_summary["Source"]),
                            url=parsed_entry["ext_link"],
                        )
                        indicator_pattern = self.gen_indicator_pattern(
                            parsed_entry)
                        malware = stix2.Malware(
                            id=OpenCTIStix2Utils.generate_random_stix_id(
                                "malware"),
                            is_family=True,
                            name=parsed_entry["type"],
                            description="{} malware.".format(
                                parsed_entry["type"]),
                        )
                        bundle_objects.append(malware)
                        indicator = None
                        if self.create_indicators:
                            indicator = stix2.Indicator(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "indicator"),
                                name=parsed_entry["url"],
                                description="C2 URL for: {}".format(
                                    parsed_entry["type"]),
                                labels=["C2 Server"],
                                pattern_type="stix",
                                pattern=indicator_pattern,
                                valid_from=parsed_entry["date"],
                                created=parsed_entry["date"],
                                modified=parsed_entry["date"],
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                external_references=[external_reference],
                                custom_properties={
                                    "x_opencti_main_observable_type": "Url"
                                },
                            )
                            bundle_objects.append(indicator)
                            relation = stix2.Relationship(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "relationship"),
                                source_ref=indicator.id,
                                target_ref=malware.id,
                                relationship_type="indicates",
                                start_time=self._time_to_datetime(
                                    entry["published_parsed"]),
                                stop_time=self._time_to_datetime(
                                    entry["published_parsed"]) +
                                datetime.timedelta(0, 3),
                                description="URLs associated to: " +
                                parsed_entry["type"],
                                confidence=self.confidence_level,
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                created=parsed_entry["date"],
                                modified=parsed_entry["date"],
                                external_references=[external_reference],
                            )
                            bundle_objects.append(relation)
                        if self.create_observables:
                            observable_url = SimpleObservable(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "x-opencti-simple-observable"),
                                key="Url.value",
                                labels=["C2 Server"],
                                value=parsed_entry["url"],
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                external_references=[external_reference],
                            )
                            bundle_objects.append(observable_url)
                            observable_ip = SimpleObservable(
                                id=OpenCTIStix2Utils.generate_random_stix_id(
                                    "x-opencti-simple-observable"),
                                key="IPv4-Addr.value",
                                labels=["C2 Server"],
                                value=parsed_entry["ip"],
                                created_by_ref=organization.id,
                                object_marking_refs=[tlp["standard_id"]],
                                external_references=[external_reference],
                            )
                            bundle_objects.append(observable_ip)
                            observable_domain = None
                            if "domain" in parsed_entry.keys():
                                observable_domain = SimpleObservable(
                                    id=OpenCTIStix2Utils.
                                    generate_random_stix_id(
                                        "x-opencti-simple-observable"),
                                    key="Domain-Name.value",
                                    labels=["C2 Server"],
                                    value=parsed_entry["domain"],
                                    created_by_ref=organization.id,
                                    object_marking_refs=[tlp["standard_id"]],
                                    external_references=[external_reference],
                                )
                                bundle_objects.append(observable_domain)

                            if indicator is not None:
                                relationship_1 = stix2.Relationship(
                                    id=OpenCTIStix2Utils.
                                    generate_random_stix_id("relationship"),
                                    relationship_type="based-on",
                                    created_by_ref=organization.id,
                                    source_ref=indicator.id,
                                    target_ref=observable_url.id,
                                )
                                bundle_objects.append(relationship_1)
                                relationship_2 = stix2.Relationship(
                                    id=OpenCTIStix2Utils.
                                    generate_random_stix_id("relationship"),
                                    relationship_type="based-on",
                                    created_by_ref=organization.id,
                                    source_ref=indicator.id,
                                    target_ref=observable_ip.id,
                                )
                                bundle_objects.append(relationship_2)
                                if observable_domain is not None:
                                    relationship_3 = stix2.Relationship(
                                        id=OpenCTIStix2Utils.
                                        generate_random_stix_id(
                                            "relationship"),
                                        relationship_type="based-on",
                                        created_by_ref=organization.id,
                                        source_ref=indicator.id,
                                        target_ref=observable_domain.id,
                                    )
                                    bundle_objects.append(relationship_3)

                    # create stix bundle
                    bundle = stix2.Bundle(objects=bundle_objects)
                    # send data
                    self.helper.send_stix2_bundle(
                        bundle=bundle.serialize(),
                        update=self.update_existing_data,
                        work_id=work_id,
                    )

                    # Store the current timestamp as a last run
                    message = (
                        "Connector successfully run, storing last_run as: {}".format(
                            str(timestamp)))
                    self.helper.log_info(message)
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(
                        "Last_run stored, next run in: {} seconds.".format(
                            str(round(self.interval, 2))))
                    time.sleep(60)
                else:
                    new_interval = self.interval - (timestamp - last_run)
                    self.helper.log_info("Connector will not run. \
                            Next run in: {} seconds.".format(
                        str(round(new_interval, 2))))
                    time.sleep(60)

            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
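
For illustration, the standalone snippet below shows roughly what gen_indicator_pattern builds when an entry yields both an IP address and a domain; the sample values are invented.

# Made-up entry illustrating the compound STIX pattern built by
# gen_indicator_pattern when a domain is present.
sample_entry = {
    "ip": "198.51.100.7",
    "url": "http://c2.example.test/panel.php",
    "domain": "c2.example.test",
}
sample_pattern = (
    "[ipv4-addr:value='{}'] ".format(sample_entry["ip"])
    + "AND [url:value='{}'] ".format(sample_entry["url"])
    + "AND [domain-name:value='{}']".format(sample_entry["domain"])
)
# [ipv4-addr:value='198.51.100.7'] AND [url:value='http://c2.example.test/panel.php'] AND [domain-name:value='c2.example.test']
print(sample_pattern)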
Example #4
class TheHive:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.thehive_url = get_config_variable(
            "THEHIVE_URL", ["thehive", "url"], config
        )
        self.thehive_api_key = get_config_variable(
            "THEHIVE_API_KEY", ["thehive", "api_key"], config
        )
        self.thehive_check_ssl = get_config_variable(
            "THEHIVE_CHECK_SSL", ["thehive", "check_ssl"], config, False, True
        )
        self.thehive_organization_name = get_config_variable(
            "THEHIVE_ORGANIZATION_NAME", ["thehive", "organization_name"], config
        )
        self.thehive_import_from_date = get_config_variable(
            "THEHIVE_IMPORT_FROM_DATE",
            ["thehive", "import_from_date"],
            config,
            False,
            datetime.utcfromtimestamp(int(time.time())).strftime("%Y-%m-%d %H:%M:%S"),
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name=self.thehive_organization_name,
            description=self.thehive_organization_name,
        )
        self.thehive_api = TheHiveApi(
            self.thehive_url, self.thehive_api_key, cert=self.thehive_check_ssl
        )

    def generate_case_bundle(self, case):
        markings = []
        if case["tlp"] == 0:
            markings.append(TLP_WHITE)
        if case["tlp"] == 1:
            markings.append(TLP_GREEN)
        if case["tlp"] == 2:
            markings.append(TLP_AMBER)
        if case["tlp"] == 3:
            markings.append(TLP_RED)
        if len(markings) == 0:
            markings.append(TLP_WHITE)
        bundle_objects = []
        incident = StixXOpenCTIIncident(
            id=OpenCTIStix2Utils.generate_random_stix_id("x-opencti-incident"),
            name=case["title"],
            description=case["description"],
            first_seen=datetime.utcfromtimestamp(
                int(case["createdAt"]) / 1000
            ).strftime("%Y-%m-%dT%H:%M:%SZ"),
            last_seen=datetime.utcfromtimestamp(int(case["updatedAt"]) / 1000).strftime(
                "%Y-%m-%dT%H:%M:%SZ"
            ),
            object_marking_refs=markings,
            labels=case["tags"] if "tags" in case else [],
            created_by_ref=self.identity["standard_id"],
        )
        bundle_objects.append(incident)
        # Get observables
        observables = self.thehive_api.get_case_observables(case_id=case["id"]).json()
        for observable in observables:
            if observable["dataType"] == "hash":
                if len(observable["data"]) == 32:
                    data_type = "file_md5"
                elif len(observable["data"]) == 40:
                    data_type = "file_sha1"
                elif len(observable["data"]) == 64:
                    data_type = "file_sha256"
                else:
                    data_type = "unknown"
            else:
                data_type = observable["dataType"]
            # Use .get() so unmapped data types are skipped instead of raising
            observable_key = OBSERVABLES_MAPPING.get(data_type)
            if observable_key is not None:
                stix_observable = SimpleObservable(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "x-opencti-simple-observable"
                    ),
                    key=observable_key,
                    value=observable["data"],
                    description=observable["message"],
                    x_opencti_score=80 if observable["ioc"] else 50,
                    object_marking_refs=markings,
                    labels=observable["tags"] if "tags" in observable else [],
                    created_by_ref=self.identity["standard_id"],
                    x_opencti_create_indicator=observable["ioc"],
                )
                stix_observable_relation = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id("relationship"),
                    relationship_type="related-to",
                    created_by_ref=self.identity["standard_id"],
                    source_ref=stix_observable.id,
                    target_ref=incident.id,
                    object_marking_refs=markings,
                )
                bundle_objects.append(stix_observable)
                bundle_objects.append(stix_observable_relation)
                if observable["sighted"]:
                    fake_indicator_id = (
                        "indicator--c1034564-a9fb-429b-a1c1-c80116cc8e1e"
                    )
                    stix_sighting = Sighting(
                        id=OpenCTIStix2Utils.generate_random_stix_id("sighting"),
                        first_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000)
                        ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        last_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000 + 3600)
                        ).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        where_sighted_refs=[self.identity["standard_id"]],
                        sighting_of_ref=fake_indicator_id,
                        custom_properties={
                            "x_opencti_sighting_of_ref": stix_observable.id
                        },
                    )
                    bundle_objects.append(stix_sighting)
        bundle = Bundle(objects=bundle_objects).serialize()
        return bundle

    def run(self):
        self.helper.log_info("Starting TheHive Connector...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_case_date" in current_state:
                    last_case_date = current_state["last_case_date"]
                    self.helper.log_info(
                        "Connector last_case_date: "
                        + datetime.utcfromtimestamp(last_case_date).strftime(
                            "%Y-%m-%d %H:%M:%S"
                        )
                    )
                else:
                    last_case_date = parse(self.thehive_import_from_date).timestamp()
                    self.helper.log_info("Connector has no last_case_date")

                self.helper.log_info(
                    "Get cases since last run ("
                    + datetime.utcfromtimestamp(last_case_date).strftime(
                        "%Y-%m-%d %H:%M:%S"
                    )
                    + ")"
                )
                query = Or(
                    Gt("updatedAt", int(last_case_date * 1000)),
                    Child("case_task", Gt("createdAt", int(last_case_date * 1000))),
                    Child("case_artifact", Gt("createdAt", int(last_case_date * 1000))),
                )
                cases = self.thehive_api.find_cases(
                    query=query, sort="updatedAt", range="0-100"
                ).json()
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "TheHive run @ " + now.strftime("%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name
                )
                try:
                    for case in cases:
                        stix_bundle = self.generate_case_bundle(case)
                        self.helper.send_stix2_bundle(
                            stix_bundle,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as a last run
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp
                )
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
                current_state = self.helper.get_state()
                if current_state is None:
                    current_state = {"last_case_date": timestamp}
                else:
                    current_state["last_case_date"] = timestamp
                self.helper.set_state(current_state)
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
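
The observable handling in generate_case_bundle infers hash types purely from digest length. Below is a minimal standalone sketch of that heuristic; the helper name is hypothetical and not part of the connector.

# Standalone sketch of the digest-length heuristic used in
# generate_case_bundle (the helper name is hypothetical).
def guess_hash_data_type(value: str) -> str:
    """Map a TheHive 'hash' observable to a data type by digest length."""
    return {
        32: "file_md5",     # MD5 digests are 32 hex characters
        40: "file_sha1",    # SHA-1 digests are 40 hex characters
        64: "file_sha256",  # SHA-256 digests are 64 hex characters
    }.get(len(value), "unknown")


assert guess_hash_data_type("d41d8cd98f00b204e9800998ecf8427e") == "file_md5"
assert guess_hash_data_type("not-a-hash") == "unknown"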
Example #5
class CrowdStrike:
    """CrowdStrike connector."""

    _CONFIG_NAMESPACE = "crowdstrike"

    _CONFIG_BASE_URL = f"{_CONFIG_NAMESPACE}.base_url"
    _CONFIG_CLIENT_ID = f"{_CONFIG_NAMESPACE}.client_id"
    _CONFIG_CLIENT_SECRET = f"{_CONFIG_NAMESPACE}.client_secret"
    _CONFIG_INTERVAL_SEC = f"{_CONFIG_NAMESPACE}.interval_sec"
    _CONFIG_SCOPES = f"{_CONFIG_NAMESPACE}.scopes"
    _CONFIG_TLP = f"{_CONFIG_NAMESPACE}.tlp"
    _CONFIG_CREATE_OBSERVABLES = f"{_CONFIG_NAMESPACE}.create_observables"
    _CONFIG_CREATE_INDICATORS = f"{_CONFIG_NAMESPACE}.create_indicators"
    _CONFIG_ACTOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.actor_start_timestamp"
    _CONFIG_REPORT_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.report_start_timestamp"
    _CONFIG_REPORT_INCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.report_include_types"
    _CONFIG_REPORT_STATUS = f"{_CONFIG_NAMESPACE}.report_status"
    _CONFIG_REPORT_TYPE = f"{_CONFIG_NAMESPACE}.report_type"
    _CONFIG_REPORT_GUESS_MALWARE = f"{_CONFIG_NAMESPACE}.report_guess_malware"
    _CONFIG_INDICATOR_START_TIMESTAMP = f"{_CONFIG_NAMESPACE}.indicator_start_timestamp"
    _CONFIG_INDICATOR_EXCLUDE_TYPES = f"{_CONFIG_NAMESPACE}.indicator_exclude_types"

    _CONFIG_UPDATE_EXISTING_DATA = "connector.update_existing_data"

    _CONFIG_SCOPE_ACTOR = "actor"
    _CONFIG_SCOPE_REPORT = "report"
    _CONFIG_SCOPE_INDICATOR = "indicator"
    _CONFIG_SCOPE_YARA_MASTER = "yara_master"

    _CONFIG_REPORT_STATUS_MAPPING = {
        "new": 0,
        "in progress": 1,
        "analyzed": 2,
        "closed": 3,
    }

    _DEFAULT_CREATE_OBSERVABLES = True
    _DEFAULT_CREATE_INDICATORS = True
    _DEFAULT_REPORT_TYPE = "threat-report"

    _CONNECTOR_RUN_INTERVAL_SEC = 60

    _STATE_LAST_RUN = "last_run"

    def __init__(self) -> None:
        """Initialize CrowdStrike connector."""
        config = self._read_configuration()

        # CrowdStrike connector configuration
        base_url = self._get_configuration(config, self._CONFIG_BASE_URL)
        client_id = self._get_configuration(config, self._CONFIG_CLIENT_ID)
        client_secret = self._get_configuration(config,
                                                self._CONFIG_CLIENT_SECRET)

        self.interval_sec = self._get_configuration(config,
                                                    self._CONFIG_INTERVAL_SEC,
                                                    is_number=True)

        scopes_str = self._get_configuration(config, self._CONFIG_SCOPES)
        scopes = set()
        if scopes_str is not None:
            scopes = set(convert_comma_separated_str_to_list(scopes_str))

        tlp = self._get_configuration(config, self._CONFIG_TLP)
        tlp_marking = self._convert_tlp_to_marking_definition(tlp)

        create_observables = self._get_configuration(
            config, self._CONFIG_CREATE_OBSERVABLES)
        if create_observables is None:
            create_observables = self._DEFAULT_CREATE_OBSERVABLES
        else:
            create_observables = bool(create_observables)

        create_indicators = self._get_configuration(
            config, self._CONFIG_CREATE_INDICATORS)
        if create_indicators is None:
            create_indicators = self._DEFAULT_CREATE_INDICATORS
        else:
            create_indicators = bool(create_indicators)

        actor_start_timestamp = self._get_configuration(
            config, self._CONFIG_ACTOR_START_TIMESTAMP, is_number=True)
        if is_timestamp_in_future(actor_start_timestamp):
            raise ValueError("Actor start timestamp is in the future")

        report_start_timestamp = self._get_configuration(
            config, self._CONFIG_REPORT_START_TIMESTAMP, is_number=True)
        if is_timestamp_in_future(report_start_timestamp):
            raise ValueError("Report start timestamp is in the future")

        report_status_str = self._get_configuration(config,
                                                    self._CONFIG_REPORT_STATUS)
        report_status = self._convert_report_status_str_to_report_status_int(
            report_status_str)

        report_type = self._get_configuration(config, self._CONFIG_REPORT_TYPE)
        if not report_type:
            report_type = self._DEFAULT_REPORT_TYPE

        report_include_types_str = self._get_configuration(
            config, self._CONFIG_REPORT_INCLUDE_TYPES)
        report_include_types = []
        if report_include_types_str is not None:
            report_include_types = convert_comma_separated_str_to_list(
                report_include_types_str)

        report_guess_malware = bool(
            self._get_configuration(config, self._CONFIG_REPORT_GUESS_MALWARE))

        indicator_start_timestamp = self._get_configuration(
            config, self._CONFIG_INDICATOR_START_TIMESTAMP, is_number=True)
        if is_timestamp_in_future(indicator_start_timestamp):
            raise ValueError("Indicator start timestamp is in the future")

        indicator_exclude_types_str = self._get_configuration(
            config, self._CONFIG_INDICATOR_EXCLUDE_TYPES)
        indicator_exclude_types = []
        if indicator_exclude_types_str is not None:
            indicator_exclude_types = convert_comma_separated_str_to_list(
                indicator_exclude_types_str)

        update_existing_data = bool(
            self._get_configuration(config, self._CONFIG_UPDATE_EXISTING_DATA))

        author = self._create_author()

        # Create OpenCTI connector helper.
        self.helper = OpenCTIConnectorHelper(config)

        # Create CrowdStrike client and importers.
        client = CrowdStrikeClient(base_url, client_id, client_secret)

        # Create importers.
        importers: List[BaseImporter] = []

        if self._CONFIG_SCOPE_ACTOR in scopes:
            actor_importer = ActorImporter(
                self.helper,
                client.intel_api.actors,
                update_existing_data,
                author,
                actor_start_timestamp,
                tlp_marking,
            )

            importers.append(actor_importer)

        if self._CONFIG_SCOPE_REPORT in scopes:
            report_importer = ReportImporter(
                self.helper,
                client.intel_api.reports,
                update_existing_data,
                author,
                report_start_timestamp,
                tlp_marking,
                report_include_types,
                report_status,
                report_type,
                report_guess_malware,
            )

            importers.append(report_importer)

        if self._CONFIG_SCOPE_INDICATOR in scopes:
            indicator_importer = IndicatorImporter(
                self.helper,
                client.intel_api.indicators,
                client.intel_api.reports,
                update_existing_data,
                author,
                indicator_start_timestamp,
                tlp_marking,
                create_observables,
                create_indicators,
                indicator_exclude_types,
                report_status,
                report_type,
            )

            importers.append(indicator_importer)

        if self._CONFIG_SCOPE_YARA_MASTER in scopes:
            yara_master_importer = YaraMasterImporter(
                self.helper,
                client.intel_api.rules,
                client.intel_api.reports,
                author,
                tlp_marking,
                update_existing_data,
                report_status,
                report_type,
            )

            importers.append(yara_master_importer)

        self.importers = importers

    @staticmethod
    def _read_configuration() -> Dict[str, str]:
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/../config.yml"
        if not os.path.isfile(config_file_path):
            return {}
        return yaml.load(open(config_file_path), Loader=yaml.FullLoader)

    @staticmethod
    def _create_author() -> Identity:
        return create_organization("CrowdStrike")

    @staticmethod
    def _get_yaml_path(config_name: str) -> List[str]:
        return config_name.split(".")

    @staticmethod
    def _get_environment_variable_name(yaml_path: List[str]) -> str:
        return "_".join(yaml_path).upper()

    @classmethod
    def _get_configuration(cls,
                           config: Dict[str, Any],
                           config_name: str,
                           is_number: bool = False) -> Any:
        yaml_path = cls._get_yaml_path(config_name)
        env_var_name = cls._get_environment_variable_name(yaml_path)
        config_value = get_config_variable(env_var_name,
                                           yaml_path,
                                           config,
                                           isNumber=is_number)
        return config_value

    @classmethod
    def _convert_tlp_to_marking_definition(
            cls, tlp_value: Optional[str]) -> MarkingDefinition:
        if tlp_value is None:
            return DEFAULT_TLP_MARKING_DEFINITION
        return get_tlp_string_marking_definition(tlp_value)

    @classmethod
    def _convert_report_status_str_to_report_status_int(
            cls, report_status: str) -> int:
        return cls._CONFIG_REPORT_STATUS_MAPPING[report_status.lower()]

    def _load_state(self) -> Dict[str, Any]:
        current_state = self.helper.get_state()
        if not current_state:
            return {}
        return current_state

    @staticmethod
    def _get_state_value(state: Optional[Mapping[str, Any]],
                         key: str,
                         default: Optional[Any] = None) -> Any:
        if state is not None:
            return state.get(key, default)
        return default

    @classmethod
    def _sleep(cls, delay_sec: Optional[int] = None) -> None:
        sleep_delay = (delay_sec if delay_sec is not None else
                       cls._CONNECTOR_RUN_INTERVAL_SEC)
        time.sleep(sleep_delay)

    def _is_scheduled(self, last_run: Optional[int],
                      current_time: int) -> bool:
        if last_run is None:
            self._info("CrowdStrike connector clean run")
            return True

        time_diff = current_time - last_run
        return time_diff >= self._get_interval()

    @staticmethod
    def _current_unix_timestamp() -> int:
        return int(time.time())

    def run(self):
        """Run CrowdStrike connector."""
        self._info("Starting CrowdStrike connector...")

        if not self.importers:
            self._error("Scope(s) not configured.")
            return

        while True:
            self._info("Running CrowdStrike connector...")
            run_interval = self._CONNECTOR_RUN_INTERVAL_SEC

            try:
                timestamp = self._current_unix_timestamp()
                current_state = self._load_state()

                self.helper.log_info(f"Loaded state: {current_state}")

                last_run = self._get_state_value(current_state,
                                                 self._STATE_LAST_RUN)
                if self._is_scheduled(last_run, timestamp):
                    work_id = self._initiate_work(timestamp)

                    new_state = current_state.copy()

                    for importer in self.importers:
                        importer_state = importer.start(work_id, current_state)
                        new_state.update(importer_state)

                    new_state[self._STATE_LAST_RUN] = self._current_unix_timestamp()

                    self._info("Storing new state: {0}", new_state)

                    self.helper.set_state(new_state)

                    message = (
                        f"State stored, next run in: {self._get_interval()} seconds"
                    )

                    self._info(message)

                    self._complete_work(work_id, message)
                else:
                    next_run = self._get_interval() - (timestamp - last_run)
                    run_interval = min(run_interval, next_run)

                    self._info(
                        "Connector will not run, next run in: {0} seconds",
                        next_run)

                self._sleep(delay_sec=run_interval)
            except (KeyboardInterrupt, SystemExit):
                self._info("CrowdStrike connector stopping...")
                exit(0)
            except Exception as e:  # noqa: B902
                self._error("CrowdStrike connector internal error: {0}",
                            str(e))
                self._sleep()

    def _initiate_work(self, timestamp: int) -> str:
        datetime_str = timestamp_to_datetime(timestamp)
        friendly_name = f"{self.helper.connect_name} @ {datetime_str}"
        work_id = self.helper.api.work.initiate_work(self.helper.connect_id,
                                                     friendly_name)

        self._info("New work '{0}' initiated", work_id)

        return work_id

    def _complete_work(self, work_id: str, message: str) -> None:
        self.helper.api.work.to_processed(work_id, message)

    def _get_interval(self) -> int:
        return int(self.interval_sec)

    def _info(self, msg: str, *args: Any) -> None:
        fmt_msg = msg.format(*args)
        self.helper.log_info(fmt_msg)

    def _error(self, msg: str, *args: Any) -> None:
        fmt_msg = msg.format(*args)
        self.helper.log_error(fmt_msg)
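
As a quick illustration of how _get_yaml_path and _get_environment_variable_name turn a dotted config name into the lookup keys used by _get_configuration; the sample value below is arbitrary.

# Illustration of the key derivation performed by CrowdStrike._get_yaml_path
# and _get_environment_variable_name for a dotted config name.
config_name = "crowdstrike.interval_sec"
yaml_path = config_name.split(".")          # ["crowdstrike", "interval_sec"]
env_var_name = "_".join(yaml_path).upper()  # "CROWDSTRIKE_INTERVAL_SEC"
print(yaml_path, env_var_name)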
class Cybercrimetracker:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = "{}/config.yml".format(
            os.path.dirname(os.path.abspath(__file__))
        )

        config = (
            yaml.load(open(config_file_path), Loader=yaml.FullLoader)
            if os.path.isfile(config_file_path)
            else {}
        )
        self.helper = OpenCTIConnectorHelper(config)

        # Connector Config
        self.confidence_level = get_config_variable(
            "CONNECTOR_CONFIDENCE_LEVEL",
            ["connector", "confidence_level"],
            config,
            isNumber=True,
        )
        self.update_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

        # CYBERCRIME-TRACKER.NET Config
        self.feed_url = get_config_variable(
            "CYBERCRIMET_RACKER_FEED_URL", ["cybercrime-tracker", "feed_url"], config,
        )
        self.connector_tlp = get_config_variable(
            "CYBERCRIME_TRACKER_TLP", ["cybercrime-tracker", "tlp"], config,
        )
        self.interval = get_config_variable(
            "CYBERCRIMETRACKER_INTERVAL",
            ["cybercrime-tracker", "interval"],
            config,
            isNumber=True,
        )

    @staticmethod
    def _time_to_datetime(input_date: time.struct_time) -> str:
        return datetime(
            input_date.tm_year,
            input_date.tm_mon,
            input_date.tm_mday,
            input_date.tm_hour,
            input_date.tm_min,
            input_date.tm_sec,
            tzinfo=timezone.utc,
        ).isoformat()

    def parse_feed_entry(self, entry):
        """
        Parses an entry from the feed and returns a dict with:

        date: date in iso format
        type: name of the malware associated with the C2 server
        url: the url of the C2
        ip: the IP address of the C2
        ext_link: An external link to CYBERCRIME-TRACKER.NET with details

        Note: CYBERCRIME-TRACKER.NET does not provide the protocol in the URL,
        so we always assume 'http'.
        """
        parsed_entry = {}

        pattern = (
            r"(?:\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type}"
            + r"\s+-%{GREEDYDATA}:\s+%{IP:ip}|"
            + r"\[%{GREEDYDATA:cwhqid}\]\s+Type:\s+%{GREEDYDATA:type})"
        )

        entry_summary = Grok(pattern).match(entry["summary"])

        if entry_summary:
            parsed_entry["date"] = self._time_to_datetime(entry["published_parsed"])
            parsed_entry["type"] = entry_summary["type"]
            parsed_entry["ext_link"] = entry["link"]
            parsed_entry["url"] = "http://{}".format(quote(entry["title"]))
            hostname = urlparse(parsed_entry["url"]).hostname

            if entry_summary["ip"] is None:
                parsed_entry["ip"] = hostname
            else:
                parsed_entry["ip"] = entry_summary["ip"]
                parsed_entry["domain"] = hostname

            self.helper.log_info("Parsed entry: {}".format(entry["title"]))

            return parsed_entry
        else:
            self.helper.log_error("Could not parse: {}".format(entry["title"]))
            return False

    def gen_indicator_pattern(self, parsed_entry):

        if "domain" in parsed_entry.keys():
            indicator_pattern = (
                "[ipv4-addr:value='{}'] ".format(parsed_entry["ip"])
                + "AND [url:value='{}'] ".format(parsed_entry["url"])
                + "AND [domain:value='{}']".format(parsed_entry["domain"])
            )
        else:
            indicator_pattern = "[ipv4-addr:value='{}'] ".format(
                parsed_entry["ip"]
            ) + "AND [url:value='{}']".format(parsed_entry["url"])

        return indicator_pattern

    def run(self):

        self.helper.log_info("Fetching data CYBERCRIME-TRACKER.NET...")

        tag = self.helper.api.tag.create(
            tag_type="C2-Type", value="C2 Server", color="#fc236b",
        )
        tlp = self.helper.api.marking_definition.read(
            filters=[
                {"key": "definition", "values": "TLP:{}".format(self.connector_tlp)}
            ]
        )

        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()

                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info(
                        "Connector last run: {}".format(
                            datetime.utcfromtimestamp(last_run).strftime(
                                "%Y-%m-%d %H:%M:%S"
                            )
                        )
                    )
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")

                # Run if it is the first time or we are past the interval

                if last_run is None or ((timestamp - last_run) > self.interval):
                    self.helper.log_info("Connector will run!")

                    # Get Feed Content
                    feed = feedparser.parse(self.feed_url)

                    self.helper.log_info(
                        "Found: {} entries.".format(len(feed["entries"]))
                    )

                    self.feed_summary = {
                        "Source": feed["feed"]["title"],
                        "Date": self._time_to_datetime(
                            feed["feed"]["published_parsed"]
                        ),
                        "Details": feed["feed"]["subtitle"],
                        "Link": feed["feed"]["link"],
                    }

                    # Create entity for the feed.
                    organization = self.helper.api.identity.create(
                        type="Organization",
                        name="CYBERCRIME-TRACKER.NET",
                        description="Tracker collecting and sharing \
                            daily updates of C2 IPs/Urls. \
                            http://cybercrime-tracker.net",
                    )

                    for entry in feed["entries"]:

                        parsed_entry = self.parse_feed_entry(entry)
                        # Skip entries that could not be parsed
                        if not parsed_entry:
                            continue

                        ext_reference = self.helper.api.external_reference.create(
                            source_name=self.feed_summary["Source"],
                            url=parsed_entry["ext_link"],
                        )

                        indicator_pattern = self.gen_indicator_pattern(parsed_entry)

                        # Add malware related to indicator
                        malware = self.helper.api.malware.create(
                            name=parsed_entry["type"],
                            description="{} malware.".format(parsed_entry["type"]),
                        )

                        # Add indicator
                        indicator = self.helper.api.indicator.create(
                            name=parsed_entry["url"],
                            description="C2 URL for: {}".format(parsed_entry["type"]),
                            pattern_type="stix",
                            indicator_pattern=indicator_pattern,
                            main_observable_type="URL",
                            valid_from=parsed_entry["date"],
                            created=parsed_entry["date"],
                            modified=parsed_entry["date"],
                            createdByRef=organization["id"],
                            markingDefinitions=[tlp["id"]],
                            update=self.update_data,
                        )

                        # Add tag
                        self.helper.api.stix_entity.add_tag(
                            id=indicator["id"], tag_id=tag["id"],
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=indicator["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        # Add relationship with malware
                        relation = self.helper.api.stix_relation.create(
                            fromType="Indicator",
                            fromId=indicator["id"],
                            toType="Malware",
                            toId=malware["id"],
                            relationship_type="indicates",
                            first_seen=self._time_to_datetime(
                                entry["published_parsed"]
                            ),
                            last_seen=self._time_to_datetime(entry["published_parsed"]),
                            description="URLs associated to: " + parsed_entry["type"],
                            weight=self.confidence_level,
                            role_played="C2 Server",
                            createdByRef=organization["id"],
                            created=parsed_entry["date"],
                            modified=parsed_entry["date"],
                            update=self.update_data,
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=relation["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        # Create Observables and link them to Indicator
                        observable_url = self.helper.api.stix_observable.create(
                            type="URL",
                            observable_value=parsed_entry["url"],
                            createdByRef=organization["id"],
                            markingDefinitions=[tlp["id"]],
                            update=self.update_data,
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=observable_url["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        self.helper.api.indicator.add_stix_observable(
                            id=indicator["id"], stix_observable_id=observable_url["id"],
                        )

                        observable_ip = self.helper.api.stix_observable.create(
                            type="IPv4-Addr",
                            observable_value=parsed_entry["ip"],
                            createdByRef=organization["id"],
                            markingDefinitions=[tlp["id"]],
                            update=self.update_data,
                        )

                        self.helper.api.stix_entity.add_external_reference(
                            id=observable_ip["id"],
                            external_reference_id=ext_reference["id"],
                        )

                        self.helper.api.indicator.add_stix_observable(
                            id=indicator["id"], stix_observable_id=observable_ip["id"],
                        )

                        if "domain" in parsed_entry.keys():
                            observable_domain = self.helper.api.stix_observable.create(
                                type="Domain",
                                observable_value=parsed_entry["domain"],
                                createdByRef=organization["id"],
                                markingDefinitions=[tlp["id"]],
                                update=self.update_data,
                            )

                            self.helper.api.stix_entity.add_external_reference(
                                id=observable_domain["id"],
                                external_reference_id=ext_reference["id"],
                            )

                            self.helper.api.indicator.add_stix_observable(
                                id=indicator["id"],
                                stix_observable_id=observable_domain["id"],
                            )
                            self.helper.api.stix_relation.create(
                                fromType="Domain",
                                fromId=observable_domain["id"],
                                toType="IPv4-Addr",
                                toId=observable_ip["id"],
                                relationship_type="resolves",
                                last_seen=self._time_to_datetime(
                                    entry["published_parsed"]
                                ),
                                weight=self.confidence_level,
                                createdByRef=organization["id"],
                                created=parsed_entry["date"],
                                modified=parsed_entry["date"],
                                update=self.update_data,
                            )

                    # Store the current timestamp as the last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as: {}".format(
                            str(timestamp)
                        )
                    )
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: {} seconds.".format(
                            str(round(self.interval, 2))
                        )
                    )
                    time.sleep(60)
                else:
                    new_interval = self.interval - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run. Next run in: {} seconds.".format(
                            str(round(new_interval, 2))
                        )
                    )
                    time.sleep(60)

            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
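
A minimal standalone sketch of the pattern-building step used by gen_indicator_pattern above, with a hypothetical parsed_entry dict (the IP, URL and domain values are made up). It joins the STIX observation expressions with AND exactly as the connector does and has no OpenCTI dependency:

# Standalone sketch (hypothetical data); mirrors gen_indicator_pattern().
def build_pattern(parsed_entry):
    parts = [
        "[ipv4-addr:value='{}']".format(parsed_entry["ip"]),
        "[url:value='{}']".format(parsed_entry["url"]),
    ]
    if "domain" in parsed_entry:
        parts.append("[domain:value='{}']".format(parsed_entry["domain"]))
    return " AND ".join(parts)

if __name__ == "__main__":
    sample = {
        "ip": "203.0.113.10",  # RFC 5737 documentation address, hypothetical
        "url": "http://c2.example.com/panel/gate.php",
        "domain": "c2.example.com",
    }
    print(build_pattern(sample))
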
Exemple #7
0
class FireEye:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.fireeye_api_url = get_config_variable("FIREEYE_API_URL",
                                                   ["fireeye", "api_url"],
                                                   config)
        self.fireeye_api_v3_public = get_config_variable(
            "FIREEYE_API_V3_PUBLIC", ["fireeye", "api_v3_public"], config)
        self.fireeye_api_v3_secret = get_config_variable(
            "FIREEYE_API_V3_SECRET", ["fireeye", "api_v3_secret"], config)
        self.fireeye_collections = get_config_variable(
            "FIREEYE_COLLECTIONS", ["fireeye", "collections"],
            config).split(",")
        self.fireeye_import_start_date = get_config_variable(
            "FIREEYE_IMPORT_START_DATE",
            ["fireeye", "import_start_date"],
            config,
        )
        self.fireeye_interval = get_config_variable("FIREEYE_INTERVAL",
                                                    ["fireeye", "interval"],
                                                    config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.added_after = parse(self.fireeye_import_start_date).timestamp()

        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="FireEye, Inc.",
            description=
            "FireEye is a publicly traded cybersecurity company headquartered in Milpitas, California. It has been involved in the detection and prevention of major cyber attacks. It provides hardware, software, and services to investigate cybersecurity attacks, protect against malicious software, and analyze IT security risks. FireEye was founded in 2004.",
        )

        self.marking = self.helper.api.marking_definition.create(
            definition_type="COMMERCIAL",
            definition="FIREEYE",
            x_opencti_order=99,
            x_opencti_color="#a01526",
        )

        # Init variables
        self.auth_token = None
        self._get_token()

    def get_interval(self):
        return int(self.fireeye_interval) * 60

    def _get_token(self):
        r = requests.post(
            self.fireeye_api_url + "/token",
            auth=HTTPBasicAuth(self.fireeye_api_v3_public,
                               self.fireeye_api_v3_secret),
            data={"grant_type": "client_credentials"},
        )
        if r.status_code != 200:
            raise ValueError("FireEye Authentication failed")
        data = r.json()
        self.auth_token = data.get("access_token")

    def _search(self, stix_id, retry=False):
        self.helper.log_info("Searching for " + stix_id)
        headers = {
            "authorization": "Bearer " + self.auth_token,
            "accept": "application/vnd.oasis.stix+json; version=2.1",
            "x-app-name": "opencti-connector-4.2.2",
        }
        body = """
            {
                "queries": [
                    {
                        "type": "ENTITY_TYPE",
                        "query": "id = 'ENTITY_ID'"
                    }
                ],
                "include_connected_objects": false
            }
        """
        entity_type = stix_id.split("--")[0]
        if entity_type not in searchable_types:
            return None
        body = body.replace("ENTITY_TYPE",
                            entity_type).replace("ENTITY_ID", stix_id)
        r = requests.post(self.fireeye_api_url + "/collections/search",
                          data=body,
                          headers=headers)
        if r.status_code == 200:
            return r
        elif (r.status_code == 401 or r.status_code == 403) and not retry:
            self._get_token()
            return self._search(stix_id, True)
        elif r.status_code == 204 or r.status_code == 205:
            return None
        elif r.status_code == 401 or r.status_code == 403:
            raise ValueError("Query failed, permission denied")
        else:
            self.helper.log_error(r.text)
            raise ValueError("An unknown error occurred")

    def _query(self, url, retry=False):
        headers = {
            "authorization": "Bearer " + self.auth_token,
            "accept": "application/vnd.oasis.stix+json; version=2.1",
            "x-app-name": "opencti-connector-4.2.2",
        }
        r = requests.get(url, headers=headers)
        if r.status_code == 200:
            return r
        elif (r.status_code == 401 or r.status_code == 403) and not retry:
            self._get_token()
            return self._query(url, True)
        elif r.status_code == 401 or r.status_code == 403:
            raise ValueError("Query failed, permission denied")
        else:
            raise ValueError("An unknown error occurred")

    def _send_entity(self, bundle, work_id):
        if "objects" in bundle and len(bundle) > 0:
            final_objects = []
            for stix_object in bundle["objects"]:
                if stix_object["type"] == "threat-actor":
                    stix_object["type"] = "intrusion-set"
                    stix_object["id"] = stix_object["id"].replace(
                        "threat-actor", "intrusion-set")
                if "created_by_ref" not in stix_object:
                    stix_object["created_by_ref"] = self.identity[
                        "standard_id"]
                if stix_object["type"] != "marking-definition":
                    stix_object["object_marking_refs"] = [
                        "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82"
                    ]
                    stix_object["object_marking_refs"].append(
                        self.marking["standard_id"])
                final_objects.append(stix_object)
            final_bundle = {"type": "bundle", "objects": final_objects}
            self.helper.send_stix2_bundle(
                json.dumps(final_bundle),
                update=self.update_existing_data,
                work_id=work_id,
            )

    def _import_collection(self,
                           collection,
                           last_id_modified_timestamp=None,
                           last_id=None,
                           work_id=None):
        have_next_page = True
        url = None
        last_object = None
        while have_next_page:
            if url is None:
                if last_id_modified_timestamp is not None:
                    url = (self.fireeye_api_url + "/collections/" +
                           collection + "/objects" + "?added_after=" +
                           str(self.added_after) + "&length=100" +
                           "&last_id_modified_timestamp=" +
                           str(last_id_modified_timestamp))
                else:
                    url = (self.fireeye_api_url + "/collections/" +
                           collection + "/objects" + "?added_after=" +
                           str(self.added_after) + "&length=100")
            result = self._query(url)
            parsed_result = json.loads(result.text)
            if "objects" in parsed_result and len(parsed_result) > 0:
                last_object = parsed_result["objects"][-1]
                object_ids = [
                    stix_object["id"]
                    for stix_object in parsed_result["objects"]
                ]
                if last_object["id"] != last_id:
                    final_objects = []
                    for stix_object in parsed_result["objects"]:
                        if stix_object["type"] == "threat-actor":
                            stix_object["type"] = "intrusion-set"
                            stix_object["id"] = stix_object["id"].replace(
                                "threat-actor", "intrusion-set")
                        if stix_object["type"] == "relationship":
                            # If the source_ref is not in the current bundle
                            if stix_object["source_ref"] not in object_ids:
                                # Search entity in OpenCTI
                                opencti_entity = (
                                    self.helper.api.stix_domain_object.read(
                                        id=stix_object["source_ref"]))
                                # If the entity is not found
                                if opencti_entity is None:
                                    # Search the entity in FireEye
                                    fireeye_entity = self._search(
                                        stix_object["source_ref"])
                                    # If the entity is found
                                    if fireeye_entity is not None:
                                        fireeye_entity_decoded = json.loads(
                                            fireeye_entity.text)
                                        # Send the entity before this bundle
                                        self._send_entity(
                                            fireeye_entity_decoded, work_id)
                            stix_object["source_ref"] = stix_object[
                                "source_ref"].replace("threat-actor",
                                                      "intrusion-set")
                            # Search if the entity is not in bundle
                            if stix_object["target_ref"] not in object_ids:
                                opencti_entity = (
                                    self.helper.api.stix_domain_object.read(
                                        id=stix_object["target_ref"]))
                                if opencti_entity is None:
                                    fireeye_entity = self._search(
                                        stix_object["target_ref"])
                                    if fireeye_entity is not None:
                                        fireeye_entity_decoded = json.loads(
                                            fireeye_entity.text)
                                        self._send_entity(
                                            fireeye_entity_decoded, work_id)
                            stix_object["target_ref"] = stix_object[
                                "target_ref"].replace("threat-actor",
                                                      "intrusion-set")
                        if ("object_refs" in stix_object
                                and len(stix_object["object_refs"]) > 0):
                            for object_ref in stix_object["object_refs"]:
                                if object_ref not in object_ids:
                                    opencti_entity = (self.helper.api.
                                                      stix_domain_object.read(
                                                          id=object_ref))
                                    if opencti_entity is None:
                                        fireeye_entity = self._search(
                                            object_ref)
                                        if fireeye_entity is not None:
                                            fireeye_entity_decoded = json.loads(
                                                fireeye_entity.text)
                                            self._send_entity(
                                                fireeye_entity_decoded,
                                                work_id)
                        if "created_by_ref" not in stix_object:
                            stix_object["created_by_ref"] = self.identity[
                                "standard_id"]
                        if stix_object["type"] != "marking-definition":
                            stix_object["object_marking_refs"] = [
                                "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82"
                            ]
                            stix_object["object_marking_refs"].append(
                                self.marking["standard_id"])
                        final_objects.append(stix_object)
                    final_bundle = {"type": "bundle", "objects": final_objects}
                    self.helper.send_stix2_bundle(
                        json.dumps(final_bundle),
                        update=self.update_existing_data,
                        work_id=work_id,
                    )
                    headers = result.headers
                    if "Link" in headers:
                        have_next_page = True
                        link = headers["Link"].split(";")
                        url = link[0][1:-1]
                        last_id_modified_timestamp = parse_qs(
                            urlparse(
                                url).query)["last_id_modified_timestamp"][0]
                    else:
                        have_next_page = False
                else:
                    have_next_page = False
        return {
            "last_id_modified_timestamp": last_id_modified_timestamp,
            "last_id": last_object["id"] if "id" in last_object else None,
        }

    def run(self):
        while True:
            try:
                self.helper.log_info("Synchronizing with FireEye API...")
                timestamp = int(time.time())
                now = datetime.datetime.utcfromtimestamp(timestamp)
                friendly_name = "FireEye run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                current_state = self.helper.get_state()
                if (current_state is None
                        or "last_id_modified_timestamp" not in current_state):
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators": None,
                            "reports": None,
                        },
                        "last_id": {
                            "indicators": None,
                            "reports": None,
                        },
                    })
                    current_state = self.helper.get_state()
                last_id_modified_timestamp = current_state[
                    "last_id_modified_timestamp"]
                last_id = current_state["last_id"]
                if "indicators" in self.fireeye_collections:
                    self.helper.log_info(
                        "Get indicators created after " +
                        str(last_id_modified_timestamp["indicators"]))
                    indicators_last = self._import_collection(
                        "indicators",
                        last_id_modified_timestamp["indicators"],
                        last_id["indicators"],
                        work_id,
                    )
                    current_state = self.helper.get_state()
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators":
                            indicators_last["last_id_modified_timestamp"],
                            "reports":
                            current_state["last_id_modified_timestamp"]
                            ["reports"],
                        },
                        "last_id": {
                            "indicators": indicators_last["last_id"],
                            "reports": current_state["last_id"]["reports"],
                        },
                    })
                if "reports" in self.fireeye_collections:
                    self.helper.log_info(
                        "Get reports created after " +
                        str(last_id_modified_timestamp["reports"]))
                    reports_last = self._import_collection(
                        "reports",
                        last_id_modified_timestamp["reports"],
                        last_id["reports"],
                        work_id,
                    )
                    current_state = self.helper.get_state()
                    self.helper.set_state({
                        "last_id_modified_timestamp": {
                            "indicators":
                            current_state["last_id_modified_timestamp"]
                            ["indicators"],
                            "reports":
                            reports_last["last_id_modified_timestamp"],
                        },
                        "last_id": {
                            "indicators":
                            current_state["last_id"]["indicators"],
                            "reports": reports_last["last_id"],
                        },
                    })
                message = "End of synchronization"
                self.helper.api.work.to_processed(work_id, message)
                self.helper.log_info(message)
                time.sleep(self.get_interval())
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
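
The pagination in _import_collection above relies on the Link header returned by the FireEye collections API. A small self-contained sketch of that extraction, using a hypothetical header value, shows how the next-page URL and the last_id_modified_timestamp cursor are recovered with the standard library only:

# Standalone sketch (hypothetical Link header value).
from urllib.parse import parse_qs, urlparse

headers = {
    "Link": "<https://api.example.com/collections/indicators/objects"
            "?added_after=1609459200&length=100"
            "&last_id_modified_timestamp=1612137600000>; rel=\"next\""
}

if "Link" in headers:
    link = headers["Link"].split(";")
    url = link[0][1:-1]  # strip the surrounding angle brackets
    cursor = parse_qs(urlparse(url).query)["last_id_modified_timestamp"][0]
    print(url)
    print(cursor)  # '1612137600000'
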
Exemple #8
0
class VXVault:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.vxvault_url = get_config_variable("VXVAULT_URL",
                                               ["vxvault", "url"], config)
        self.vxvault_interval = get_config_variable("VXVAULT_INTERVAL",
                                                    ["vxvault", "interval"],
                                                    config, True)
        self.create_indicators = get_config_variable(
            "VXVAULT_CREATE_INDICATORS", ["vxvault", "create_indicators"],
            config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name="VX Vault",
            description=
            "VX Vault is providing URLs of potential malicious payload.",
        )

    def get_interval(self):
        return int(self.vxvault_interval) * 60 * 60 * 24

    def next_run(self, seconds):
        # No-op placeholder; not used elsewhere in this connector.
        return

    def run(self):
        self.helper.log_info("Fetching VXVault dataset...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: " +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime("%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.vxvault_interval) - 1) * 60 * 60 * 24)):
                    self.helper.log_info("Connector will run!")
                    now = datetime.utcfromtimestamp(timestamp)
                    friendly_name = "VXVault run @ " + now.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    work_id = self.helper.api.work.initiate_work(
                        self.helper.connect_id, friendly_name)
                    try:
                        response = urllib.request.urlopen(
                            self.vxvault_url,
                            context=ssl.create_default_context(
                                cafile=certifi.where()),
                        )
                        data = response.read()
                        with open(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.txt",
                                "wb",
                        ) as file:
                            file.write(data)
                        count = 0
                        bundle_objects = []
                        with open(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.txt") as fp:
                            for line in fp:
                                count += 1
                                if count <= 3:
                                    continue
                                external_reference = ExternalReference(
                                    source_name="VX Vault",
                                    url="http://vxvault.net",
                                    description="VX Vault repository URL",
                                )
                                stix_observable = SimpleObservable(
                                    id=OpenCTIStix2Utils.
                                    generate_random_stix_id(
                                        "x-opencti-simple-observable"),
                                    key="Url.value",
                                    value=line.strip(),  # drop trailing newline
                                    description="VX Vault URL",
                                    x_opencti_score=80,
                                    object_marking_refs=[TLP_WHITE],
                                    created_by_ref=self.
                                    identity["standard_id"],
                                    x_opencti_create_indicator=self.
                                    create_indicators,
                                    external_references=[external_reference],
                                )
                                bundle_objects.append(stix_observable)
                        bundle = Bundle(objects=bundle_objects).serialize()
                        self.helper.send_stix2_bundle(
                            bundle,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                        if os.path.exists(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.txt"):
                            os.remove(
                                os.path.dirname(os.path.abspath(__file__)) +
                                "/data.txt")
                    except Exception as e:
                        self.helper.log_error(str(e))
                    # Store the current timestamp as a last run
                    message = "Connector successfully run, storing last_run as " + str(
                        timestamp)
                    self.helper.log_info(message)
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.api.work.to_processed(work_id, message)
                    self.helper.log_info(
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
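
VXVault (and the AMITT connectors below) use the same last_run / interval gate before doing any work. A compact, self-contained sketch of that check, with a hypothetical interval value, makes the logic easy to exercise in isolation:

# Standalone sketch of the scheduling gate (hypothetical interval).
import time

INTERVAL_DAYS = 7  # stand-in for VXVAULT_INTERVAL / AMITT_INTERVAL

def should_run(state, now=None):
    now = int(time.time()) if now is None else now
    last_run = state.get("last_run") if state else None
    if last_run is None:
        return True  # connector has never run
    # "interval minus one day" threshold, as in the connectors above
    return (now - last_run) > (INTERVAL_DAYS - 1) * 60 * 60 * 24

print(should_run(None))                                 # True
print(should_run({"last_run": int(time.time()) - 10}))  # False
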
Exemple #9
0
class Amitt:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.amitt_file_url = get_config_variable("AMITT_FILE_URL",
                                                  ["amitt", "amitt_file_url"],
                                                  config)
        self.pre_amitt_file_url = get_config_variable(
            "PRE_AMITT_FILE_URL", ["amitt", "pre_amitt_file_url"], config)
        self.amitt_interval = get_config_variable("AMITT_INTERVAL",
                                                  ["amitt", "interval"],
                                                  config, True)
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )

    def get_interval(self):
        return int(self.amitt_interval) * 60 * 60 * 24

    def run(self):
        self.helper.log_info("Fetching AMITT datasets...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_run" in current_state:
                    last_run = current_state["last_run"]
                    self.helper.log_info("Connector last run: " +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime("%Y-%m-%d %H:%M:%S"))
                else:
                    last_run = None
                    self.helper.log_info("Connector has never run")
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.amitt_interval) - 1) * 60 * 60 * 24)):
                    self.helper.log_info("Connector will run!")
                    amitt_data = (urllib.request.urlopen(
                        self.amitt_file_url).read().decode("utf-8"))
                    self.helper.send_stix2_bundle(
                        amitt_data,
                        entities_types=self.helper.connect_scope,
                        update=self.update_existing_data,
                    )
                    pre_amitt_data = urllib.request.urlopen(
                        self.pre_amitt_file_url).read()
                    self.helper.send_stix2_bundle(
                        pre_amitt_data.decode("utf-8"),
                        entities_types=self.helper.connect_scope,
                        update=self.update_existing_data,
                    )
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        "Connector successfully run, storing last_run as " +
                        str(timestamp))
                    self.helper.set_state({"last_run": timestamp})
                    self.helper.log_info(
                        "Last_run stored, next run in: " +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        " days")
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        "Connector will not run, next run in: " +
                        str(round(new_interval / 60 / 60 / 24, 2)) + " days")
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
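
Every connector in this collection resolves its settings through get_config_variable, which prefers an environment variable and falls back to the YAML config. The following is a simplified stand-in for that lookup (not pycti's actual implementation), useful for reading the constructor calls above:

# Simplified stand-in for the env-var / YAML fallback (not pycti's code).
import os

def config_value(env_var, yaml_path, config, is_number=False, default=None):
    value = os.environ.get(env_var)
    if value is None:
        node = config
        for key in yaml_path:
            if not isinstance(node, dict) or key not in node:
                return default
            node = node[key]
        value = node
    return int(value) if is_number and value is not None else value

config = {"amitt": {"interval": 7}}  # hypothetical config.yml content
print(config_value("AMITT_INTERVAL", ["amitt", "interval"], config, is_number=True))
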
Exemple #10
0
class Amitt:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + '/config.yml'
        config = yaml.load(open(config_file_path), Loader=yaml.FullLoader
                           ) if os.path.isfile(config_file_path) else {}
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.amitt_file_url = get_config_variable('AMITT_FILE_URL',
                                                  ['amitt', 'amitt_file_url'],
                                                  config)
        self.pre_amitt_file_url = get_config_variable(
            'PRE_AMITT_FILE_URL', ['amitt', 'pre_amitt_file_url'], config)
        self.amitt_interval = get_config_variable('AMITT_INTERVAL',
                                                  ['amitt', 'interval'],
                                                  config, True)
        self.update_existing_data = get_config_variable(
            'CONNECTOR_UPDATE_EXISTING_DATA',
            ['connector', 'update_existing_data'], config)

    def get_interval(self):
        return int(self.amitt_interval) * 60 * 60 * 24

    def run(self):
        self.helper.log_info('Fetching AMITT datasets...')
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and 'last_run' in current_state:
                    last_run = current_state['last_run']
                    self.helper.log_info('Connector last run: ' +
                                         datetime.utcfromtimestamp(last_run).
                                         strftime('%Y-%m-%d %H:%M:%S'))
                else:
                    last_run = None
                    self.helper.log_info('Connector has never run')
                # If the last_run is more than interval-1 day
                if last_run is None or ((timestamp - last_run) > (
                    (int(self.amitt_interval) - 1) * 60 * 60 * 24)):
                    self.helper.log_info('Connector will run!')
                    amitt_data = urllib.request.urlopen(
                        self.amitt_file_url).read().decode('utf-8')
                    self.helper.send_stix2_bundle(amitt_data,
                                                  self.helper.connect_scope,
                                                  self.update_existing_data)
                    pre_amitt_data = urllib.request.urlopen(
                        self.pre_amitt_file_url).read()
                    self.helper.send_stix2_bundle(
                        pre_amitt_data.decode('utf-8'),
                        self.helper.connect_scope, self.update_existing_data)
                    # Store the current timestamp as a last run
                    self.helper.log_info(
                        'Connector successfully run, storing last_run as ' +
                        str(timestamp))
                    self.helper.set_state({'last_run': timestamp})
                    self.helper.log_info(
                        'Last_run stored, next run in: ' +
                        str(round(self.get_interval() / 60 / 60 / 24, 2)) +
                        ' days')
                    time.sleep(60)
                else:
                    new_interval = self.get_interval() - (timestamp - last_run)
                    self.helper.log_info(
                        'Connector will not run, next run in: ' +
                        str(round(new_interval / 60 / 60 / 24, 2)) + ' days')
                    time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info('Connector stop')
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
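
This second AMITT listing differs from the previous one mainly in quoting style and in calling send_stix2_bundle with positional rather than keyword arguments; the scheduling arithmetic is identical. A short sketch of that arithmetic, with made-up numbers, shows how the "next run in N days" figure in the logs is produced:

# Standalone sketch of the "next run" arithmetic (hypothetical values).
import time

amitt_interval_days = 7
interval_seconds = amitt_interval_days * 60 * 60 * 24

last_run = int(time.time()) - 2 * 60 * 60 * 24  # pretend last run: 2 days ago
remaining = interval_seconds - (int(time.time()) - last_run)
print("next run in:", round(remaining / 60 / 60 / 24, 2), "days")  # ~5.0
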
Exemple #11
0
class TaniumConnector:
    def __init__(self):
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.tanium_url = get_config_variable("TANIUM_URL", ["tanium", "url"],
                                              config)
        self.tanium_ssl_verify = get_config_variable("TANIUM_SSL_VERIFY",
                                                     ["tanium", "ssl_verify"],
                                                     config, False, True)
        self.tanium_login = get_config_variable("TANIUM_LOGIN",
                                                ["tanium", "login"], config)
        self.tanium_password = get_config_variable("TANIUM_PASSWORD",
                                                   ["tanium", "password"],
                                                   config)
        self.tanium_indicator_types = get_config_variable(
            "TANIUM_INDICATOR_TYPES", ["tanium", "indicator_types"],
            config).split(",")
        self.tanium_observable_types = get_config_variable(
            "TANIUM_OBSERVABLE_TYPES", ["tanium", "observable_types"],
            config).split(",")
        self.tanium_import_label = get_config_variable(
            "TANIUM_IMPORT_LABEL", ["tanium", "import_label"], config, False,
            "")
        self.tanium_import_from_date = get_config_variable(
            "TANIUM_IMPORT_FROM_DATE", ["tanium", "import_from_date"], config)
        self.tanium_reputation_blacklist_label = get_config_variable(
            "TANIUM_REPUTATION_BLACKLIST_LABEL",
            ["tanium", "reputation_blacklist_label"],
            config,
            False,
            "",
        )
        self.tanium_auto_quickscan = get_config_variable(
            "TANIUM_AUTO_QUICKSCAN", ["tanium", "auto_quickscan"], config,
            False, False)
        self.tanium_computer_groups = get_config_variable(
            "TANIUM_COMPUTER_GROUPS", ["tanium", "computer_groups"], config,
            False, "").split(",")

        # Variables
        self.session = None

        # Open a session
        self._get_session()

        # Create the state
        if self.tanium_import_from_date:
            timestamp = (parse(self.tanium_import_from_date).timestamp() *
                         1000 if self.tanium_import_from_date != "now" else
                         int(round(time.time() * 1000)) - 1000)
            current_state = self.helper.get_state()
            if current_state is None:
                self.helper.set_state({"connectorLastEventId": timestamp})

        # Create the source if not exist
        self.source_id = None
        sources = self._query("get", "/plugin/products/detect3/api/v1/sources")
        for source in sources:
            if source["name"] == "OpenCTI":
                self.source_id = source["id"]
        if self.source_id is None:
            source = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources",
                {
                    "type": "api-client",
                    "name": "OpenCTI",
                    "description":
                    "Cyber Threat Intelligence knowledge imported from OpenCTI.",
                    "canAutoQuickScan": True,
                },
            )
            self.source_id = source["id"]

    def _get_session(self):
        payload = {
            "username": self.tanium_login,
            "password": self.tanium_password,
        }
        r = requests.post(
            self.tanium_url + "/api/v2/session/login",
            json=payload,
            verify=self.tanium_ssl_verify,
        )
        if r.status_code == 200:
            result = r.json()
            self.session = result["data"]["session"]
        else:
            raise ValueError("Cannot login to the Tanium API")

    def _query(
        self,
        method,
        uri,
        payload=None,
        content_type="application/json",
        type=None,
        retry=False,
    ):
        self.helper.log_info("Query " + method + " on " + uri)
        headers = {"session": self.session}
        if method != "upload":
            headers["content-type"] = content_type
        if type is not None:
            headers["type"] = type
        if content_type == "application/octet-stream":
            headers["content-disposition"] = ("attachment; filename=" +
                                              payload["filename"])
            if "name" in payload:
                headers["name"] = payload["name"]
            if "description" in payload:
                headers["description"] = payload["description"]
        if method == "get":
            r = requests.get(
                self.tanium_url + uri,
                headers=headers,
                params=payload,
                verify=self.tanium_ssl_verify,
            )
        elif method == "post":
            if content_type == "application/octet-stream":
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    data=payload["document"],
                    verify=self.tanium_ssl_verify,
                )
            elif type is not None:
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    data=payload["intelDoc"],
                    verify=self.tanium_ssl_verify,
                )
            else:
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    json=payload,
                    verify=self.tanium_ssl_verify,
                )
        elif method == "upload":
            # Write the content to a temp file, then upload it without leaking
            # open file handles.
            with open(payload["filename"], "w") as f:
                f.write(payload["content"])
            with open(payload["filename"], "rb") as upload_file:
                files = {"hash": upload_file}
                r = requests.post(
                    self.tanium_url + uri,
                    headers=headers,
                    files=files,
                    verify=self.tanium_ssl_verify,
                )
        elif method == "put":
            if content_type == "application/xml":
                r = requests.put(
                    self.tanium_url + uri,
                    headers=headers,
                    data=payload,
                    verify=self.tanium_ssl_verify,
                )
            else:
                r = requests.put(
                    self.tanium_url + uri,
                    headers=headers,
                    json=payload,
                    verify=self.tanium_ssl_verify,
                )
        elif method == "patch":
            r = requests.patch(
                self.tanium_url + uri,
                headers=headers,
                json=payload,
                verify=self.tanium_ssl_verify,
            )
        elif method == "delete":
            r = requests.delete(self.tanium_url + uri,
                                headers=headers,
                                verify=self.tanium_ssl_verify)
        else:
            raise ValueError("Unspported method")
        if r.status_code == 200:
            try:
                return r.json()
            except ValueError:
                return r.text
        elif r.status_code == 401 and not retry:
            self._get_session()
            return self._query(method, uri, payload, content_type, type, True)
        elif r.status_code == 401:
            raise ValueError("Query failed, permission denied")
        else:
            self.helper.log_info(r.text)

    def _get_labels(self, labels):
        # List labels
        tanium_labels = self._query("get",
                                    "/plugin/products/detect3/api/v1/labels",
                                    {"limit": 500})
        tanium_labels_dict = {}
        for tanium_label in tanium_labels:
            tanium_labels_dict[tanium_label["name"].lower()] = tanium_label
        final_labels = []
        for label in labels:
            # Label already exists
            if label["value"] in tanium_labels_dict:
                final_labels.append(tanium_labels_dict[label["value"]])
            # Create the label
            else:
                created_label = self._query(
                    "post",
                    "/plugin/products/detect3/api/v1/labels",
                    {
                        "name": label["value"],
                        "description": "Label imported from OpenCTI",
                    },
                )
                final_labels.append(created_label)
        return final_labels

    def _get_by_id(self, internal_id, yara=False):
        if yara:
            response = self._query(
                "get",
                "/plugin/products/detect3/api/v1/intels",
                {"name": internal_id + ".yara"},
            )
        else:
            response = self._query(
                "get",
                "/plugin/products/detect3/api/v1/intels",
                {"description": internal_id},
            )
        if response and len(response) > 0:
            return response[0]
        else:
            return None

    def _get_reputation_by_hash(self, hash):
        response = self._query(
            "get",
            "/plugin/products/reputation/v3/reputations/custom",
            {"search": hash},
        )
        if response["data"] and len(response["data"]) > 0:
            return response["data"][0]
        else:
            return None

    def _create_indicator_stix(self, entity, original_intel_document=None):
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"])
            if intel_document is not None:
                return intel_document
        stix2_bundle = self.helper.api.stix2.export_entity(
            entity["entity_type"],
            entity["id"],
            "simple",
            None,
            True,
            True,
        )
        initialize_options()
        stix_indicator = slide_string(stix2_bundle)
        stix_indicator = re.sub(
            r"<indicator:Description>(.*?)<\/indicator:Description>",
            r"<indicator:Description>" + entity["id"] +
            "</indicator:Description>",
            stix_indicator,
        )
        stix_indicator = re.sub(
            r"<indicator:Description ordinality=\"1\">(.*?)<\/indicator:Description>",
            r'<indicator:Description ordinality="1">' + entity["id"] +
            "</indicator:Description>",
            stix_indicator,
        )
        payload = {"intelDoc": stix_indicator}
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/" +
                str(original_intel_document["id"]),
                stix_indicator,
                "application/xml",
                "stix",
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/" +
                str(self.source_id) + "/intels",
                payload,
                "application/xml",
                "stix",
            )
        return intel_document

    def _create_indicator_yara(self, entity, original_intel_document=None):
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"], True)
            if intel_document is not None:
                return intel_document

        filename = entity["id"] + ".yara"
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/" +
                str(original_intel_document["id"]),
                {
                    "filename": filename,
                    "document": entity["pattern"],
                    "name": entity["name"],
                    "description": entity["id"],
                },
                "application/octet-stream",
                "yara",
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/" +
                str(self.source_id) + "/intels",
                {
                    "filename": filename,
                    "document": entity["pattern"],
                    "name": entity["name"],
                    "description": entity["id"],
                },
                "application/octet-stream",
                "yara",
            )
        return intel_document

    def _create_tanium_signal(self, entity, original_intel_document=None):
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"])
            if intel_document is not None:
                return intel_document

        platforms = []
        if "x_mitre_platforms" in entity and len(
                entity["x_mitre_platforms"]) > 0:
            for x_mitre_platform in entity["x_mitre_platforms"]:
                if x_mitre_platform in ["Linux", "Windows", "macOS"]:
                    platforms.append(x_mitre_platform.lower(
                    ) if x_mitre_platform != "macOS" else "mac")
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/" +
                str(original_intel_document["id"]),
                {
                    "name": entity["name"],
                    "description": entity["id"],
                    "platforms": platforms,
                    "contents": entity["pattern"],
                },
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/" +
                str(self.source_id) + "/intels",
                {
                    "name": entity["name"],
                    "description": entity["id"],
                    "platforms": platforms,
                    "contents": entity["pattern"],
                },
            )
        return intel_document

    def _create_observable(self, entity, original_intel_document=None):
        if original_intel_document is None:
            intel_document = self._get_by_id(entity["id"])
            if intel_document is not None:
                return intel_document

        intel_type = None
        value = None
        name = None
        if entity["entity_type"] == "StixFile":
            intel_type = "file_hash"
            if "hashes" in entity:
                for hash in entity["hashes"]:
                    value = (value + hash["hash"] +
                             "\n" if value is not None else hash["hash"] +
                             "\n")
                    name = hash["hash"]

        elif entity["entity_type"] in [
                "IPv4-Addr",
                "IPv6-Addr",
                "Domain-Name",
                "X-OpenCTI-Hostname",
        ]:
            intel_type = "ip_or_host"
            value = entity["value"]
            name = entity["value"]
        if intel_type is None or value is None:
            return None

        openioc = self._query(
            "post",
            "/plugin/products/detect3/api/v1/intels/quick-add",
            {
                "exact": True,
                "name": name,
                "description": entity["id"],
                "type": intel_type,
                "text": value,
            },
        )
        openioc = re.sub(
            r"<description>(.*?)<\/description>",
            r"<description>" + entity["id"] + "</description>",
            openioc,
        )
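        # Note (added): the quick-add endpoint returns an OpenIOC XML document;
        # its <description> element is rewritten above to hold the OpenCTI
        # entity id, presumably so the intel document can later be matched
        # back to its OpenCTI object (see _get_by_id).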
        payload = {"intelDoc": openioc}
        if original_intel_document is not None:
            intel_document = self._query(
                "put",
                "/plugin/products/detect3/api/v1/intels/" +
                str(original_intel_document["id"]),
                payload,
                "application/xml",
                "openioc",
            )
        else:
            intel_document = self._query(
                "post",
                "/plugin/products/detect3/api/v1/sources/" +
                str(self.source_id) + "/intels",
                payload,
                "application/xml",
                "openioc",
            )

        return intel_document

    def _post_operations(self, entity, intel_document):
        if intel_document is not None and entity is not None:
            if self.tanium_auto_quickscan:
                for computer_group in self.tanium_computer_groups:
                    self._query(
                        "post",
                        "/plugin/products/detect3/api/v1/quick-scans",
                        {
                            "computerGroupId": int(computer_group),
                            "intelDocId": intel_document["id"],
                        },
                    )

            external_reference = self.helper.api.external_reference.create(
                source_name="Tanium",
                url=self.tanium_url + "/#/thr_workbench/intel/" +
                str(intel_document["id"]),
                external_id=str(intel_document["id"]),
                description="Intel document within the Tanium platform.",
            )
            if entity["entity_type"] == "Indicator":
                self.helper.api.stix_domain_object.add_external_reference(
                    id=entity["id"],
                    external_reference_id=external_reference["id"])
            else:
                self.helper.api.stix_cyber_observable.add_external_reference(
                    id=entity["id"],
                    external_reference_id=external_reference["id"])
            if len(entity["objectLabel"]) > 0:
                labels = self._get_labels(entity["objectLabel"])
                for label in labels:
                    if label is not None:
                        self._query(
                            "put",
                            "/plugin/products/detect3/api/v1/intels/" +
                            str(intel_document["id"]) + "/labels",
                            {"id": label["id"]},
                        )

    def _process_intel(self, entity_type, data, original_intel_document=None):
        entity = None
        intel_document = None
        if entity_type == "indicator":
            entity = self.helper.api.indicator.read(
                id=data["data"]["x_opencti_id"])
            if (entity is None or entity["revoked"] or entity["pattern_type"]
                    not in self.tanium_indicator_types):
                return {"entity": entity, "intel_document": intel_document}
            if entity["pattern_type"] == "stix":
                intel_document = self._create_indicator_stix(
                    entity, original_intel_document)
            elif entity["pattern_type"] == "yara":
                intel_document = self._create_indicator_yara(
                    entity, original_intel_document)
            elif entity["pattern_type"] == "tanium-signal":
                intel_document = self._create_tanium_signal(
                    entity, original_intel_document)
        elif (StixCyberObservableTypes.has_value(entity_type)
              and entity_type.lower() in self.tanium_observable_types):
            entity = self.helper.api.stix_cyber_observable.read(
                id=data["data"]["x_opencti_id"])
            if entity is None:
                return {"entity": entity, "intel_document": intel_document}
            intel_document = self._create_observable(entity,
                                                     original_intel_document)
        return {"entity": entity, "intel_document": intel_document}

    def _process_message(self, msg):
        data = json.loads(msg.data)
        entity_type = data["data"]["type"]
        # Ignore events that are neither indicators nor supported observable
        # types and that do not carry the reputation blacklist label
        if (entity_type != "indicator"
                and entity_type not in self.tanium_observable_types
                and ("labels" in data["data"]
                     and self.tanium_reputation_blacklist_label
                     not in data["data"]["labels"])
                and self.tanium_reputation_blacklist_label != "*"):
            self.helper.log_info(
                "Not an indicator and not an observable to import, doing nothing"
            )
            return
        # Handle creation
        if msg.event == "create":
            # No label
            if ("labels" not in data["data"]
                    and self.tanium_import_label != "*"
                    and self.tanium_reputation_blacklist_label != "*"):
                self.helper.log_info(
                    "No label marked as import, doing nothing")
                return
            # Import or blacklist labels are not in the given labels
            elif (("labels" in data["data"]
                   and self.tanium_import_label not in data["data"]["labels"])
                  and self.tanium_import_label != "*"
                  and self.tanium_reputation_blacklist_label
                  not in data["data"]["labels"]
                  and self.tanium_reputation_blacklist_label != "*"):
                self.helper.log_info(
                    "No label marked as import or no global label, doing nothing"
                )
                return
            # Revoked is true
            elif "revoked" in data["data"] and data["data"]["revoked"]:
                return
            if ("labels" in data["data"]
                    and self.tanium_import_label in data["data"]["labels"]
                ) or self.tanium_import_label == "*":
                # Process intel
                processed_intel = self._process_intel(entity_type, data)
                intel_document = processed_intel["intel_document"]
                entity = processed_intel["entity"]
                # Create external reference and add object labels
                self._post_operations(entity, intel_document)
            if ("labels" in data["data"]
                    and self.tanium_reputation_blacklist_label in data["data"]
                ["labels"]) or self.tanium_reputation_blacklist_label == "*":
                if "hashes" in data["data"]:
                    entry = {"list": "blacklist"}
                    if "MD5" in data["data"]["hashes"]:
                        entry["md5"] = data["data"]["hashes"]["MD5"]
                        entry["uploadedHash"] = data["data"]["hashes"]["MD5"]
                    else:
                        entry["md5"] = ""
                    if "SHA-1" in data["data"]["hashes"]:
                        entry["sha1"] = data["data"]["hashes"]["SHA-1"]
                        entry["uploadedHash"] = data["data"]["hashes"]["SHA-1"]
                    else:
                        entry["sha1"] = ""
                    if "SHA-256" in data["data"]["hashes"]:
                        entry["sha256"] = data["data"]["hashes"]["SHA-256"]
                        entry["uploadedHash"] = data["data"]["hashes"][
                            "SHA-256"]
                    else:
                        entry["sha256"] = ""
                    entry["notes"] = ",".join(data["data"]["labels"])
                    self._query(
                        "post",
                        "/plugin/products/reputation/v3/reputations/custom/upload?append=true",
                        [entry],
                    )
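                    # Note (added): "uploadedHash" ends up holding the SHA-256
                    # when available (it is assigned last), else the SHA-1,
                    # else the MD5.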
        elif msg.event == "update":
            if ("x_data_update" in data["data"]
                    and "add" in data["data"]["x_data_update"]
                    and "labels" in data["data"]["x_data_update"]["add"]):
                if (self.tanium_reputation_blacklist_label
                        in data["data"]["x_data_update"]["add"]["labels"]
                        and StixCyberObservableTypes.has_value(
                            data["data"]["type"])):
                    observable = self.helper.api.stix_cyber_observable.read(
                        id=data["data"]["id"])
                    observable = self.helper.api.stix2.generate_export(
                        observable)
                    if "hashes" in observable:
                        entry = {"list": "blacklist"}
                        if "MD5" in observable["hashes"]:
                            entry["md5"] = observable["hashes"]["MD5"]
                            entry["uploadedHash"] = observable["hashes"]["MD5"]
                        else:
                            entry["md5"] = ""
                        if "SHA-1" in observable["hashes"]:
                            entry["sha1"] = observable["hashes"]["SHA-1"]
                            entry["uploadedHash"] = observable["hashes"][
                                "SHA-1"]
                        else:
                            entry["sha1"] = ""
                        if "SHA-256" in observable["hashes"]:
                            entry["sha256"] = observable["hashes"]["SHA-256"]
                            entry["uploadedHash"] = observable["hashes"][
                                "SHA-256"]
                        else:
                            entry["sha256"] = ""
                        entry["notes"] = ",".join(observable["labels"])
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/upload?append=true",
                            [entry],
                        )
                if (self.tanium_import_label
                        in data["data"]["x_data_update"]["add"]["labels"]):
                    # Process intel
                    processed_intel = self._process_intel(entity_type, data)
                    intel_document = processed_intel["intel_document"]
                    entity = processed_intel["entity"]
                    # Create external reference and add object labels
                    self._post_operations(entity, intel_document)
                else:
                    entity = self.helper.api.indicator.read(
                        id=data["data"]["x_opencti_id"],
                        customAttributes="""
                        pattern_type
                    """,
                    )
                    intel_document = self._get_by_id(
                        data["data"]["x_opencti_id"],
                        yara=entity is not None
                        and entity["pattern_type"] == "yara",
                    )
                    if intel_document:
                        new_labels = []
                        for label in data["data"]["x_data_update"]["add"][
                                "labels"]:
                            new_labels.append({"value": label})
                        labels = self._get_labels(new_labels)
                        for label in labels:
                            self._query(
                                "put",
                                "/plugin/products/detect3/api/v1/intels/" +
                                str(intel_document["id"]) + "/labels",
                                {"id": label["id"]},
                            )
            elif ("x_data_update" in data["data"]
                  and "remove" in data["data"]["x_data_update"]
                  and "labels" in data["data"]["x_data_update"]["remove"]):
                if (self.tanium_reputation_blacklist_label
                        in data["data"]["x_data_update"]["remove"]["labels"]):
                    if "hashes" in data["data"]:
                        if "SHA-256" in data["data"]["hashes"]:
                            self._query(
                                "post",
                                "/plugin/products/reputation/v3/reputations/custom/delete",
                                [data["data"]["hashes"]["SHA-256"]],
                            )
                        if "SHA-1" in data["data"]["hashes"]:
                            self._query(
                                "post",
                                "/plugin/products/reputation/v3/reputations/custom/delete",
                                [data["data"]["hashes"]["SHA-1"]],
                            )
                        if "MD5" in data["data"]["hashes"]:
                            self._query(
                                "post",
                                "/plugin/products/reputation/v3/reputations/custom/delete",
                                [data["data"]["hashes"]["MD5"]],
                            )
                if (self.tanium_import_label
                        in data["data"]["x_data_update"]["remove"]["labels"]):
                    # Import label has been removed
                    intel_document = self._get_by_id(
                        data["data"]["x_opencti_id"])
                    if intel_document is not None:
                        self._query(
                            "delete",
                            "/plugin/products/detect3/api/v1/intels/" +
                            str(intel_document["id"]),
                        )
                    # Remove external references
                    if entity_type == "indicator":
                        entity = self.helper.api.indicator.read(
                            id=data["data"]["x_opencti_id"])
                    else:
                        entity = self.helper.api.stix_cyber_observable.read(
                            id=data["data"]["x_opencti_id"])
                    if (entity and "externalReferences" in entity
                            and len(entity["externalReferences"]) > 0):
                        for external_reference in entity["externalReferences"]:
                            if external_reference["source_name"] == "Tanium":
                                self.helper.api.external_reference.delete(
                                    external_reference["id"])
                else:
                    intel_document = self._get_by_id(
                        data["data"]["x_opencti_id"])
                    if intel_document:
                        new_labels = []
                        for label in data["data"]["x_data_update"]["remove"][
                                "labels"]:
                            new_labels.append({"value": label})
                        labels = self._get_labels(new_labels)
                        for label in labels:
                            self._query(
                                "delete",
                                "/plugin/products/detect3/api/v1/intels/" +
                                str(intel_document["id"]) + "/labels/" +
                                str(label["id"]),
                            )
            elif ("x_data_update" in data["data"]
                  and "replace" in data["data"]["x_data_update"]):
                if entity_type == "indicator":
                    if "pattern" in data["data"]["x_data_update"]["replace"]:
                        intel_document = self._get_by_id(
                            data["data"]["x_opencti_id"])
                        if intel_document is not None:
                            self._process_intel(entity_type, data,
                                                intel_document)
                    elif ("value" in data["data"]["x_data_update"]["replace"]
                          or "hashes"
                          in data["data"]["x_data_update"]["replace"]):
                        intel_document = self._get_by_id(
                            data["data"]["x_opencti_id"])
                        if intel_document is not None:
                            self._process_intel(entity_type, data,
                                                intel_document)
                    elif ("revoked" in data["data"]["x_data_update"]["replace"]
                          and
                          data["data"]["x_data_update"]["replace"]["revoked"]
                          is True):
                        intel_document = self._get_by_id(
                            data["data"]["x_opencti_id"])
                        if intel_document is not None:
                            self._query(
                                "delete",
                                "/plugin/products/detect3/api/v1/intels/" +
                                str(intel_document["id"]),
                            )
                            # Remove external references
                            if entity_type == "indicator":
                                entity = self.helper.api.indicator.read(
                                    id=data["data"]["x_opencti_id"])
                            else:
                                entity = self.helper.api.stix_cyber_observable.read(
                                    id=data["data"]["x_opencti_id"])
                            if (entity and "externalReferences" in entity
                                    and len(entity["externalReferences"]) > 0):
                                for external_reference in entity[
                                        "externalReferences"]:
                                    if external_reference[
                                            "source_name"] == "Tanium":
                                        self.helper.api.external_reference.delete(
                                            external_reference["id"])
        elif msg.event == "delete":
            intel_document = self._get_by_id(data["data"]["x_opencti_id"])
            if intel_document is not None:
                self._query(
                    "delete",
                    "/plugin/products/detect3/api/v1/intels/" +
                    str(intel_document["id"]),
                )
            if data["data"]["type"] == "file":
                if "hashes" in data["data"]:
                    if "SHA-256" in data["data"]["hashes"]:
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/delete",
                            [data["data"]["hashes"]["SHA-256"]],
                        )
                    if "SHA-1" in data["data"]["hashes"]:
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/delete",
                            [data["data"]["hashes"]["SHA-1"]],
                        )
                    if "MD5" in data["data"]["hashes"]:
                        self._query(
                            "post",
                            "/plugin/products/reputation/v3/reputations/custom/delete",
                            [data["data"]["hashes"]["MD5"]],
                        )
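
    # Added note: start() launches the alert-gathering thread and then
    # registers _process_message as the callback for the OpenCTI live stream.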

    def start(self):
        self.alerts_gatherer = TaniumConnectorAlertsGatherer(
            self.helper,
            self.tanium_url,
            self.tanium_login,
            self.tanium_password,
            self.tanium_ssl_verify,
        )
        self.alerts_gatherer.start()
        self.helper.listen_stream(self._process_message)
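
# Usage sketch (not part of the original example): a stream connector like the
# one above is usually launched from a __main__ guard. The class name
# "TaniumConnector" below is assumed for illustration only.
#
# if __name__ == "__main__":
#     try:
#         connector = TaniumConnector()
#         connector.start()
#     except Exception as e:
#         print(e)
#         time.sleep(10)
#         exit(0)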
Example #12
0
class LastInfoSec:
    def __init__(self):
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.FullLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)

        self.lastinfosec_cti_url = "https://api.client.lastinfosec.com/v2/stix21/getbyminutes/{}?api_key={}&headers=false&platform=opencti"
        self.lastinfosec_cve_url = "https://api.client.lastinfosec.com/v2/stix21/vulnerabilities/getlasthour?api_key={}&headers=false&platform=opencti"
        self.lastinfosec_tactic_url = "https://api.client.lastinfosec.com/v2/stix21/tactic/getlast24hour?api_key={}&headers=false&platform=opencti"
        self.lastinfosec_apikey = get_config_variable(
            "CONFIG_LIS_APIKEY", ["lastinfosec", "api_key"], config)

        self.lastinfosec_cti_enabled = get_config_variable(
            "CONFIG_LIS_CTI_ENABLED", ["lastinfosec", "cti", "is_enabled"],
            config)
        self.lastinfosec_cti_interval = get_config_variable(
            "CONFIG_LIS_CTI_INTERVAL", ["lastinfosec", "cti", "interval"],
            config)

        self.lastinfosec_cve_enabled = get_config_variable(
            "CONFIG_LIS_CVE_ENABLED", ["lastinfosec", "cve", "is_enabled"],
            config)

        self.lastinfosec_tactic_enabled = get_config_variable(
            "CONFIG_LIS_TACTIC_ENABLED",
            ["lastinfosec", "tactic", "is_enabled"], config)

        self.opencti_url = get_config_variable("OPENCTI_URL",
                                               ["opencti", "url"], config)
        self.opencti_id = get_config_variable("OPENCTI_TOKEN",
                                              ["opencti", "token"], config)
        self.update_existing_data = get_config_variable(
            "OPENCTI_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"], config)
        self.proxy_http = get_config_variable("PROXY_HTTP",
                                              ["opencti", "proxy_http"],
                                              config)
        self.proxy_https = get_config_variable("PROXY_HTTPS",
                                               ["opencti", "proxy_https"],
                                               config)

        total_enabled = 0
        if self.lastinfosec_cti_enabled:
            total_enabled += 1
        if self.lastinfosec_cve_enabled:
            total_enabled += 1
        if self.lastinfosec_tactic_enabled:
            total_enabled += 1

        if total_enabled == 0:
            raise Exception("You must enable one feed")
        elif total_enabled > 1:
            raise Exception("You can enable only one feed per connector")

        self.api = OpenCTIApiClient(self.opencti_url, self.opencti_id)
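
    # Illustrative config.yml fragment for the lastinfosec keys read above
    # (placeholder values, added for clarity; the cti interval is in minutes):
    #
    #   lastinfosec:
    #     api_key: "CHANGEME"
    #     cti:
    #       is_enabled: true
    #       interval: 60
    #     cve:
    #       is_enabled: false
    #     tactic:
    #       is_enabled: false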

    def run(self):
        self.helper.log_info("Fetching lastinfosec datasets...")
        if not self.helper.get_run_and_terminate():
            while True:
                time_to_sleep = self.process_data()
                time.sleep(time_to_sleep)
        else:
            self.process_data()

    def process_data(self):
        time_to_sleep = 0
        try:
            if (self.lastinfosec_cti_enabled
                    and self.lastinfosec_cti_url is not None
                    and self.lastinfosec_apikey is not None):
                url = self.lastinfosec_cti_url.format(
                    self.lastinfosec_cti_interval, self.lastinfosec_apikey)
                run_interval = self.lastinfosec_cti_interval * 60
                time_to_sleep = self.fetch_data(url, run_interval)
            elif (self.lastinfosec_cve_enabled
                  and self.lastinfosec_cve_url is not None
                  and self.lastinfosec_apikey is not None):
                url = self.lastinfosec_cve_url.format(self.lastinfosec_apikey)
                run_interval = 3600  # 1 hour, in seconds
                time_to_sleep = self.fetch_data(url, run_interval)
            elif (self.lastinfosec_tactic_enabled
                  and self.lastinfosec_tactic_url is not None
                  and self.lastinfosec_apikey is not None):
                url = self.lastinfosec_tactic_url.format(
                    self.lastinfosec_apikey)
                run_interval = 86400  # 24 hours, in seconds
                time_to_sleep = self.fetch_data(url, run_interval)
            else:
                self.helper.log_info("CTI Feed not configured")
                time.sleep(60)
                exit(0)
        except (KeyboardInterrupt, SystemExit):
            self.helper.log_info("Connector stop")
            exit(0)
        except Exception as e:
            self.helper.log_error("run:" + str(e))
            time.sleep(60)

        return time_to_sleep
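
    # Added worked example of the scheduling arithmetic: with the CTI feed
    # enabled and an interval of 60 (minutes), run_interval is 3600 seconds;
    # if a fetch takes 45 seconds, fetch_data returns roughly 3555 seconds,
    # which run() then sleeps before the next iteration.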

    def fetch_data(self, url: str, run_interval: int):
        # Record the start time and the current timestamp for this run
        start = time.perf_counter()
        time_to_sleep = 0
        timestamp = int(time.time())
        now = datetime.datetime.utcfromtimestamp(timestamp)

        proxy_dic = {}
        if self.proxy_http is not None:
            proxy_dic["http"] = self.proxy_http
        if self.proxy_https is not None:
            proxy_dic["https"] = self.proxy_https

        req = requests.get(url, proxies=proxy_dic)
        if req.status_code == 200:
            lastinfosec_data = req.json()
            if isinstance(lastinfosec_data,
                          list) and len(lastinfosec_data) > 0:
                friendly_name = "LastInfoSec CTI run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                self.push_data(lastinfosec_data, timestamp, work_id)
            stop = time.perf_counter()
            process_time_seconds = stop - start
            # Avoid a negative sleep time if the run overran its interval
            time_to_sleep = max(run_interval - process_time_seconds, 0)
        else:
            message = "Connector error run, storing last_run as {0}".format(
                timestamp)
            self.helper.set_state({"last_run": timestamp})
            self.helper.log_info(message)
            time.sleep(150)

        return time_to_sleep

    def push_data(self, bundles, timestamp, work_id):
        for bundle in bundles:
            sdata = json.dumps(bundle)
            self.helper.send_stix2_bundle(sdata, work_id=work_id)
            # Store the current timestamp as the last run
            message = "Connector successfully run, storing last_run as {0}".format(
                timestamp)
            self.helper.set_state({"last_run": timestamp})
            self.helper.api.work.to_processed(work_id, message)
            self.helper.log_info(message)
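
# A minimal launcher sketch (not in the original listing), following the usual
# OpenCTI connector pattern of a __main__ guard around the connector class.
if __name__ == "__main__":
    try:
        connector = LastInfoSec()
        connector.run()
    except Exception as e:
        print(e)
        time.sleep(10)
        exit(0)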