def main():
    """For every observable of the case, print the other cases that contain
    a similar observable.

    Relies on module-level ``url``, ``api_key`` and ``caseId``.
    """
    api = TheHiveApi(url, api_key)

    # Raw session for the "similar artifacts" endpoint, which thehive4py
    # does not wrap.
    session = requests.Session()
    session.headers.update({"Authorization": "Bearer {}".format(api_key)})

    obs = api.get_case_observables(caseId,
                                   query={},
                                   sort=["-startDate", "+ioc"],
                                   range="all")
    obs = obs.json()
    for ob in obs:
        r = session.get(
            '{}/api/case/artifact/{}/similar?range=all&sort=-startDate'.format(
                url, ob['id']))
        data = r.json()
        if data:
            # Reuse the already-parsed payload instead of calling r.json()
            # a second time.
            print(ob['data'], len(data), 'results')
            for case in data:
                cases = api.find_cases(query=Id(case['_parent']), range="all")
                found = cases.json()
                # Guard against an empty result to avoid an IndexError when
                # the parent case is not visible to this API key.
                if found:
                    print("\t - {} [{}]".format(found[0]['title'],
                                                case['_parent']))
            print()
Пример #2
0
    async def update_case_observable(self, url, api_key, case_id, obs_id, description=None, tlp=None,
                                     is_ioc=False, is_sighted=False, tags=None, tags_mode=None):
        """Update a single observable of a TheHive case.

        :param url: TheHive base URL (``http://`` is prepended if missing).
        :param api_key: TheHive API key.
        :param case_id: id of the case holding the observable.
        :param obs_id: id of the observable to update.
        :param description: new description, if given.
        :param tlp: new TLP level (0-3); ``None`` keeps the current value.
            Defaults to ``None`` (was ``0``) so that TLP:WHITE can actually be
            set - the old ``if tlp:`` guard silently ignored ``tlp=0``.
        :param is_ioc: new IOC flag (``None`` keeps the current value).
        :param is_sighted: new sighted flag (``None`` keeps the current value).
        :param tags: new tag list; with ``tags_mode == "append"`` they are
            added to the existing tags instead of replacing them.
        :raises ValueError: if the observable is not part of the case.
        :raises IOError: if TheHive rejects the update.
        :return: the updated observable as parsed JSON.
        """
        self.logger.info(f'Updating observable {obs_id} in case {case_id} in TheHive...')

        if not url.startswith("http"):
            url = f"http://{url}"

        api = TheHiveApi(url, api_key)
        obs_list = api.get_case_observables(case_id).json()
        # Fail with a clear error instead of a bare IndexError when the id
        # is unknown.
        obs_json = next((o for o in obs_list if o["id"] == obs_id), None)
        if obs_json is None:
            raise ValueError(f"Observable {obs_id} not found in case {case_id}")
        obs = CaseObservable(**obs_json)
        obs.id = obs_id

        if description:
            obs.description = description
        if tlp is not None:
            obs.tlp = tlp
        if is_ioc is not None:
            obs.ioc = is_ioc
        if is_sighted is not None:
            obs.sighted = is_sighted
        if tags is not None:
            if tags_mode == "append":
                tags = obs.tags + tags
            obs.tags = tags

        r = api.update_case_observables(obs)

        if r.status_code == 200:
            return r.json()
        raise IOError(r.text)
Пример #3
0
def main():
    """Export all TheHive cases (sheet 1) and their observables (sheet 2)
    to an Excel workbook.

    Relies on module-level ``url``, ``api_key``, ``excel_path`` and ``query``.
    """
    api = TheHiveApi(url, api_key)
    cases = api.find_cases(range="all").json()

    # strings_to_urls disabled so observable values are never turned into
    # clickable links by xlsxwriter.
    workbook = xlsxwriter.Workbook(excel_path, {'strings_to_urls': False})
    worksheet = workbook.add_worksheet()

    bold = workbook.add_format({"bold": True})
    case_headers = ("TheHive ID", "CaseID", "Status", "Title",
                    "Date", "Owner", "TLP", "Tag")
    for col, title in enumerate(case_headers):
        worksheet.write(0, col, title, bold)

    row = 1
    for item in cases:
        # TheHive 3 exposes "id"; newer payloads use "_id".
        item_id = item["id"] if "id" in item else item["_id"]
        worksheet.write(row, 0, item_id)
        worksheet.write(row, 1, item["caseId"])
        worksheet.write(row, 2, item["status"])
        worksheet.write(row, 3, item["title"])
        # startDate is epoch milliseconds.
        worksheet.write(
            row,
            4,
            datetime.datetime.fromtimestamp(
                item["startDate"] / 1000).strftime("%Y-%m-%d %H:%M:%S"),
        )
        worksheet.write(row, 5, item["owner"])
        worksheet.write(row, 6, item["tlp"])
        worksheet.write(row, 7, ",".join(item["tags"]))
        row += 1

    worksheet2 = workbook.add_worksheet()
    obs_headers = ("TheHive ID", "Observable DataType", "Observable Value", "IOC")
    for col, title in enumerate(obs_headers):
        worksheet2.write(0, col, title)

    row = 1
    for item in cases:
        item_id = item["id"] if "id" in item else item["_id"]
        obs = api.get_case_observables(item_id,
                                       query=query,
                                       sort=["-startDate", "+ioc"],
                                       range="all").json()
        for ob in obs:
            worksheet2.write(row, 0, item["caseId"])
            worksheet2.write(row, 1, ob["dataType"])
            if ob["dataType"] != "file":
                worksheet2.write(row, 2, ob["data"])
            else:
                # File observables carry their hashes instead of inline data.
                worksheet2.write(row, 2, ",".join(ob["attachment"]["hashes"]))
            worksheet2.write(row, 3, bool(ob["ioc"]))
            row += 1
    workbook.close()
Пример #4
0
class TheHive(AppBase):
    """
    Walkoff/Shuffle app wrapping TheHive's REST API, via thehive4py plus raw
    ``requests`` calls for endpoints thehive4py does not cover.
    Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes.
    """

    __version__ = "1.1.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    def __connect_thehive(self, url, apikey, organisation, version=None):
        """Create the thehive4py client on ``self.thehive``.

        ``version`` is forwarded to TheHiveApi when given; previously this
        keyword was accepted nowhere, so callers passing ``version=4``
        crashed with a TypeError.
        """
        kwargs = {"cert": False}
        if organisation:
            kwargs["organisation"] = organisation
        if version is not None:
            kwargs["version"] = version
        self.thehive = TheHiveApi(url, apikey, **kwargs)

    @staticmethod
    def _split_tags(tags):
        """Parse a comma-separated tag string into a list ([] for falsy input)."""
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    async def search_case_title(self, apikey, url, organisation, title_query):
        """Return (as response text) all cases whose title contains title_query."""
        self.__connect_thehive(url, apikey, organisation)

        response = self.thehive.find_cases(
            query=ContainsString("title", title_query),
            range="all",
            sort=[])

        return response.text

    async def custom_search(self,
                            apikey,
                            url,
                            organisation,
                            search_for,
                            custom_query,
                            range="all"):
        """Run a raw query against alerts (``search_for == "alert"``) or cases.

        ``custom_query`` may be a JSON string or an already-parsed query
        object.  Raises IOError on a non-2xx response.
        """
        self.__connect_thehive(url, apikey, organisation)

        try:
            custom_query = json.loads(custom_query)
        except (ValueError, TypeError):
            # Not JSON text - assume it is already a usable query object.
            pass

        # BUG FIX: the `range` parameter was previously ignored ("all" was
        # hard-coded in both calls below).
        if search_for == "alert":
            response = self.thehive.find_alerts(query=custom_query,
                                                range=range,
                                                sort=[])
        else:
            response = self.thehive.find_cases(query=custom_query,
                                               range=range,
                                               sort=[])

        if response.status_code in (200, 201, 202):
            return response.text
        raise IOError(response.text)

    async def add_case_artifact(
        self,
        apikey,
        url,
        organisation,
        case_id,
        data,
        datatype,
        tags=None,
        tlp=None,
        ioc=None,
        sighted=None,
        description="",
    ):
        """Create an observable (artifact) on a case; returns the response text."""
        self.__connect_thehive(url, apikey, organisation)

        tlp = int(tlp) if tlp else 2
        # str() guards the None defaults, which used to crash on .lower().
        ioc = str(ioc).lower() == "true"
        sighted = str(sighted).lower() == "true"
        if not description:
            description = "Created by shuffle"

        # The old parser returned [] for a single comma-free tag, silently
        # dropping it; _split_tags keeps it.
        tags = self._split_tags(tags)

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            tags=tags,
            message=description,
        )

        return self.thehive.create_case_observable(case_id, item).text

    async def search_alert_title(self,
                                 apikey,
                                 url,
                                 organisation,
                                 title_query,
                                 search_range="0-25"):
        """Return alerts whose title contains title_query.

        ``search_range`` follows TheHive's paging syntax ("0-25", "all", ...).
        """
        self.__connect_thehive(url, apikey, organisation)

        if search_range == "":
            search_range = "0-25"

        response = self.thehive.find_alerts(
            query=ContainsString("title", title_query),
            range=search_range,
            sort=[])

        return response.text

    async def create_case(
        self,
        apikey,
        url,
        organisation,
        template,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        """Create a case.

        ``tlp`` must be 0-3 and ``severity`` 0-2 (ints or digit strings);
        ``tags`` is a comma-separated string.  Returns the API response text
        or a human-readable error string on invalid input.
        """
        self.__connect_thehive(url, apikey, organisation)
        tags = self._split_tags(tags)

        # Fall back to defaults when empty values are passed in.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                # Was "% tlp" - reported the wrong variable.
                return "Severity needs to be a number from 0-2, not %s" % severity
            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            # Was "% tlp" - reported the wrong variable.
            return "Severity needs to be a number from 0-2, not %d" % severity

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            template=template if template else None,
        )

        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        organisation,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
        artifacts="",
    ):
        """Create an alert.

        ``artifacts`` may be a JSON list of objects with keys "data_type",
        "data" and optionally "message" / "is_private_ip".  ``tlp`` must be
        0-3 and ``severity`` 1-3.
        """
        self.__connect_thehive(url, apikey, organisation)
        tags = self._split_tags(tags)

        # Fall back to defaults when empty values are passed in.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity
            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        all_artifacts = []
        if artifacts != "":
            if isinstance(artifacts, str):
                try:
                    artifacts = json.loads(artifacts)
                except ValueError:
                    print("[ERROR] Error in parsing artifacts!")

            if isinstance(artifacts, list):
                for item in artifacts:
                    try:
                        artifact = thehive4py.models.AlertArtifact(
                            dataType=item["data_type"],
                            data=item["data"],
                        )

                        # The old code appended to an undefined `message`
                        # variable and item-assigned into the artifact object;
                        # build the message first, then set the attribute.
                        message = item.get("message", "")
                        if item["data_type"] == "ip" and item.get("is_private_ip"):
                            message += " IP is private."
                        if message:
                            artifact.message = message

                        all_artifacts.append(artifact)
                    except KeyError as e:
                        print("Error in artifacts: %s" % e)

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
            artifacts=all_artifacts,
        )

        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(
        self,
        apikey,
        url,
        organisation,
        alert_id,
        dataType,
        data,
        message=None,
        tlp="2",
        ioc="False",
        sighted="False",
        ignoreSimilarity="False",
        tags=None,
    ):
        """Attach an artifact to an alert (needs TheHive v4 API).

        Raises ConnectionError on a non-2xx response.
        """
        # version=4 now works: __connect_thehive accepts and forwards it.
        self.__connect_thehive(url, apikey, organisation, version=4)

        tlp = int(tlp) if tlp else 2

        # str() guards non-string inputs (e.g. real booleans).
        ioc = str(ioc).lower().strip() == "true"
        sighted = str(sighted).lower().strip() == "true"
        ignoreSimilarity = str(ignoreSimilarity).lower().strip() == "true"

        tags = [x.strip() for x in tags.split(",")] if tags else []

        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags,
        )

        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

        if ret.status_code > 299:
            raise ConnectionError(ret.text)

        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, organisation, field_type, cur_id):
        """Fetch a single item by id; field_type selects the endpoint."""
        self.__connect_thehive(url, apikey, organisation)

        field_type = field_type.lower()
        if field_type == "alert":
            # similarity=1 makes TheHive include similar-case statistics.
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif field_type == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type == "case_template":
            # Was a copy-paste of the case_tasks branch; fetch the template.
            ret = self.thehive.get_case_template(cur_id)
        elif field_type == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)

        return ret.text

    async def close_alert(self, apikey, url, organisation, alert_id):
        """Mark an alert as read ("closing" it) and return the response text."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, organisation, alert_id):
        """Mark an alert as unread (reopening it) and return the response text."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self,
                                     apikey,
                                     url,
                                     organisation,
                                     alert_id,
                                     case_template=None):
        """Promote an alert to a case, optionally from a case template."""
        self.__connect_thehive(url, apikey, organisation)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, organisation, alert_id,
                                    case_id):
        """Merge an alert into an existing case (raw endpoint, not in thehive4py)."""
        self.__connect_thehive(url, apikey, organisation)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    async def update_field(self, apikey, url, organisation, field_type, cur_id,
                           field, data):
        """Update one field of an alert.

        A ``data`` value starting with "%s" means "append the remainder to the
        field's current value".  Only alerts are supported.  Returns the HTTP
        status code as a string.
        """
        if field_type.lower() != "alert":
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

        newdata = {}
        if data.startswith("%s"):
            # BUG FIX: the old code used self.thehive without ever
            # connecting; connect before reading the current value.
            self.__connect_thehive(url, apikey, organisation)
            ticket = self.thehive.get_alert(cur_id)
            if ticket.status_code != 200:
                # Best effort: fall through and attempt the update anyway.
                pass
            newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
        else:
            newdata[field] = data

        url = "%s/api/alert/%s" % (url, cur_id)
        if field == "status":
            # Status changes go through dedicated read/unread endpoints.
            if data == "New" or data == "Updated":
                url = "%s/markAsUnread" % url
            elif data == "Ignored":
                url = "%s/markAsRead" % url

            ret = requests.post(
                url,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": "Bearer %s" % apikey,
                },
            )
        else:
            ret = requests.patch(
                url,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": "Bearer %s" % apikey,
                },
                json=newdata,
            )

        return str(ret.status_code)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def delete_alert_artifact(self, apikey, url, organisation,
                                    artifact_id):
        """Delete an alert artifact (needs TheHive v4 API)."""
        self.__connect_thehive(url, apikey, organisation, version=4)
        return self.thehive.delete_alert_artifact(artifact_id).text

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, organisation, cortex_id,
                           analyzer_id, artifact_id):
        """Run a Cortex analyzer on an artifact and return the response text."""
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self,
                              apikey,
                              url,
                              organisation,
                              task_id,
                              message,
                              filedata=None):
        """Add a log entry (optionally with an attachment) to a case task."""
        # Avoid the shared mutable default dict, and the KeyError the old
        # filedata["success"] lookup raised when no file metadata was given.
        filedata = filedata or {}
        if not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        files = {}
        if filedata.get("data"):
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        # TheHive expects the JSON payload in the "_json" form field.
        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, organisation,
                                          case_id, tags, filedata):
        """Upload a file observable to a case; tags is a comma-separated string."""
        if not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        tags = self._split_tags(tags)

        files = {}
        if filedata.get("data"):
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": json.dumps(outerarray)}
        # NOTE(review): TLS verification is disabled here - confirm this is
        # intentional for the deployment.
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
            verify=False,
        )
        return response.text

    # Get all artifacts of a given case
    async def get_case_artifacts(
        self,
        apikey,
        url,
        organisation,
        case_id,
        dataType,
    ):
        """Return all observables of a case as pretty-printed JSON text,
        optionally filtered by dataType."""
        self.__connect_thehive(url, apikey, organisation)

        query = And(Eq("dataType", dataType)) if dataType else {}

        response = self.thehive.get_case_observables(
            case_id, query=query, sort=["-startDate", "+ioc"], range="all")

        if response.status_code != 200:
            return f"Failure: {response.status_code}/{response.text}"

        # Renamed from `list`, which shadowed the builtin.
        observables = response.json()
        if observables:
            return json.dumps(observables, indent=4, sort_keys=True)
        return json.dumps(
            {
                "status": 200,
                "message": "No observable results"
            },
            indent=4,
            sort_keys=True,
        )

    async def close_case(
        self,
        apikey,
        url,
        organisation,
        id,
        resolution_status="",
        impact_status="",
        summary="",
    ):
        """Resolve a case, setting summary, resolution and impact status."""
        self.__connect_thehive(url, apikey, organisation)
        case = self.thehive.case(id)
        case.status = "Resolved"
        case.summary = summary
        case.resolutionStatus = resolution_status
        case.impactStatus = impact_status

        result = self.thehive.update_case(
            case,
            fields=[
                "status",
                "summary",
                "resolutionStatus",
                "impactStatus",
            ],
        )

        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Update TheHive Case
    async def update_case(
        self,
        apikey,
        url,
        organisation,
        id,
        title="",
        description="",
        severity=None,
        owner="",
        flag=None,
        tlp=None,
        pap=None,
        tags="",
        status="",
        custom_fields=None,
        custom_json=None,
    ):
        """Update a case, keeping any field that is not explicitly overridden.

        ``custom_fields`` and ``custom_json`` are JSON strings; custom fields
        given by the caller are merged on top of the case's existing ones.
        """
        self.__connect_thehive(url, apikey, organisation)

        # Start from the current case so unspecified fields keep their value.
        case = self.thehive.get_case(id).json()

        case_title = title if title else case["title"]
        case_description = description if description else case["description"]
        case_severity = int(severity) if severity else case["severity"]
        case_owner = owner if owner else case["owner"]
        case_flag = ((False if flag.lower() == "false" else True)
                     if flag else case["flag"])
        case_tlp = int(tlp) if tlp else case["tlp"]
        case_pap = int(pap) if pap else case["pap"]
        # (Duplicate assignment removed.)
        case_tags = tags.split(",") if tags else case["tags"]
        case_status = status if status else case["status"]

        # Rebuild the existing custom fields; each value is a dict whose
        # first key is the field's type name.
        customfields = CustomFieldHelper()
        if case["customFields"]:
            for key, value in case["customFields"].items():
                field_type = next(iter(value))
                field_value = list(value.items())[0][1]
                if field_type == "integer":
                    customfields.add_integer(key, field_value)
                elif field_type == "string":
                    customfields.add_string(key, field_value)
                elif field_type == "boolean":
                    customfields.add_boolean(key, field_value)
                elif field_type == "float":
                    customfields.add_float(key, field_value)
                else:
                    print(
                        f'The value type "{value}" of the field {key} is not supported by the function.'
                    )

        # Apply caller-supplied custom fields on top.  bool is checked before
        # int because isinstance(True, int) is True.
        custom_fields = json.loads(custom_fields) if custom_fields else {}
        for key, value in custom_fields.items():
            if isinstance(value, bool):
                customfields.add_boolean(key, value)
            elif isinstance(value, int):
                customfields.add_integer(key, value)
            elif isinstance(value, str):
                customfields.add_string(key, value)
            elif isinstance(value, float):
                customfields.add_float(key, value)
            else:
                print(
                    f'The value type "{value}" of the field {key} is not supported by the function.'
                )

        customfields = customfields.build()

        custom_json = json.loads(custom_json) if custom_json else {}

        # Prepare the fields to be updated
        case = Case(
            id=id,
            title=case_title,
            description=case_description,
            severity=case_severity,
            owner=case_owner,
            flag=case_flag,
            tlp=case_tlp,
            pap=case_pap,
            tags=case_tags,
            status=case_status,
            customFields=customfields,
            json=custom_json,
        )

        result = self.thehive.update_case(
            case,
            fields=[
                "title",
                "description",
                "severity",
                "owner",
                "flag",
                "tlp",
                "pap",
                "tags",
                "customFields",
                "status",
            ],
        )

        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Get TheHive Organisations
    async def get_organisations(
        self,
        apikey,
        url,
        organisation,
    ):
        """List organisations (raw endpoint, not wrapped by thehive4py)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }

        response = requests.get(
            f"{url}/api/organisation",
            headers=headers,
            verify=False,
        )

        return response.text

    # Create TheHive Organisation
    async def create_organisation(
        self,
        apikey,
        url,
        organisation,
        name,
        description,
    ):
        """Create an organisation with the given name and description."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }

        data = {"name": name, "description": description}

        response = requests.post(
            f"{url}/api/organisation",
            headers=headers,
            json=data,
            verify=False,
        )

        return response.text

    # Create User in TheHive
    async def create_user(
        self,
        apikey,
        url,
        organisation,
        login,
        name,
        profile,
    ):
        """Create a user in the given organisation (TheHive v1 user API)."""
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }

        data = {
            "login": login,
            "name": name,
            "profile": profile,
            "organisation": organisation,
        }

        response = requests.post(
            f"{url}/api/v1/user",
            headers=headers,
            json=data,
            verify=False,
        )

        return response.text
Пример #5
0
class TheHive:
    """OpenCTI connector that periodically imports TheHive cases (plus
    their observables and sightings) into OpenCTI as STIX2 bundles."""

    def __init__(self):
        """Read config.yml / environment variables, then build the OpenCTI
        helper, the connector's identity and the TheHive API client."""
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        # NOTE(review): the handle from open() is never explicitly closed.
        config = (yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.thehive_url = get_config_variable("THEHIVE_URL",
                                               ["thehive", "url"], config)
        self.thehive_api_key = get_config_variable("THEHIVE_API_KEY",
                                                   ["thehive", "api_key"],
                                                   config)
        self.thehive_check_ssl = get_config_variable("THEHIVE_CHECK_SSL",
                                                     ["thehive", "check_ssl"],
                                                     config, False, True)
        self.thehive_organization_name = get_config_variable(
            "THEHIVE_ORGANIZATION_NAME", ["thehive", "organization_name"],
            config)
        # Defaults to "now" when no import-from date is configured.
        self.thehive_import_from_date = get_config_variable(
            "THEHIVE_IMPORT_FROM_DATE",
            ["thehive", "import_from_date"],
            config,
            False,
            datetime.utcfromtimestamp(int(
                time.time())).strftime("%Y-%m-%d %H:%M:%S"),
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        # Organization identity stamped as created_by_ref on every object.
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name=self.thehive_organization_name,
            description=self.thehive_organization_name,
        )
        self.thehive_api = TheHiveApi(self.thehive_url,
                                      self.thehive_api_key,
                                      cert=self.thehive_check_ssl)

    def generate_case_bundle(self, case):
        """Convert one TheHive case (JSON dict) into a serialized STIX2
        bundle: an incident, its observables, related-to relationships,
        and sightings for observables flagged as sighted."""
        # TheHive TLP level (0-3) -> STIX marking; anything else falls
        # back to TLP:WHITE below.
        markings = []
        if case["tlp"] == 0:
            markings.append(TLP_WHITE)
        if case["tlp"] == 1:
            markings.append(TLP_GREEN)
        if case["tlp"] == 2:
            markings.append(TLP_AMBER)
        if case["tlp"] == 3:
            markings.append(TLP_RED)
        if len(markings) == 0:
            markings.append(TLP_WHITE)
        bundle_objects = []
        # TheHive timestamps are epoch milliseconds; convert to ISO8601.
        incident = StixXOpenCTIIncident(
            id=OpenCTIStix2Utils.generate_random_stix_id("x-opencti-incident"),
            name=case["title"],
            description=case["description"],
            first_seen=datetime.utcfromtimestamp(
                int(case["createdAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"),
            last_seen=datetime.utcfromtimestamp(
                int(case["updatedAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"),
            object_marking_refs=markings,
            labels=case["tags"] if "tags" in case else [],
            created_by_ref=self.identity["standard_id"],
        )
        bundle_objects.append(incident)
        # Get observables
        observables = self.thehive_api.get_case_observables(
            case_id=case["id"]).json()
        for observable in observables:
            # Disambiguate bare "hash" observables by digest length.
            if observable["dataType"] == "hash":
                if len(observable["data"]) == 32:
                    data_type = "file_md5"
                elif len(observable["data"]) == 40:
                    data_type = "file_sha1"
                elif len(observable["data"]) == 64:
                    data_type = "file_sha256"
                else:
                    data_type = "unknown"
            else:
                data_type = observable["dataType"]
            # NOTE(review): raises KeyError if data_type is not a key of
            # OBSERVABLES_MAPPING (mapping defined elsewhere) — confirm.
            observable_key = OBSERVABLES_MAPPING[data_type]
            if observable_key is not None:
                stix_observable = SimpleObservable(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "x-opencti-simple-observable"),
                    key=observable_key,
                    value=observable["data"],
                    description=observable["message"],
                    x_opencti_score=80 if observable["ioc"] else 50,
                    object_marking_refs=markings,
                    labels=observable["tags"] if "tags" in observable else [],
                    created_by_ref=self.identity["standard_id"],
                    x_opencti_create_indicator=observable["ioc"],
                )
                stix_observable_relation = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="related-to",
                    created_by_ref=self.identity["standard_id"],
                    source_ref=stix_observable.id,
                    target_ref=incident.id,
                    object_marking_refs=markings,
                )
                bundle_objects.append(stix_observable)
                bundle_objects.append(stix_observable_relation)
                if observable["sighted"]:
                    # Placeholder indicator id: the real reference is
                    # carried in x_opencti_sighting_of_ref below.
                    fake_indicator_id = (
                        "indicator--c1034564-a9fb-429b-a1c1-c80116cc8e1e")
                    # Sighting window: startDate .. startDate + 1 hour.
                    stix_sighting = Sighting(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "sighting"),
                        first_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] /
                                1000)).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        last_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000 +
                                3600)).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        where_sighted_refs=[self.identity["standard_id"]],
                        sighting_of_ref=fake_indicator_id,
                        custom_properties={
                            "x_opencti_sighting_of_ref": stix_observable.id
                        },
                    )
                    bundle_objects.append(stix_sighting)
        bundle = Bundle(objects=bundle_objects).serialize()
        return bundle

    def run(self):
        """Poll TheHive every 60 seconds for cases updated since the last
        run, push each as a STIX bundle, and persist the new watermark."""
        self.helper.log_info("Starting TheHive Connector...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_case_date" in current_state:
                    last_case_date = current_state["last_case_date"]
                    self.helper.log_info(
                        "Connector last_case_date: " +
                        datetime.utcfromtimestamp(last_case_date).strftime(
                            "%Y-%m-%d %H:%M:%S"))
                else:
                    # First run: start from the configured import date.
                    last_case_date = parse(
                        self.thehive_import_from_date).timestamp()
                    self.helper.log_info("Connector has no last_case_date")

                self.helper.log_info("Get cases since last run (" +
                                     datetime.utcfromtimestamp(last_case_date).
                                     strftime("%Y-%m-%d %H:%M:%S") + ")")
                # Cases updated since the watermark, or with tasks or
                # artifacts created since then (timestamps in epoch ms).
                query = Or(
                    Gt("updatedAt", int(last_case_date * 1000)),
                    Child("case_task",
                          Gt("createdAt", int(last_case_date * 1000))),
                    Child("case_artifact",
                          Gt("createdAt", int(last_case_date * 1000))),
                )
                # NOTE(review): only the first 100 matches are fetched.
                cases = self.thehive_api.find_cases(query=query,
                                                    sort="updatedAt",
                                                    range="0-100").json()
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "TheHive run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                try:
                    for case in cases:
                        stix_bundle = self.generate_case_bundle(case)
                        self.helper.send_stix2_bundle(
                            stix_bundle,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as a last run
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp)
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
                current_state = self.helper.get_state()
                if current_state is None:
                    current_state = {"last_case_date": timestamp}
                else:
                    current_state["last_case_date"] = timestamp
                self.helper.set_state(current_state)
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                # Log and keep polling on any other failure.
                self.helper.log_error(str(e))
                time.sleep(60)
Пример #6
0
class Reporter(Responder):
    def __init__(self):
        """Read responder configuration and build the TheHive API client."""
        Responder.__init__(self)
        # TheHive endpoint passed verbatim to TheHiveApi.
        self.thehive_instance = self.get_param('config.thehive_instance',
                                               'localhost:9000')
        # API key used to authenticate against TheHive.
        self.thehive_api = self.get_param('config.thehive_api',
                                          'YOUR_KEY_HERE')
        self.api = TheHiveApi(self.thehive_instance, self.thehive_api)
        # Location for temporary report files (no default configured).
        self.tmpPath = self.get_param('config.tmp_file_location')

    def getSummary(self, severity):
        """Map a numeric TheHive severity (1-3) to its display label.

        Any other value yields "unknown".
        """
        labels = {1: "Low", 2: "Medium", 3: "High"}
        return labels.get(severity, "unknown")

    def getTLP(self, tlp):
        """Map a numeric TLP level (0-3) to a two-item list of markdown:
        an inline base64 badge image and a bold caption. Any other value
        yields the string "unknown"."""

        # Summary Fields - TLP
        if (tlp == 0):
            tlp = [
                '![TLP:WHITE](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo5M0JERkQzMDg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo5M0JERkQzMTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjkzQkRGRDJFODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjkzQkRGRDJGODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+49AQNQAACDpJREFUeNrsmWdoVFkUx8+M0dhb7L1jjw3sBUXBBmrAgHXVWFaCYhcRFesHQUVWDYkximj8ItGoWDb2ih17jYkEy9rFXvf+Dlx3jJk3kzf7abMHhkzevPt/957zP/V5vn//LlaeP38evmjRouitW7dGZ2Zm1gsLC/Oay//ckIfky5cvntKlSz/s1q3brrlz5yY2atToL/ubxypt//79jUaNGrWmePHiHdu0aSMVK1aUvCwej0devnwp58+flxs3bmRMnz590tSpU7f9UFpaWlrjqKiotIEDB5bv3LmzfPv2Tb5+/Sp5Xbxerxhvk7t378ry5ctlypQpw2bPnr3R8+TJk/zt27f/s0uXLp1R2Js3b+R/+VkKFiwojx49kiVLlrxISUlpm+/z588jzIUJ0dHR8vbt26Cpmz9/fgkPD1dL2A/XsA5MDdW6fPLly6fP8o27bjHB8sVEgsU18U0iIiLE6KrQwYMHxWP+STMK69a6dWv58OFDUFr/+PGjXL9+HV+XFy9e6MPZSKlSpaR+/frSoEEDVWgweNYIBQoU0O+G+WpVDMgh2SzxtXDhwvLp06dchQ2MCAZ7BPPVq1d6vUSJEopZsmRJVYhRRkAslA2OcdObYQaoVtmyZQNuBmtxsGPHjgnarlatmuDODRs2VEW+f/9eFXno0CHZvn27dO3aVTp27KgHdWKeZcDZs2fl5MmT+r1q1apSpkwZVdzNmzfl8ePHUrlyZenevbsqMZAxMAJ7unPnju6VkANm
+fLl9ffLly/L3r171RAmO0rt2rUV04l5nKFYsWLsNyLMKOI7FnFawMFYtGrVKv2elJQkHTp0yPHeOXPmyNGjR2XGjBly6dIlGT16tK7JySg8lwNt2LBBTHqXBQsWiImtUqhQoZ/uS09Pl3Xr1smaNWv0d4yBkZxcMTk5WR48eCCxsbEyYMAAxfcVU16JKa30TFWqVBGTCHWPTgYGGw8KWIdxI7JixQpp2rSpKsSfwqxwKO6LjIyUlStXqkEsjhUYRUrnd+Lpvn37pGfPnr8oDKlVq5YsXLhQdu7cKRcvXpTU1FRlSU4M4zmrV69WRuIVMTExvygM4RoGPXLkiLprXFycrrXxzonF3kC+jGY3b94szZs3l4SEBLViMMJ98fHx0qRJE7U6OL4Pxqr8Pm3aNP0EI/Xq1ZPdu3dLRkaGHD9+XF0we7xlr40bN5aNGzdK0aJFA2KaulQ2bdqksXjLli2/YGYXJUCgQEqcevbsmdLYjbDu6dOnigOePdyePXukU6dOMmbMmFzhwaDExESNnQR2y2DiLeGAOIgb51ZY8/r1a7ly5cqPpOTX+wKx5cCBAzJu3DilsBshQ40fP14DMngckix07949MYWiK0zCRJ8+fVRxvgfkf1gbiC05Ce5OHD58+HBAF/U6xTIYhuX69esXUp3EenDAw03JXi1atNCM5lYGDx6sbkp2Jj4S9MHu3bu3a0xiKlgPHz50DENeJ5ZlZWVpaVGhQoWQlEaqr169uuLhopmZmWK6kJAwiVuUALg+mCiQa76x003lT43J/lwrDWZQH/0bQjFJmof6lBl16tQJCQ9GUF+SgW0BW6NGjZD3iXHZZ/ZsH3RMI8MFCoq5OaStBamFQmGEr2F9MW2iCUU4b6A20OtUAZOOaT/+DaE9KlKkiB6SoHv//v2QMWEXWOwVbDqHUIXzUqo4FfteJ5ZVqlRJDxdsD+lPWE+cAA/ccuXKadsUihAfCR+4KMmApHL16tWQlQYGHYJTW+nINOIQByYNhyJU3eCAR7NPwKZaD8UYdAd4Aqyg6SYWkfXoGNzKuXPnlK1guVIa9CQOtWrVSludUIT1LVu2VDwOSEZG6CfdCImEApd2DTz2ajPfsmXLXO+TtdSAxDVX7onACkoDpgX0c26Edbdv39Z+FTw7n+rbt69ukpottzJ58mQdQ5GBwbJ7ZbICWxgo5FbWrl2rLGVyY/fpSmm20R4xYoTMnz9fpwK5kW3btunkgvXgWOtxUFyVYpJmnfYnWJk0aZKcOnVK1/m6t93r8OHDZd68edqDBiv0ngwEsu/Tb9Y2VJxgaF6aQjGnVMs1WiGsCvC7d++kXbt2jsUfgZl7UTS9JTGCa9mnodRVTDW4lxjCVMRf2QCDRo4cqfP6sWPH5jgh5n/aPZpvZvrXrl2TZs2aaezzl0xondavX6/7JEH5G0ja1urEiRPvPKZeSp81a1ZNMoalur9pByme6QGbGzRokPTo0UOzFumeNolMy4gHK6PUIUOG6AjGie7EIsqRlJQUnZHxJowWiwOwDtcmaVDxEyr4cDCnWgrFc8+uXbs0tDChAZe9wiL2CVthOGTo1avXjzVOIyHW8p7AY5h2buLEiS2YwGZnQ07FJOCMuY3Gf7Qw9oF8KAHatm2r1sYITobwLXzBIPvBDmol9gKb8IC6devqWIgAnZsROgZhEgImisIotk6E/SQOmBloamt7cdYvXrw4yxMVFbXEZKOZQ4cODfrFip29wwQyGQeEiTCOv8HO3f2Nvn3jCoe3ynfzgsW+qPHtHuw8D8xgXwJhgDNnzsjp06dTvTExMXEXLly4b6cEwQgKsVajTmLGZat9rrtRmC2oMQQYWJ+PxXP7RgqlYNTsmIHeXWQ3JnvbsWMHWT9eXxbHxsb+lpycnMR8nxcaAIfyGu6/IvYtmX0/YpiZYGLhGFUaWh82bNjvJnAu79+/fziBOFD/lRcUhl5u3bqlpZapINampqaOr1mz5mePr2IM2yKXLl06
xbQSXYyGI4gFeVWMXjzGjd+asHPeVAp/zJw5c4edCP8twAAqcand2ZAiKQAAAABJRU5ErkJggg==)',
                '**TLP:WHITE - Disclosure is not limited.**'
            ]
        elif (tlp == 1):
            tlp = [
                '![TLP:GREEN](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo4NjQ1RkVGRDg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo4NjQ1RkVGRTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjg2NDVGRUZCODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjg2NDVGRUZDODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+I9OQdgAAB7xJREFUeNrsWWtsVEUU/u69e3fbbbvdtjxKW6gFjNUiQomBmgBaEoqNJfzAxMakEo0YH5UUEzFpDEL4gUUioAFRQ4JE0h8iKQ/9YSuiqKjFUqutD7DYggUKbXe3j32vc2Y7621pd+/u1h9a53aau3Nnvnvmm3POnDNXCgQCEKW1tTWrurp648mTJ8sNBkNWQkICJmvxer3oZ6WwsPCjrVu31pSWlp4TzyRB2s6dO0u2bdv2dkFBwayioiKkp6eDETdpSfP7/UQampubcebMmUFG2uYDBw68ZjQag6Tt27evZMOGDcerqqrU+fPnc5Z9Ph+0WjjZiiRJvBJJ169fR01NDZYvX76ltrb2FamlpWXqqlWrPi8vL89fsGABBgYG8H8ZWYg4h8OBLVu2+Hbt2lWsdHd3v8gYXbt69er/LGGKokBVVV7pnioVvZZEVmexWGicfOLEiRSFqd6ekpKS6ZmZmfyh3iJeKMsyV2Ntm/gdbdGOFzUeXJPJBIOioveaDZd+7sDFtt9xpb0L/b2DTHtMsFhTIMkS/D6/Lh9ntVpRX19vNdhstvyMjAzeqHditEEkJiQGhRreLALs8nl9cLqcGBoa4gugd5KESeSbzWYkmBI4phhLOC6XC0POIXg8Hl2YTCGYVhnx47dtOPVlAzrMbfDe0Q/5NgJkf53M5L6xYq7/bqwoXoG582ZzuQP+QFjSkpOTSc5pBqayPiakqldVU1JSkJKcwlV89BjVoCIxMZE/tzvs3NwjTZIw
aEwqJZWPFwsgChFIoQ9h9g/2w2F3wB/wj4tL/b0uL95/5z18m/Qp0l8NIKuYZBvZz+XsQ/uJL/D6jq/xwLkyrCkvg0/xhdU6eidTlICsezdhV3paOqypVq4VxDxNeHSldiJU9I1EmCXFginpU/hkiQyqY2Gy1wf7ZkwZc8GE7/I4vXhzz140rWxAfkMAM1YSkUxTMLIaWQiasxaY+4UXDbOP4sC+g5Ch8LlFtAq95pNqTeXmI8iK1J/6kVbSRMfqT22k7qRhgig9fsWUYOILMpamGRQDDr17GJ2PNOOOrcNOnGvuGDIOP1OMwF1vA00LT6Pu8HHuciIVWQ9h5kQzksxJuv2edpJEHAmiJYXuyRQ5oQhEjUnmSoRrMekdTV/9gOaZpzF3M4vo9W4+w/X2PcBnQydx8ad2qEY1PtJoRUnAmINEdpE/Gl0Iczwz07OQyUnJodCBZPR5/Pi08RNMe4lbcnR4ZK6JzPw3unHq81NQJCV20kg4I9uFKLiLNTsg06Px5LO0YQppS6yYYgcXGkzkdV26iq5pvyCtKOizopaT1fRS4IL0A3qv28L6tvCksYtUNda4K/QSJgCRr50wtcWbpglMwrv8xxUEFrqhxIhFkpjMbFed04Orl6+Fzbsjmic517jzOHbJijxil4t3IYSGCfPs6b0JNTdeOVmZCfTdjEPTJEiYkCIFsaJ1+lFnE8oEAMkcLA6fRlG+3zchEyIcsQjR7sLjbVACh/CtqWnwdsQpJ/27wjYEqyWsjBE1zeP2xO17aDylQGKy4ugpHhMlTLfbHUq1smdmIdBiQDxL7GFw6oVUTM+ZFjYPlyOtppsh6c35xsVgk/N4Pbfkk/Fgkia43K7QKWv27CxM7ZgNewsgx4BJlt3bAMwaykdGZlrspIVOMAf64/JvNH60O6O2WM2USBsYHOBk0T0PjRJVLL2zGFd3RB+nScPZQc9OCfcXFSMgxeHThICDg4P8lCFSXjZWqEGTo1MPrVYJ7XP0O6LHlOTgWIdjBCZp7pLiezHn3CJc2s92/Sh9/4WXgUX9K1CwKD9k9jGTJvxHb18vF0zPJGky1I/ItvXZxjRDaqMTC0GcHlOlfmTmPT09t2gp97sMYt3jFTDX5KH9naDJyRFMkt76K8tTZ31wDx557GG4vW4dC6dDWJ6mMBu/cfMGn6T28FGMF/ciaLXb7Sx26gkfZrChfX19vBIJ42JKcsgku290j/CP2kLmmpphQdUzVcjatRCtjzL8tr8J0lYqN74D2h4C5tUtw7OVT0M1GSIeSNJ7DczJK3pObIXvoAmSBtGJh8loCgWYlC4JB0/P9W4eXOPYQpD508EApVci+OWhCsOkzYhMnLCpLRwumVZyWhKe31iJr+q/wem19fg97xLkQh8MM4IhGB1C+htVzOi6HU8sWYnChxdwwqnq2LEVA7vpsNlsc3JzcyMO0vojqkKzxARDx0ZSdEfTPAzxefnB5WhzJUxhilotDFfEBrH0wfuw5IF7cfnin7j82xXYv7fzdjrnm7k4B1l5mVBUJbQYkWSkhWMK0WMoKyv7uKmp6bnCwkI+WO8ktburdvVjDiOCacOEYdJ4p9PJx87Kz0FeQe6Ibw6kwUSux+nR/b2hsbERTLnOy+vXr3/j/PnzXe3t7Yj1i3q8eeQ/iSkCayKQNIUq3VOb3qCdkndSqLq6OqxZs2Y//1hcWVn51MGDB9+qrq5GTk4O7zARqc6/vQx/E+BE7969mz4BHjp79myFJNKRioqKqiNHjrxaWlqqLl68mH+umuxf2Ims1tZWHD16FNnZ2YeOHTv2ZF5enkvSElNbW7to+/btL3R2di5jP6fKwaBssjInMZ9nT0tLa1q3bt3eTZs2fSi+H/wlwACx5CRlwv4edAAAAABJRU5ErkJggg==)',
                '**TLP:GREEN - Limited disclosure, restricted to the community.**'
            ]
        elif (tlp == 2):
            tlp = [
                '![TLP:AMBER](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo4NjQ1RkVGOTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo4NjQ1RkVGQTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjg2NDVGRUY3ODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjg2NDVGRUY4ODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+11CCTgAAB/hJREFUeNrsWntsU1UY//W2XR9by548BgxYwBcgj6HLYKgMIrAx4kCJIsIfLAFR/MNowBhQl2gioEQJ4CDEaIChCX84YdOgJBoSHjrZ4jJwqExCht0Ga7f2trdPz3e2U8tat9sWQ+I621nb23PP+c7v+32vc4dQKALRW1pacisqKnZptdp2jUYT0ul0w7YzDEIAegsKCj6rr68viMRJQ3+o7d69e/G2bdsO5ufnjy8uLkZ2djYYcBiujfbe29uLhoYGXLhwQV65cuUbhw4d2pWSktIHWnV19ZKNGzfWbtq0SV9UVIRgMIhAIIDh3iRJAmMd2trasGPHDpSWlr5VU1Pzpqa5uXnkokWLvl+xYsW98+bNg9PpxP/t9mY0GtHR0YGqqqrg/v37S7SdnZ1bfD7filWrVsHlct11AZkv4dqlTu/JTIQLuVvN7/cjMzOT5NDU1dVZtTabbc/SpUtH5ubm8i/VNrGRSL8X65pa/2EwGKDV6XHzlhNX/7Dh6rUb6OhywOcPIS0tFSaTgbuNeAGMHC/kCl/T0K86WWltAu7UqVMZOofD8UBWVpZqH0YLkq2bTCYYDUbOCFo3FAxx0D2KB4qi8EXUgEf3S5IW539swfmz38EYaMXYDAeyLUH0KsDlbhNs8jhMnFKIhSVzkW41w+NRVINFjpvMi14ljRQGQPEqbB4PmJWpkpPusVgstPdRbMcgtHRqAUtNTYUlzQI9YwUHK0KTJFxaahq8Pi96envgdrsHFYg20m13
4uaNW9BqdFi+chkWL74f2dmZsNmc+Prr8/ji8wbYHS6MyhoFORDg7hcIBuDz+eDxevjcsQpjm2s0GZGclMzfx1Nm0JqSkpJgNpm5zGRsd3G8UXzbvHnzK263O6OkpASBYSByFzE+Pp6vcdOTpzZv3tw9aNkuO24aZnfrbr2Zl5cn9OvXT3Q6nQgPDz9hMYTNmzdPJMdVq1ad6A3CdHZ2Ij09HV6vV9i2bdsWiqVL0tLSeF9DQ4O4ZMmSAWfPng3cuHEjKDuO4uXk5Bhyc3NFq9XqMRqNJcTneOzYMRw5cqRHfn6+6dKlS/j000+FhoYGLy4uDnl5eeK4ceM8+fn5AcdDc3NzY0JCgn/Pnj24aYKYHDp0SEhLS4ssdJqbm4eA1NbWIj4+Hunp6cHnnnsOJpMJH330kXDu3DmMHTs2aNq0aX7V1dX46aefkJqaCo1Gg7q6OqSmpuK5554LxWLAxeXBmAUeTjlVKhXV1dXCY8eOYdOmTTh+/HgwaNzBunXr8Nxzz+HTTz/F1KlTMX78+MHvaHR0dBhqa2vR1dWFhIQEaLVayLKMmpoa2Gw2XH/99TFjL9JTgxUuXBhiscViwYEDByJrbbL8+3PmzMHWrVvDcxcuXIi3334bNTU1WLx48YA9W7ZswhNPPIHs7Gz06tULtbW1KCsrw6OPPopNmzYBAJKTk7FlyxZkZ2fj888/x86dO9HU1ASDwYCJEydizZo1SE5OhizLmDBhAl588UXs3bsX+/fvR2JiInJycvDmm2/yF3pXVxfS09Px8ccfY/v27SgtLUV4eDimTZuGd955B9HR0ZBlGYmJiVi5ciUOHDiAyspKdO/eHdOmTcOmTZtgsVjg8/mQlpaGd955B7t27cLevXuRnp6OadOmYdOmTejRo4dGfn4+kpKS8NFHH2Hr1q2YNGkSZs6ciU2bNiEuLo6fn4yMDHz88cf44IMPMGbMGMybNw+bNm1CYmKiJicnB8XFxaisrMS0adOwcuVK5OTk4JZbbjHodDosWbIEixYtQkZGBjZu3IgFCxZgxIgR2LVrF7Kzs3H16lWsWLECb7zxBoYMGYIPPvgACxcuRG1tLe0/xo8fj5deegkTJkzA1q1bcd999+HChQtYu3YtXn31VVx66aV46623sG7dOlRWVmL06NG4++67sWHDBkRFRaGmpga9e/fG1q1b8fbbb6N37964//77sWnTJvj7+1MBr1evXti+fTu2bNmC4cOHY+HChdi0aRP69OnDz6HP58PevXuxfv165OXlYcWKFdi0aRMGDBgAt9uN5ORkHD58GK+88gpGjhyJ559/nvctXryYhKv4+c9/juXLl2Ps2LF4/fXXsWDBAtx0003YvXs3hg0bhtraWjz55JN4/fXXcd1112HTpk14+OGHceLECXTv3h3/+Mc/8Oqrr2L69OlYvnw5pk+fjoqKCgwZMgQbN27EF198gXvuuQfl5eWYMGECfve732HLli0YOHAgtGKdGKHo/uMf/4jFixdj7NixWL58OZ577jnceOONOHjwIK677jqUlZVh2rRp2LBhA2bOnImtW7fi5z//OY4dO4YhQ4Zg/fr1ePnllzF16lSsX78e119/PcrLyzF48GC8+uqr+OKLL3DTTTehpKQEEyZMwLJly7B161b0798fgUCACsDbtm3D3Llz8dJLL2Hs2LFYvnw5nn/+edxwww04cuQIJkyYgDVr1mDWrFl4/fXXcdttt6GyshIDBgzA1q1b8fLLL+OWW27BK6+8gptuugllZWXo3bs3XnnlFWzYsAE33HAD1q5di1tvvRXl5eUYMWIEDh06hNmzZ6O0tBQFBQXYsGEDfvOb3+Cqq67ivqe8vBzjxo3DunXrcNttt2HDhg24+eabcejQIQwcOBAbN27Exo0bcf3112Pz5s24+eabUVZWhl69euHNN9/Em2++iRtuuAFr1qzBzTffjPLycvTr1w9erxelpaV46aWXMGfOHKxYsQLjxo3DG2+8gWeeeQbXX389jhw5gmuvvRZr1qzB7Nmz8c4772DWrFk4efIk+vfvj7feegtbtmzBddddh+effx5z587F8ePH0bt3b3z88cd4++23MWvWLKxevRq33HILysvL0adPH7jdbqxevRovvvgiZs+ejddffx1jxozBm2++iWeffRbXXnstjh07hvHjx2Pt2rWYOXMmtm7dittuuw2VlZXo378/Nm7ciE2bNuGaa67B6tWrccsttwzYuHEjBg8ejM2bN2Pz5s249tprsWbNGsyaNQsVFRVISkqCx+PB2rVr8cILL+DWW2/FK6+8wnPuiSeewNVXX40TJ05gzJgxWL16NWbMmIGtW7fi9ttvx5kzZzBw4EC8+eab2LJlC66++mo8//zzuO2223DixAn06tULH3/8Md566y1cd911WLVqFW699VZ4PB4MHDgQGzduxJtvvolrrrkGq1atwq233opDhw6hV69e2LJlCzZv3oyrr74aL730Em677TaUlZXxnHvrrbdw66234uWXX8Ztt92GU6dOoVevXnjnnXfw9ttvY+bMmVi1ahVuu+02nD59mvu/8vJyXHvttXjxxRcxc+ZMVFRUIDExER6PB+Xl5XjhhRcwc+ZMvPLKK7j55ptx+umnMWDAALz11lt46623MGPGDKxatQozZ85EZWUlevbsiYqKCrz55puYOXMmVq5ciZtvvhmnT59Gz5498fHHH+Odd97BjBkzsHLlStx2220455aV7q1at8PzlL/Dzn/8c5eXluO6667BmzRpMnz4dW7duxe23346KigoMHDgQGzduxObNm3H11Vdj5cqVmDlzJs6ePYv+/fvj7bffxltvvYUrr7wSK1aswO233466ujr0798fb731FjZv3owrrrgCL730EmbOnIlTp06hV69e2Lx5MzZv3owrr7wSL730EmbNmoXTp0+jZ8+eePfdd/HOO+/g8ssvx4oVK3D77bejtLQUvXr1wjvvvIN33nkHl112GV588UXMmjULZWVlSE5Ohtvtxo4dO/D888/j0ksvxYsvvoibb74ZZ86cwaBBg/DWW2/h7bffxqWXXooXXngBs2bNQkVFBRISEuDxePDee+/hueeewyWXXIIXXnhhwpw5c7gAPXDgQLzxxht4++23cckll+D555/HrFmzcObMGQwYMAAbN25EWVkZLr74YqxYsQKzZ89GZWUlevTogXfffRfvvvsuLr74YqxYsQKzZ8/GmTNn0L9/f7z11lvYsmULLrroIqxYsQKzZs3CsWPH0L9/f7z55pvYsmULLrzwQqxYsQKzZ89GaWkpevbsibfffhvvvPMOLrjgAqxYsQK33347SktL0bNnT7z99tt49913cf7552PFihWYPXs2SktLkZKSAo/Hg/feew/PPfcczjvvPKxYsQKzZs3C2bNnMWDAALz99tt45513cN555+H555/H7NmzUVlZiR49euC9997Du+++i3PPPRcrVqzAnDlzcPbsWQwcOBDvvPMOtmzZgnPOOQfLly/HnDlzcPz4cQwYMABvvvkmtmzZgsGDB2P58uWYM2cOSktL0aNHD7z33nt499130adPH5SWluL2229HaWkpunfvjvfffx/vvfceevfujdLSUsyZMwelpaVISUmB1+vFjh078MILL6BXr15Yvnw55syZg7KyMgwcOBDvvvsu3nvvPfTq1QvLly/HnDlzcO7cOQwYMADvvfcetm7dir59+2L58uWYO3cuzp07h4EDB+L999/He++9h169emHZsmWYO3cuSktLkZycDK/Xiw8++ADPP/88evbsiWXLlmHu3LkoKyvDgAED8P777+P9999Hz549sWzZMsydOxelpaVITk6G1+vFjh078OKLL6JHjx5YtmwZ5s6di7KyMvTv3x8ffPABPvjgA/To0QNLly7FvHnzUFZWhqSkJHi9XuzcuRMvvfQSunfvjqVLl2LevHkoKyvjOffhhx/igw8+QPfu3bF06VLMnz8fZWVlSExMhNfrxc6dO/Hyyy8jIyMDS5Yswfz587F//34MHDgQH3zwAbZu3YqMjAwsWbIECxYswP79+5GQkACv14uPPvoIL730EtLT07FkyRIsWLAAZWVl6NevH3bs2IEPP/wQ6enpWLx4MRYsWICysjJ07doVPp8PO3fuxIsvvoi0tDQsXrwYCxcuRGlpKbp27YqdO3fio48+QlpaGhYtWoSFCxeitLQUaWlp8Hq92LVrF1566SWkpqZi0aJFWLRoEfbt24dzzjkHO3fuxNatW5GamoqFCxdi0aJF2LdvHxITE+H1evHxxx/jlZdfRmpKChYuXIjFixejrKwMXbp0gd/vx65du/DSSy8hJSUFCxcuxOLFi1FaWoouXbrA5/Nh9+7dePnll5GcnIwFCxZgyZIlKC0tRXJyMnw+H/bs2YNXXnkFSUlJWLBgAZYsWYLS0lJ06dIFfr8fe/bswSuvvIKEhAQsWLAAt99+O/bt24fk5GT4/X7s3bsXr7zyCsxmM+bPn48lS5Zg//79SEpKgt/vx969e/Hqq6/CbDZj/vz5WLp0KUpLS2E2m+H3+7Fv3z68+uqriI+Px7x587B06VLs27cPJpMJfr8f+/fvx6uvvor4+HjMnTuXC9D09HQq2mP//v147bXXEBcXh3nz5mHp0qXYt28fjEYj/H4/Dhw4gNdeew1GoxFz587FsmXLUFpaCoPBAL/fj9LSUrz++uswGAyYO3culi9fjtLSUuj1evj9fhw8eBCvv/46dDod5syZg+XLl6O0tBQ6nQ5+vx+HDh3CG2+8Aa1WizvvvBMrVqxAaWkpNBoN/H4/Dh8+jDfffBOSJOHOO+/EihUrsH//fgiCAJ/Ph7KyMrz11lsAgDvvvBN33303SktLEQwG4ff7ceTIEbz99tsIBoO48847ce+996K0tBSBQAB+vx9Hjx7FO++8A5/Ph9tvvx333XcfSktL4fP5+Pjjjz+ON954A16vF3fccQd+8YtfoLS0FB6PB36/H8ePH8fbb78Nl8uFO+64A7/85S9RWloKt9vN5/jkk0/w1ltvweFw4Fe/+hUeeOABlJaWwuVywefz4cSJE3jnnXdgt9vx0EMP4ZFHHkFpaSkXoCdPnsTbb78Nm82GRx99FL/+9a9RWloKh8PB5zh16hTeffdd9PT04PHHH8fjjz+O0tJS2O12+Hw+nD59Gu+++y66urrw9NNP46mnnkJpaSm6u7v5HGfOnMHbb7+Nzs5OPPfcc3j22WdRWlqKrq4u+Hw+nD17Fu+++y46Ojrw4osv4oUXXkBpaSnsdjsXoOfOncO7776L9vZ2vPLKK3jppZdQWlqKzs5OPkd5eTneffddtLW1Yfny5XjllVdQWlqKjo4OPkdFRQXee+89tLS0YNWqVXjttddQWlqK9vZ2PkdlZSXef/99NDU14d1338Xq1atRWlqK1tZWPkd1dTXef/99NDQ04MMPP8SHH36I0tJS3Lx5k89x8+ZNvP/++6irq8OmTZvw8ccfo7S0FE1NTXyOuro6fPjhh6ipqcH27duxY8cOlJaWoqGhgc/R0NCADz/8EJWVldizZw/27NmD0tJS3Lhxg89x8+ZNfPTRR7hy5QoOHjyIgwcPorS0FNevX+dz3Lp1Cx9//DEuXbqEY8eO4dixYygtLcW1a9f4HLW1tfjkk09w8eJFnD59GqdPn0ZpaSmuXLnC52hoaMAnn3yCCxcu4MKFC7hw4QJKS0tx+fJlPkdTUxMOHDiAiooKXL16FVevXkVpaSmqqqr4HM3NzTh48CDKy8tRXV2N6upqlJaW4tKlS3yO1tZWHDp0COfOncPt27dx+/ZtlJaW4uLFi3yO9vZ2HD58GGfOnEFdXR3q6upQWlqKyspKPkdnZyc+++wznDp1Co2NjWhsbERpaSkuXLjA5+jq6sLnn3+OEydOoKWlBS0tLSgtLcX58+f5HDabDV988QWOHz8Ou90Ou92O0tJSnDt3js/R09ODI0eO4OjRo3A6nXA6nSgtLcXZs2f5HA6HA19++SWOHDmCvr4+9PX1obS0FGfOnOFz9PX14auvvsJXX32F/v5+9Pf3o7S0FKdPn+Zz9Pf34+uvv8bhw4cxMDCAgYEBlJaW4tSpU3wOp9OJb775BocOHcLg4CAGBwdRWlqKkydP8jlcLhe+/fZbHDx4EC6XCy6XC6WlpThx4gSfw+1246]

    def getCaseSummary(self, data):

        startDate = time.strftime(
            '%Y-%m-%dT%H:%M:%SZ',
            time.localtime(data['startDate'] /
                           1000))  #Convert epoch ms to sec then human readable
        severity = self.getSummary(data['severity'])

        if (data['tags'].__len__() == 0):
            tags = ["No tags found"]

        else:
            tags = (data['tags'])

        caseSummary = [
            ' ', ' ', '**Severity** ',
            str(severity), '**Created By** ',
            str(data['createdBy']), '**Assignee** ',
            str(data['owner']), '**Tags** ',
            str(', '.join(tags))
        ]

        if data['status'] == 'Resolved':
            closeDate = time.strftime(
                '%Y-%m-%dT%H:%M:%SZ', time.localtime(
                    data['endDate'] /
                    1000))  #Convert epoch ms to sec then human readable
            caseSummary.extend([
                '**Case status:** ', 'Closed', '**Start Date**', startDate,
                '**Close Date:** ', closeDate, '**Resolution:** ',
                data['resolutionStatus'], '**Summary:** <br>', data['summary']
            ])

        else:
            caseSummary.extend(
                ['**Case status:** ', 'Open', '**Start Date**', startDate])

        return caseSummary

    def getCaseObservables(self, case_observables):

        case_observables_sorted = sorted(case_observables,
                                         key=lambda k: k['createdAt'])
        caseObservables = [
            'Created At', 'Data Type', 'Data', 'Sighted', 'IOC', 'Tags'
        ]

        for observable in case_observables_sorted:

            createdAt = time.strftime(
                '%Y-%m-%dT%H:%M:%SZ',
                time.localtime(
                    observable['createdAt'] /
                    1000))  #Convert epoch ms to sec then human readable
            caseObservables.append(createdAt)
            caseObservables.append(str(observable['dataType']))

            if (observable['dataType'] == 'file'):
                caseObservables.append(str(observable['attachment']['name']))
            else:
                caseObservables.append(
                    str(observable['data'].replace('\n', '<br>').replace(
                        '.', '[.]').replace('http', 'hxxp')))

            caseObservables.append(str(observable['sighted']))
            caseObservables.append(str(observable['ioc']))
            caseObservables.append(str(', '.join(observable['tags'])))

        return caseObservables

    def getCaseTasks(self, caseId):

        response = self.api.get_case_tasks(caseId)
        caseTasks = (json.dumps(response.json(), indent=4, sort_keys=True))
        allTaskIds = {}

        # Build a list of tasks that we want to get the details for
        for task in json.loads(caseTasks):

            if (task['title'] == 'Autogenerated Report') or (task['status']
                                                             == 'Cancel'):
                continue

            else:
                taskId = task['id']

                try:
                    if (task['description']):
                        allTaskIds[taskId] = {
                            'taskGroup': task['group'],
                            'taskTitle': task['title'],
                            'createdAt': task['createdAt'],
                            'createdBy': task['createdBy'],
                            'owner': task['owner'],
                            'status': task['status'],
                            'description': task['description']
                        }

                except KeyError:
                    allTaskIds[taskId] = {
                        'taskGroup': task['group'],
                        'taskTitle': task['title'],
                        'createdAt': task['createdAt'],
                        'createdBy': task['createdBy'],
                        'owner': task['owner'],
                        'status': task['status'],
                        'description': 'No description specified'
                    }

        return allTaskIds

    def getCaseTaskLog(self, taskLogId):

        response = self.api.get_task_logs(taskLogId)
        caseTaskLog = (json.dumps(response.json(), indent=4, sort_keys=True))

        return caseTaskLog

    def getTlpFooter(self):

        tlpFooter = [
            'Color', 'When should it be used?', 'How may it be shared',
            'TLP:RED   <br> ![TLP:RED](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo5M0JERkQyQzg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo5M0JERkQyRDg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjg2NDVGRUZGODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjg2NDVGRjAwODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+gJJ1+QAAB8xJREFUeNrsWWtsU2UYfs5pu7Yr3R1QjLt4yzTMiT9UJg7xloGBEPZHEYVoCGjMcEFFwZhAQkIwIOJl3gCN/piLP3CgiyEishAhXuasDp0sOnGou7Zd165dL37Pt57ZLWt71s4fOr/mpO0553vP9z7f816PEolEoI22trbZW7du3dTU1HSP2Wy+zGg0Kpihg7i43e7uBQsWNO3YsWPPsmXLHNo1RQNtz549S3bu3PlGWVnZ5RUVFcjNzYWizFjM5BgaGkJrayuam5vdVVVVzxw8ePDFjIyMUdDq6uqWbNq06cPNmzdbBWgIBAIIhUKY6UNVVRCknp4e7N69G5WVlU/X19fvUhwOR87SpUtPrl69uoyAeb1e/D/GDwJH1m3fvj2wd+/exQaBYq04v3r58uW6ATMYDBA+D0aTSfxRoYiDv81CuEE1SH8Q6yv/7YNWZ7fbqbfhyJEjmcYTJ05UC5YhGAzqoivB6h4YwFdnv8PP7T/B29sPhMNQBGj2i+agtLQU15deDZvFCr/fPyXwtHv5nNhz2vlUfGzsXG1+KjKpS3l5OY4fP36L0eVyXZmXlyf0DiemqADFHxzBu0cO49yxk6h0+lEDO66ARXxUDGIIDlxAU9Mp7L4kF4tWLMUdN1bIzUjmH6kA2Wu1WkcZbDRCqDi2y/6AHz6fDyMjI7qVpEzem2nNhMVqgcloGptLXQMjASmTYOiRyTk2m433FgirMoW5yESMoE3/2d+Hutdew23tXTi
EYszDLG15Y/dVCEU3iP+tXQOorXsbL3zfhg1rHoBRABIPOD6Xi8myZ2GydfCcxWKB3WaHx+thGjAGSCLACH52VrZwGeboKsfL5fVZtlkSOJfbJTc3GXi8LrCIqOO0nmRw0f1C6P59z+PpdhcOYr4AzEYORI9wzDH6vxy5+ARluOlkC/YfeJ1Pm3RBVC47Oxt5uXmSadxNzXRiD2kFYjqBzc/Pl+Ybb5PlJmTaUJBfIDc7HAnLYzKZ/M7MzERBwei9el2JmgxZKvzGO2+j9rxfsKgkBqiEZJbG9QquwbVn2tDQdFSyZaJydK4EQlNAj4lYha+Ml0NSBp+jXdcrk6ZLF8WN0zMnIWikcPPXX6Lky3Y8LkwSCE7FBctjv/B6XR8dR8dv5yVrNeW4swQsEp5alKWS9FM06YkKkoE0ydgAoFcmgdPmpgwad2okFMQXn36GJzAHQCrVQQSzRZi435uBT081w8QUJTroT6SZIZJSRNTmx54jmFMxs8k2Q898NVEu1vnH77Cfu4BK5OgwyfimWo189H57FoM+r9wMyqYZpZrLcR5ZSycfmzow+qaTHyqqIs0/ZaZxUZ0XujDfL9IBmNIpfYWBZiKve1BGYLKDjEvkzHUpKEAyZZjGmSZNLN0inUxLyzwHXC5cCuN0VHHIC0Tg9g6NMk1UDdPRDDAa/vaR0lQVpF2JUE6ytalJE8QZWGsqSBE0ApYjcqjfphQx4/s1p7CcWcLRUm4oHJqW2lRLmMkMLW1Jl8GUw7wuJdC4oMKL56FNuolgWvv2C3zom23H3NzRco3ZN7+VNHhMgFgKxSqrp35O5idjZU4ZNC6g+KJ56LtsLj6HK5klJ3xEI/qQM/8qZIk0gcpSNutJRqtUleOmsu8XW4QPDw+nxTTmjJSRMmiyfhNR7rrFi7AXf6bMMjcCeMvsw+JFtyAUwwSPx5OyiRIYz5BnXD3Lc16R0gRHgikBxwDg8+sr4NVk7ZDbbliIb8oK8TrOM15NCTCKfxLnYL/zZpQWlcguhaYgZQ96BsclqHqVG/YPS9AnKkcQnW6nLmc+cRPIfpfLlX7tKVs2YpEPPrAOz84JomEMOEWHWAXP4Eccu7YI961YJUGauFB2LMgYPWFeA4xy+vv7J2UpZbBr4XQ55RL1yqQ/7B/o19XpkHOS3UR2zJs9Bw8/VovaS8kckdkjEAVPjQKoRH8b5PErPFgFB96/vgS1Gx+RraF4/bqBgQE4nU55XQNPW5P2W54XH7Krt69XMireuqXpRu8jCHFlKqPnfcM+9PT26O6r8R6jAEVN1iSkwGIRSbc89RTeO9qIm06cwd0+BXchS5TxFlgFUG4RYX8QUbJRBI2TeVaU3b0KT9x6u4QzUVTjImimXLxsGIryimWWlkbwkE1Ir280eMRpM02USYfeHegek8kKR6tCmPKQDGQl79ObqsiIHQgYjOLH78KWs4uKihIq5xeRyma2YOO9a9C55Hac+bYFx9o7EOpzQhUONGQ1wzy3EFdcczUeLStHQXbO2IJ0+ZRQEO5B95if01o7sW2jqTh4bT7NX3MBmv/UNmNiGzyZPILs9XqdSnV19fO9vb2PrV+/Xr5x0VW+iF1j/RgSiyKYVJh1n4V1mzjHXUz3FeB0JKrTKZPNgObmZrS0tHysCrBeam1t7ero6JAX9AwykqgHBJNUFrmsJcXODUfpPh3vTP+JF9WpyiRBqFdjYyNWrlz5qnxZXFNT89CBAwfe3LZtG4qLi6UP+/9l8d8vi2mB+/bto288dPr06QeVqHPD2rVraxoaGp6rqqrKWLhwoWwZz+Qhk2WvFw6HA4cPH0ZhYeFbgmkbSkpKAkqso66vr1+wa9euzZ2dnYv5qkpEsaQvXv6rQ+CiCN/szs/Pb1m3bt3LW7Zs+YDtf46/BBgArHYm8MDFso4AAAAASUVORK5CYII=)',
            "Sources may use TLP\:RED when information cannot be effectively acted upon by additional parties, and could lead to impacts on a party's privacy, reputation, or operations if misused.",
            "Recipients may not share TLP\:RED information with any parties outside of the specific exchange, meeting, or conversation in which it was originally disclosed. In the context of a meeting, for example, TLP\:RED information is limited to those present at the meeting. In most circumstances, TLP\:RED should be exchanged verbally or in person.",
            'TLP:AMBER <br> ![TLP:AMBER](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo4NjQ1RkVGOTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo4NjQ1RkVGQTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjg2NDVGRUY3ODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjg2NDVGRUY4ODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+11CCTgAAB/hJREFUeNrsWntsU1UY//W2XR9by548BgxYwBcgj6HLYKgMIrAx4kCJIsIfLAFR/MNowBhQl2gioEQJ4CDEaIChCX84YdOgJBoSHjrZ4jJwqExCht0Ga7f2trdPz3e2U8tat9sWQ+I821nb23PP+c7v+32vc4dQKATRW1pacisqKnZptdp2jUYT0ul0w7YzDEIAegsKCj6rr68viMRJQ3+o7d69e/G2bdsO5ufnjy8uLkZ2djYYcBiujfbe29uLhoYGXLhwQV65cuUbhw4d2pWSktIHWnV19ZKNGzfWbtq0SV9UVIRgMIhAIIDh3iRJAmMd2trasGPHDpSWlr5VU1Pzpqa5uXnkokWLvl+xYsW98+bNg9PpxP/t9mY0GtHR0YGqqqrg/v37S7SdnZ1bfD7filWrVsHlct11AZkv4dqlTu/JTIQLuVvN7/cjMzOT5NDU1dVZtTabbc/SpUtH5ubm8i/VNrGRSL8X65pa/2EwGKDV6XHzlhNX/7Dh6rUb6OhywOcPIS0tFSaTgbuNeAGMHC/kCl/T0K86WWltAu7UqVMZOofD8UBWVpZqH0YLkq2bTCYYDUbOCFo3FAxx0D2KB4qi8EXUgEf3S5IW539swfmz38EYaMXYDAeyLUH0KsDlbhNs8jhMnFKIhSVzkW41w+NRVINFjpvMi14ljRQGQPEqbB4PmJWpkpPusVgstPdRbMcgtHRqAUtNTYUlzQI9YwUHK0KTJFxaahq8Pi96envgd
rsHFYg20m134pNPj2Kc4SzefRqYPwMwWMlOaUHWPW5cabuCA19ewZ7d32JJ+Vo8XDCVzz2YnKQMq9UKs9EMjRRt4qT0IFOMLMtcViLNUOD1zxHSxUP1jPQMZipp/OZgKNi3qRja1ev1yMrM4sL09PTEFIbGdHTZUb3vA7xc1oYX1lC4IpD6e9imgCmTgZ2vAesab2DN2+/D7qjE4yVFMYEjGcjUMzMyOXDcpIOhmONILtoPKe9W9y3VrJPUMoy0Rguo8SsiCRxhHcEpPXA8mbfi9eNg9QFUPcMAW88uksXJZAcDJ2Pdy3oPMG0aUP+eD03fH0JD42UOzsB1SRnkeyiIkKxDyUljCDRSMt2jxmdKagAjsyOTHEqIqHuZhq0WKxcqUhja7BdffoPlMy/h2SfZBQeiWBuzMVDHjAU+esWLr2qPwSUrXAGRjj59RDp0Wl1cAYP2RWCTklXlb2oiGwGWSGN848438n7apK3Tjo6rp7F1XT+74mksK5pbCDx2/+84c/YnrpBI5VJwile5AjizyRyl4LhBE3RXM9FgwBGzyL/QHDRX08+/4JEHupAzhpKgBCb1AWsWA1cuNTAHHrrNuSOJyo8CBp8jWabxUC1JCQsiUhQCX3j269d+Q/H0GP5LbWM+btYUJlugDbdY9KX5qafoU5JKhIVShwoG0lAsIf+QdPHLfsjJUgswPyc7u5A/OkGW9QeHVEaIjFQZvU6Zb1IAhySLB62kTQ40De7QKUdE5s2ZwNKVfgyTmlPShKKYFcK/X3IN6dMCweRPO0Ro5xWAlpkqy15tdrUJT+wWYH7N6UmByWjAbWddSR5niXkSZxoTgBK+ZAtmut/n94U/54zKww+/qK1DYtVewG/tLHXzjWZJrCWslHhq52T2OyRoXq+XC5OoBoUgItv2M/CmTb0PX/9oQEBOkG1G4IszDPyxDzKm6cPsoFoyKaYxrGiOpKMnadAlu5ISxulyhrVHCpicPxay/iEcrmcXUuOcjAXhrj+BI6etKJ5XyJUhlCO75YQVTPcoPgVujzu5QCAmo4NJOr2IN/Wg8VQfUlEcKUiQ+cnl5eV4+8gI/NoaB3DaPtN8aRcwYepy5I0beZtJUtHt6HEkdDRFSnU4HHemjBI+qbu7m5uqWuB4fako6HZ0RyWcxI6JE0ZjQdl6rNpuQCv5N3Gy8Q+REua+tO75KuBXdwmeKF8YZUqcbUxBtHl6rwY8McZut3N571jB3ueL/Oi62RVmDZVHAxcQ+RJnJzNJGh8MBGOmLm63B/PnzkTh45tR9noG9n1M/oR9kdbPPFMfUPwzY9e588DCFzVodpViQ+UaxlZ/TFbQ2nS6Qkom5gl5Bo4Rsop9xeOCKH5p1WqE/BsdoRhlI8xmc1S14A/44VW8XAChtcHmlmU3ih6ejol523C4th4fnz6Lufc4MXMykM1qZ7cCtF4HzrTocN11Lx5dUIbCOVM54werL2lNkoFcCp3/UU1KSbqQRURa7jqYH1RzlhYBtqRjmfp1xp7x8fgBcpbUCbDIc3xaXGxG7XxkYjkMoY0bnkP7jVJcuvw7jjW3w+dlmpd0SM/IwaxF+XiKBQ/K8dREt0gl03kePYojOYWCxdM2kdepBYyUxVyLXbds2bK6pqamDTNmzOAX1QoUzr98vijaJ/LggnpOlgW5j83pV4KoJP7+XvGHEoqKYo1YJqq2kVU1NjYiLy+vUaqsrNxz8eLFzvb29qhDPbVCJQpWVJbPtE9MIrPxMCZ72Cu9vxMJdqSc8cpKLCXZTpw4gfLy8gP8YfFm1o4ePfrh9u3b+ZN1EjyRM6n/WiNwiWGExd69e4mtn507d+5pDhqZ5dq1a7ecPHnynYqKCmn27NnhZwHDGTDCpbW1FcePH0d6evrntbW16yZNmuTRRAJTU1NTtHPnzldtNtt8hnA2/SPIcAWNHgwz1+Bk0bdx9erV+7Zu3XqMRWGOx18CDACpC2hYo/a63gAAAABJRU5ErkJggg==)
',
            "Sources may use TLP\:AMBER when information requires support to be effectively acted upon, yet carries risks to privacy, reputation, or operations if shared outside of the organizations involved.",
            "Recipients may only share TLP\:AMBER information with members of their own organization, and with clients or customers who need to know the information to protect themselves or prevent further harm. **Sources are at liberty to specify additional intended limits of the sharing\: these must be adhered to.**",
            'TLP:GREEN <br> ![TLP:GREEN](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo4NjQ1RkVGRDg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo4NjQ1RkVGRTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjg2NDVGRUZCODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjg2NDVGRUZDODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+I9OQdgAAB7xJREFUeNrsWWtsVEUU/u69e3fbbbvdtjxKW6gFjNUiQomBmgBaEoqNJfzAxMakEo0YH5UUEzFpDEL4gUUioAFRQ4JE0h8iKQ/9YSuiqKjFUqutD7DYggUKbXe3j32vc2Y7621pd+/u1h9a53aau3Nnvnvmm3POnDNXCgQCEKW1tTWrurp648mTJ8sNBkNWQkICJmvxer3oZ6WwsPCjrVu31pSWlp4TzyRB2s6dO0u2bdv2dkFBwayioiKkp6eDETdpSfP7/UQampubcebMmUFG2uYDBw68ZjQag6Tt27evZMOGDcerqqrU+fPnc5Z9Ph+0WjjZiiRJvBJJ169fR01NDZYvX76ltrb2FamlpWXqqlWrPi8vL89fsGABBgYG8H8ZWYg4h8OBLVu2+Hbt2lWsdHd3v8gYXbt69er/LGGKokBVVV7pnioVvZZEVmexWGicfOLEiRSFqd6ekpKS6ZmZmfyh3iJeKMsyV2Ntm/gdbdGOFzUeXJPJBIOioveaDZd+7sDFtt9xpb0L/b2DTHtMsFhTIMkS/D6/Lh9ntVpRX19vNdhstvyMjAzeqHditEEkJiQGhRreLALs8nl9cLqcGBoa4gugd5KESeSbzWYkmBI4phhLOC6XC0POIXg8Hl2YTCGYVhnx47dtOPVlAzrMbfDe0Q/5NgJkf53M5L6xYq7/bqwoXoG582ZzuQP+QFjSkpOTSc5pBqayPiakqldVU1JSkJKcwlV89BjVoCIxMZE/t
zvs3NwjTZIwaEyqJZWPFwsgChFIoQ9h9g/2w2F3wB/wj4tL/b0uL95/5z18m/Qp0l8NIKuYZBvZz+XsQ/uJL/D6jq/xwLkyrCkvg0/xhdU6eidTlICsezdhV3paOqypVq4VxDxNeHSldiJU9I1EmCXFginpU/hkiQyqY2Gy1wf7ZkwZc8GE7/I4vXhzz140rWxAfkMAM1YSkUxTMLIaWQiasxaY+4UXDbOP4sC+g5Ch8LlFtAq95pNqTeXmI8iK1J/6kVbSRMfqT22k7qRhgig9fsWUYOILMpamGRQDDr17GJ2PNOOOrcNOnGvuGDIOP1OMwF1vA00LT6Pu8HHuciIVWQ9h5kQzksxJuv2edpJEHAmiJYXuyRQ5oQhEjUnmSoRrMekdTV/9gOaZpzF3M4vo9W4+w/X2PcBnQydx8ad2qEY1PtJoRUnAmINEdpE/Gl0Iczwz07OQyUnJodCBZPR5/Pi08RNMe4lbcnR4ZK6JzPw3unHq81NQJCV20kg4I9uFKLiLNTsg06Px5LO0YQppS6yYYgcXGkzkdV26iq5pvyCtKOizopaT1fRS4IL0A3qv28L6tvCksYtUNda4K/QSJgCRr50wtcWbpglMwrv8xxUEFrqhxIhFkpjMbFed04Orl6+Fzbsjmic517jzOHbJijxil4t3IYSGCfPs6b0JNTdeOVmZCfTdjEPTJEiYkCIFsaJ1+lFnE8oEAMkcLA6fRlG+3zchEyIcsQjR7sLjbVACh/CtqWnwdsQpJ/27wjYEqyWsjBE1zeP2xO17aDylQGKy4ugpHhMlTLfbHUq1smdmIdBiQDxL7GFw6oVUTM+ZFjYPlyOtppsh6c35xsVgk/N4Pbfkk/Fgkia43K7QKWv27CxM7ZgNewsgx4BJlt3bAMwaykdGZlrspIVOMAf64/JvNH60O6O2WM2USBsYHOBk0T0PjRJVLL2zGFd3RB+nScPZQc9OCfcXFSMgxeHThICDg4P8lCFSXjZWqEGTo1MPrVYJ7XP0O6LHlOTgWIdjBCZp7pLiezHn3CJc2s92/Sh9/4WXgUX9K1CwKD9k9jGTJvxHb18vF0zPJGky1I/ItvXZxjRDaqMTC0GcHlOlfmTmPT09t2gp97sMYt3jFTDX5KH9naDJyRFMkt76K8tTZ31wDx557GG4vW4dC6dDWJ6mMBu/cfMGn6T28FGMF/ciaLXb7Sx26gkfZrChfX19vBIJ42JKcsgku290j/CP2kLmmpphQdUzVcjatRCtjzL8tr8J0lYqN74D2h4C5tUtw7OVT0M1GSIeSNJ7DczJK3pObIXvoAmSBtGJh8loCgWYlC4JB0/P9W4eXOPYQpD508EApVci+OWhCsOkzYhMnLCpLRwumVZyWhKe31iJr+q/wem19fg97xLkQh8MM4IhGB1C+htVzOi6HU8sWYnChxdwwqnq2LEVA7vpsNlsc3JzcyMO0vojqkKzxARDx0ZSdEfTPAzxefnB5WhzJUxhilotDFfEBrH0wfuw5IF7cfnin7j82xXYv7fzdjrnm7k4B1l5mVBUJbQYkWSkhWMK0WMoKyv7uKmp6bnCwkI+WO8ktburdvVjDiOCacOEYdJ4p9PJx87Kz0FeQe6Ibw6kwUSux+nR/b2hsbERTLnOy+vXr3/j/PnzXe3t7Yj1i3q8eeQ/iSkCayKQNIUq3VOb3qCdkndSqLq6OqxZs2Y//1hcWVn51MGDB9+qrq5GTk4O7zARqc6/vQx/E+BE7969mz4BHjp79myFJNKRioqKqiNHjrxaWlqqLl68mH+umuxf2Ims1tZWHD16FNnZ2YeOHTv2ZF5enkvSElNbW7to+/btL3R2di5jP6fKwaBssjInMZ9nT0tLa1q3bt3eTZs2fSi+H/wlwACx5CRlwv4edAAAAABJRU5ErkJggg==)',
            "Sources may use TLP\:GREEN when information is useful for the awareness of all participating organizations as well as with peers within the broader community or sector.",
            "Recipients may share TLP\:GREEN information with peers and partner organizations within their sector or community, but not via publicly accessible channels. Information in this category can be circulated widely within a particular community. TLP\:GREEN information may not be released outside of the community.",
            'TLP:WHITE <br> ![TLP:WHITE](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAE0AAAAeCAYAAABpE5PpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo5M0JERkQzMDg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo5M0JERkQzMTg4MEYxMUU2OUVDQkM2MTBGMkFBMjRDRCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjkzQkRGRDJFODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjkzQkRGRDJGODgwRjExRTY5RUNCQzYxMEYyQUEyNENEIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+49AQNQAACDpJREFUeNrsmWdoVFkUx8+M0dhb7L1jjw3sBUXBBmrAgHXVWFaCYhcRFesHQUVWDYkximj8ItGoWDb2ih17jYkEy9rFXvf+Dlx3jJk3kzf7abMHhkzevPt/957zP/V5vn//LlaeP38evmjRouitW7dGZ2Zm1gsLC/Oay//ckIfky5cvntKlSz/s1q3brrlz5yY2atToL/ubxypt//79jUaNGrWmePHiHdu0aSMVK1aUvCwej0devnwp58+flxs3bmRMnz590tSpU7f9UFpaWlrjqKiotIEDB5bv3LmzfPv2Tb5+/Sp5Xbxerxhvk7t378ry5ctlypQpw2bPnr3R8+TJk/zt27f/s0uXLp1R2Js3b+R/+VkKFiwojx49kiVLlrxISUlpm+/z588jzIUJ0dHR8vbt26Cpmz9/fgkPD1dL2A/XsA5MDdW6fPLly6fP8o27bjHB8sVEgsU18U0iIiLE6KrQwYMHxWP+STMK69a6dWv58OFDUFr/+PGjXL9+HV+XFy9e6MPZSKlSpaR+/frSoEEDVWgweNYIBQoU0O+G+WpVDMgh2SzxtXDhwvLp06dchQ2MCAZ7BPPVq1d6vUSJEopZsmRJVYhRRkAslA2OcdObYQaoVtmyZQNuBmtxsGPHjgnarlatmuDODRs2VEW+f/9eFXno0CHZvn27dO3aVTp27KgHdWKeZcDZs2fl5MmT+r1q1apSpkwZVdzNmzfl8ePHUrlyZenevbsqMZAxMAJ7u
nPnju6VkANm+fLl9ffLly/L3r171RAmO0rt2rUV04l5nKFYsWLsNyLMKOI7FnFawMFYtGrVKv2elJQkHTp0yPHeOXPmyNGjR2XGjBly6dIlGT16tK7JySg8lwNt2LBBTHqXBQsWiImtUqhQoZ/uS09Pl3Xr1smaNWv0d4yBkZxcMTk5WR48eCCxsbEyYMAAxfcVU16JKa30TFWqVBGTCHWPTgYGGw8KWIdxI7JixQpp2rSpKsSfwqxwKO6LjIyUlStXqkEsjhUYRUrnd+Lpvn37pGfPnr8oDKlVq5YsXLhQdu7cKRcvXpTU1FRlSU4M4zmrV69WRuIVMTExvygM4RoGPXLkiLprXFycrrXxzonF3kC+jGY3b94szZs3l4SEBLViMMJ98fHx0qRJE7U6OL4Pxqr8Pm3aNP0EI/Xq1ZPdu3dLRkaGHD9+XF0we7xlr40bN5aNGzdK0aJFA2KaulQ2bdqksXjLli2/YGYXJUCgQEqcevbsmdLYjbDu6dOnigOePdyePXukU6dOMmbMmFzhwaDExESNnQR2y2DiLeGAOIgb51ZY8/r1a7ly5cqPpOTX+wKx5cCBAzJu3DilsBshQ40fP14DMngckix07949MYWiK0zCRJ8+fVRxvgfkf1gbiC05Ce5OHD58+HBAF/U6xTIYhuX69esXUp3EenDAw03JXi1atNCM5lYGDx6sbkp2Jj4S9MHu3bu3a0xiKlgPHz50DENeJ5ZlZWVpaVGhQoWQlEaqr169uuLhopmZmWK6kJAwiVuUALg+mCiQa76x003lT43J/lwrDWZQH/0bQjFJmof6lBl16tQJCQ9GUF+SgW0BW6NGjZD3iXHZZ/ZsH3RMI8MFCoq5OaStBamFQmGEr2F9MW2iCUU4b6A20OtUAZOOaT/+DaE9KlKkiB6SoHv//v2QMWEXWOwVbDqHUIXzUqo4FfteJ5ZVqlRJDxdsD+lPWE+cAA/ccuXKadsUihAfCR+4KMmApHL16tWQlQYGHYJTW+nINOIQByYNhyJU3eCAR7NPwKZaD8UYdAd4Aqyg6SYWkfXoGNzKuXPnlK1guVIa9CQOtWrVSludUIT1LVu2VDwOSEZG6CfdCImEApd2DTz2ajPfsmXLXO+TtdSAxDVX7onACkoDpgX0c26Edbdv39Z+FTw7n+rbt69ukpottzJ58mQdQ5GBwbJ7ZbICWxgo5FbWrl2rLGVyY/fpSmm20R4xYoTMnz9fpwK5kW3btunkgvXgWOtxUFyVYpJmnfYnWJk0aZKcOnVK1/m6t93r8OHDZd68edqDBiv0ngwEsu/Tb9Y2VJxgaF6aQjGnVMs1WiGsCvC7d++kXbt2jsUfgZl7UTS9JTGCa9mnodRVTDW4lxjCVMRf2QCDRo4cqfP6sWPH5jgh5n/aPZpvZvrXrl2TZs2aaezzl0xondavX6/7JEH5G0ja1urEiRPvPKZeSp81a1ZNMoalur9pByme6QGbGzRokPTo0UOzFumeNolMy4gHK6PUIUOG6AjGie7EIsqRlJQUnZHxJowWiwOwDtcmaVDxEyr4cDCnWgrFc8+uXbs0tDChAZe9wiL2CVthOGTo1avXjzVOIyHW8p7AY5h2buLEiS2YwGZnQ07FJOCMuY3Gf7Qw9oF8KAHatm2r1sYITobwLXzBIPvBDmol9gKb8IC6devqWIgAnZsROgZhEgImisIotk6E/SQOmBloamt7cdYvXrw4yxMVFbXEZKOZQ4cODfrFip29wwQyGQeEiTCOv8HO3f2Nvn3jCoe3ynfzgsW+qPHtHuw8D8xgXwJhgDNnzsjp06dTvTExMXEXLly4b6cEwQgKsVajTmLGZat9rrtRmC2oMQQYWJ+PxXP7RgqlYNTsmIHeXWQ3JnvbsWMHWT9eXxbHxsb+lpycnMR8nxcaAIfyGu6/IvYtmX0/YpiZYGLhGFUaWh82bNjvJnAu79+/fziBOFD/lRcUhl5u3bqlpZapINampqaOr1mz5mePr
2IM2yKXLl06xbQSXYyGI4gFeVWMXjzGjd+asHPeVAp/zJw5c4edCP8twAAqcard2ZAiKQAAAABJRU5ErkJggg==)',
            "Sources may use TLP\:WHITE when information carries minimal or no foreseeable risk of misuse, in accordance with applicable rules and procedures for public release.",
            "Subject to standard copyright rules, TLP\:WHITE information may be distributed without restriction."
        ]

        return tlpFooter

    def addTask(self, caseId):
        """Create an 'Autogenerated Report' task on the given case.

        Returns the created task as a pretty-printed JSON string when the API
        answers HTTP 201; otherwise forwards the failure via self.error().
        """
        task = CaseTask(title='Autogenerated Report',
                        startDate=int(time.time()) * 1000)
        response = self.api.create_case_task(caseId, task)
        if response.status_code != 201:
            self.error('ko: {}/{}'.format(response.status_code, response.text))
        else:
            return json.dumps(response.json(), indent=4, sort_keys=True)

    def addTaskLog(self, taskId, filename):
        """Attach a file to a task as an 'Autogenerated report' task log.

        Returns the created log entry as a pretty-printed JSON string when the
        API answers HTTP 201; otherwise forwards the failure via self.error().
        """
        log_entry = CaseTaskLog(message='Autogenerated report', file=filename)
        response = self.api.create_task_log(taskId, log_entry)
        if response.status_code != 201:
            self.error('ko: {}/{}'.format(response.status_code, response.text))
        else:
            return json.dumps(response.json(), indent=4, sort_keys=True)

    def run(self):
        """Generate a Markdown report for a TheHive case and attach it.

        Gathers the case summary, description, task logs (sorted by creation
        time), and observables; renders everything with MdUtils into a
        TLP-labelled Markdown file under self.tmpPath; then creates an
        'Autogenerated Report' task, uploads the file to it as a task log,
        removes the temporary file, and reports completion.
        """
        Responder.run(self)

        caseNumber = self.get_param('data.caseId')  #Friendly case number
        caseId = self.get_param('data.id')  #Raw case number
        case_observables = self.api.get_case_observables(caseId).json()
        title = self.get_param('data.title', None, 'title is missing')
        # NOTE(review): `description` and `tags` are not read again below
        # (the report uses data['description'] directly); presumably the
        # get_param calls are kept for their missing-field validation side
        # effect — confirm before removing.
        description = self.get_param('data.description', None,
                                     'description is missing')
        tags = self.get_param('data.tags')
        data = self.get_param('data')
        # getTLP (defined elsewhere) returns indexable display strings:
        # tlp[0] is used as the title prefix, tlp[1] as a banner line.
        tlp = self.getTLP(data['tlp'])

        # Title — the report file lives under tmpPath, named after the case.
        #mdFile = MdUtils(file_name=str(caseNumber),title=tlp[0] + ' Case #' + str(caseNumber) + ': ' + title)
        mdFile = MdUtils(file_name=str(self.tmpPath) + str(caseNumber),
                         title=tlp[0] + ' Case #' + str(caseNumber) + ': ' +
                         title)

        # Case Summary: two-column table built from flat key/value pairs.
        caseSummary = self.getCaseSummary(data)
        mdFile.new_header(level=1, title='Case Summary')
        mdFile.new_line(str(tlp[1]))

        mdFile.new_table(columns=2,
                         rows=int(caseSummary.__len__() / 2),
                         text=caseSummary,
                         text_align='left')

        # Case Description (the HTML div forces a page break when rendered).
        mdFile.new_line('<div style="page-break-after: always;"></div>')
        mdFile.new_line(' ')
        mdFile.new_header(level=1, title='Case Description')
        mdFile.new_line(str(data['description']))
        mdFile.new_line(' ')

        # Task Log: tasks ordered chronologically by creation time.
        allTaskIds = self.getCaseTasks(caseId)
        allTaskIds_sorted = sorted(allTaskIds.items(),
                                   key=lambda x: x[1]['createdAt'])
        mdFile.new_header(level=1, title='Task Log Entries')

        for task in allTaskIds_sorted:
            # '\:' escapes the colon for the Markdown renderer.
            title = str(task[1]['taskGroup'] + ' \: ' + task[1]['taskTitle'])
            createdAt = time.strftime(
                '%Y-%m-%dT%H:%M:%SZ',
                time.localtime(
                    task[1]['createdAt'] /
                    1000))  #Convert epoch ms to sec then human readable
            mdFile.new_header(level=2, title=title)
            mdFile.new_line(str('**Created At:** ') + str(createdAt))
            mdFile.new_line(
                str('**Created By:** ') + str(task[1]['createdBy']))
            mdFile.new_line(str('**Assigned To:** ') + str(task[1]['owner']))
            mdFile.new_line(str('**Case Status:** ') + str(task[1]['status']))
            mdFile.new_line(' ')
            mdFile.new_line(str('**Description:** '))
            mdFile.new_line(str(task[1]['description']))
            mdFile.new_line(' ')

            # Individual log entries for this task, oldest first.
            caseTaskLog = self.getCaseTaskLog(task[0])
            caseTaskLogEntries = (json.loads(caseTaskLog))
            caseTaskLogEntries_sorted = sorted(caseTaskLogEntries,
                                               key=lambda k: k['createdAt'])

            for caseTaskLogEntry in caseTaskLogEntries_sorted:

                createdAt = time.strftime(
                    '%Y-%m-%dT%H:%M:%SZ',
                    time.localtime(
                        caseTaskLogEntry['createdAt'] /
                        1000))  #Convert epoch ms to sec then human readable
                mdFile.new_line(
                    str(createdAt) + ' : ' + str(caseTaskLogEntry['message']))

        # Case Observables: six-column table.
        mdFile.new_header(level=1, title='Case Observables')
        caseObservables = self.getCaseObservables(case_observables)
        mdFile.new_table(columns=6,
                         rows=int(caseObservables.__len__() / 6),
                         text=caseObservables,
                         text_align='left')

        # TLP Protocol description: static 3x5 footer table from getTlpFooter.
        mdFile.new_line('<div style="page-break-after: always;"></div>')
        mdFile.new_line(' ')
        mdFile.new_header(
            level=1,
            title='Traffic Light Protocol (TLP) Definitions and Usage')
        tlpFooter = self.getTlpFooter()
        mdFile.new_table(columns=3, rows=5, text=tlpFooter, text_align='left')

        # Build TOC
        mdFile.new_table_of_contents(table_title='Table of Contents', depth=2)

        # Compile the report
        mdFile.create_md_file()

        # Add the report to the case as a new task.
        addTask = json.loads(self.addTask(caseId))
        taskId = addTask['_id']

        # Add the MD file to the task
        addTaskLog = json.loads(
            self.addTaskLog(taskId,
                            str(self.tmpPath) + str(caseNumber) + '.md'))

        # Cleanup the MD file
        os.remove(str(self.tmpPath) + str(caseNumber) + '.md')

        self.report({'report': 'created'})
Пример #7
0
class TheHive(AppBase):
    """
    TheHive app for Walkoff/Shuffle: thin async wrappers around thehive4py
    and TheHive's REST API (cases, alerts, observables, task logs).

    Inherit from the AppBase class to have Redis, logging, and console logging
    set up behind the scenes.
    """

    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.

        :param redis: redis connection handed in by the app runtime
        :param logger: main logger
        :param console_logger: optional console logger
        """
        super().__init__(redis, logger, console_logger)

    @staticmethod
    def _parse_tags(tags):
        """Split a comma-separated tag string into a list ([] when empty).

        Accepts both ", "- and ","-separated input; a single tag without a
        comma becomes a one-element list. Centralizes the parsing that was
        previously duplicated across several methods.
        """
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    # async def run_analyzer(self, apikey, url, title_query):
    #    self.thehive = TheHiveApi(url, apikey, cert=False)

    #    response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
    #    return response.text

    async def search_cases(self, apikey, url, title_query):
        """Return (as raw response text) all cases whose title contains title_query."""
        self.thehive = TheHiveApi(url, apikey, cert=False)

        response = self.thehive.find_cases(query=ContainsString(
            "title", title_query),
                                           range="all",
                                           sort=[])
        return response.text

    async def search_query(self, apikey, url, search_for, custom_query):
        """Run a caller-supplied JSON query against alerts or cases.

        :param search_for: "alert" queries alerts; anything else queries cases
        :param custom_query: JSON document used verbatim as the query body
        :raises IOError: on malformed JSON input or a non-200 API response
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)

        try:
            query = json.loads(custom_query)
        except ValueError:
            # BUG FIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit. json.loads signals bad input with
            # ValueError (JSONDecodeError), so only catch that.
            raise IOError("Invalid JSON payload received.")

        if search_for == "alert":
            response = self.thehive.find_alerts(query=query,
                                                range="all",
                                                sort=[])
        else:
            response = self.thehive.find_cases(query=query,
                                               range="all",
                                               sort=[])

        if response.status_code == 200:
            return response.text
        else:
            raise IOError(response.text)

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        """Create an observable on a case.

        :param tags: comma-separated string of tags
        Returns the raw API response body.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)

        tags = self._parse_tags(tags)

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            # BUG FIX: tags were hard-coded to ["Shuffle"], silently
            # discarding the caller-supplied (and just-parsed) tag list.
            tags=tags,
            message="Created by shuffle",
        )

        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self,
                            apikey,
                            url,
                            title_query,
                            search_range="0-25"):
        """Return (as raw response text) alerts whose title contains title_query."""
        self.thehive = TheHiveApi(url, apikey, cert=False)

        # Could be "all" too
        if search_range == "":
            search_range = "0-25"

        response = self.thehive.find_alerts(query=ContainsString(
            "title", title_query),
                                            range=search_range,
                                            sort=[])
        return response.text

    async def create_case(self,
                          apikey,
                          url,
                          title,
                          description="",
                          tlp=1,
                          severity=1,
                          tags=""):
        """Create a case.

        tlp must be 0-3 and severity 0-2; both accept numeric strings.
        Returns the raw API response body, or an error string on bad input.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        tags = self._parse_tags(tags)

        # Fall back to sane defaults when empty/zero values are passed in.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                # BUG FIX: message said "0-2" but the range check below is 0-3.
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                # BUG FIX: previously reported `tlp` instead of `severity`.
                return "Severity needs to be a number from 0-2, not %s" % severity

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            # BUG FIX: previously reported `tlp` instead of `severity`.
            return "Severity needs to be a number from 0-2, not %d" % severity

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )

        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        """Create an alert.

        tlp must be 0-3 and severity 1-3; both accept numeric strings.
        Returns the raw API response body, or an error string on bad input.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False)
        tags = self._parse_tags(tags)

        # Fall back to sane defaults when empty/zero values are passed in.
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp

            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )

        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(self,
                                    apikey,
                                    url,
                                    alert_id,
                                    dataType,
                                    data,
                                    message=None,
                                    tlp="2",
                                    ioc="False",
                                    sighted="False",
                                    ignoreSimilarity="False",
                                    tags=None):
        """Attach an artifact (observable) to an alert (TheHive v4 API).

        The boolean parameters arrive as strings and are compared
        case-insensitively against "true". Raises ConnectionError on a
        non-2xx response; returns the raw response body otherwise.
        """
        self.thehive = TheHiveApi(url, apikey, cert=False, version=4)

        if tlp:
            tlp = int(tlp)
        else:
            tlp = 2

        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"

        # This endpoint strips whitespace around tags, unlike the other
        # methods; kept as-is to preserve behavior.
        if tags:
            tags = [x.strip() for x in tags.split(",")]
        else:
            tags = []

        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags)

        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e
        if ret.status_code > 299:
            raise ConnectionError(ret.text)

        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        """Fetch a single item of the given field_type by id; returns raw text."""
        self.thehive = TheHiveApi(url, apikey, cert=False)

        kind = field_type.lower()
        if kind == "alert":
            # "?similarity=1" is appended to the id so the underlying GET
            # also includes similar-case information in the response.
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif kind == "case":
            ret = self.thehive.get_case(cur_id)
        elif kind == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif kind == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif kind == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif kind == "case_template":
            # NOTE(review): identical to the "case_tasks" branch — looks like
            # a copy-paste bug; probably a case-template lookup was intended.
            # Kept as-is to preserve behavior; verify against thehive4py.
            ret = self.thehive.get_case_tasks(cur_id)
        elif kind == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif kind == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif kind == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)

        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        """Mark an alert as read; returns the raw API response body."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        """Mark an alert as unread; returns the raw API response body."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self,
                                     apikey,
                                     url,
                                     alert_id,
                                     case_template=None):
        """Promote an alert to a case, optionally from a case template."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        """Merge an alert into an existing case via the raw REST endpoint."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        """Update one field of an alert (other field_types are unimplemented).

        A leading "%s" in data means "append to the current field value".
        Status changes go through TheHive's markAsRead/markAsUnread
        endpoints; anything else is PATCHed directly. Returns the HTTP
        status code as a string.
        """
        # This is kinda silly but..
        if field_type.lower() == "alert":
            # BUG FIX: self.thehive was referenced below without ever being
            # initialized in this method, raising AttributeError when this
            # was the first call on the app instance.
            self.thehive = TheHiveApi(url, apikey, cert=False)

            newdata = {}

            if data.startswith("%s"):
                # Append mode: fetch the current value and concatenate.
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    # Best effort: fall through; the json() lookup below will
                    # fail loudly if the alert could not be fetched.
                    pass

                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data

            # Bleh
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url

                ret = requests.post(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                    json=newdata,
                )

            return str(ret.status_code)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id,
                           artifact_id):
        """Run a Cortex analyzer against an artifact; returns raw response text."""
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self,
                              apikey,
                              url,
                              task_id,
                              message,
                              filedata=None):
        """Add a log entry (optionally with a file attachment) to a task.

        filedata is expected to be a dict with "success", "filename" and
        "data" keys as produced by the Shuffle file subsystem.
        """
        # BUG FIX: the default used to be a mutable {} which then crashed
        # with KeyError on filedata["success"]; a missing or unsuccessful
        # file now skips gracefully instead.
        if not filedata or not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, case_id, tags,
                                          filedata):
        """Upload a file to a case as a "file"-type observable."""
        if not filedata or not filedata.get("success"):
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        # BUG FIX (robustness): an empty tags string used to be passed
        # through unchanged into the JSON payload; normalize to a list like
        # the other methods do.
        tags = self._parse_tags(tags)

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text
Пример #8
0
class TheHive(AppBase):
    """TheHive app for Shuffle.

    Inherit from the AppBase class to have Redis, logging, and console
    logging set up behind the scenes. Every action returns raw response
    text (or an error string) for the workflow engine to consume.
    """
    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    @staticmethod
    def _split_tags(tags):
        # Turns a comma-separated tag string into a list. A bare tag now
        # becomes a one-item list (the old inline parsing silently dropped
        # it), and falsy input becomes [].
        if not tags:
            return []
        if ", " in tags:
            return tags.split(", ")
        if "," in tags:
            return tags.split(",")
        return [tags]

    @staticmethod
    def _check_level(value, name, maximum):
        # Normalizes a tlp/severity value to an int in [0, maximum].
        # Returns (value, None) on success, (None, error_message) on
        # failure. Falsy input falls back to 1 (the old "Wutface fix").
        # BUGFIX: the severity messages used to print the tlp value, and
        # the bounds/messages disagreed between methods.
        if not value:
            value = 1
        if isinstance(value, str):
            if not value.isdigit():
                return None, "%s needs to be a number from 0-%d, not %s" % (
                    name, maximum, value)
            value = int(value)
        if value > maximum or value < 0:
            return None, "%s needs to be a number from 0-%d, not %d" % (
                name, maximum, value)
        return value, None

    async def search_cases(self, apikey, url, title_query):
        """Find cases whose title matches title_query."""
        self.thehive = TheHiveApi(url, apikey)

        response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
        return response.text

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        """Add an observable to an existing case.

        BUGFIX: the parsed tags are now actually sent (plus the "Shuffle"
        marker); the old code computed them and then hardcoded ["Shuffle"].
        """
        self.thehive = TheHiveApi(url, apikey)

        tags = self._split_tags(tags)
        if "Shuffle" not in tags:
            tags.append("Shuffle")

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=tags,
            message="Created by shuffle",
        )

        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self, apikey, url, title_query, search_range="0-25"):
        """Find alerts whose title matches title_query.

        :param search_range: item range such as "0-25"; "all" is valid too
        """
        self.thehive = TheHiveApi(url, apikey)

        if search_range == "":
            search_range = "0-25"

        response = self.thehive.find_alerts(query=String("title:'%s'" % title_query), range=search_range, sort=[])
        return response.text

    async def create_case(self, apikey, url, title, description="", tlp=1, severity=1, tags=""):
        """Create a new case. TLP must be 0-3, severity 0-2.

        Returns the raw response text, or a validation/connection error
        string.
        """
        self.thehive = TheHiveApi(url, apikey)
        tags = self._split_tags(tags)

        tlp, error = self._check_level(tlp, "TLP", 3)
        if error:
            return error
        severity, error = self._check_level(severity, "Severity", 2)
        if error:
            return error

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )

        try:
            return self.thehive.create_case(case).text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(self, apikey, url, type, source, sourceref, title, description="", tlp=1, severity=1, tags=""):
        """Create a new alert. TLP must be 0-3 (now consistent with
        create_case; the old bound here was 2), severity 0-2."""
        self.thehive = TheHiveApi(url, apikey)
        tags = self._split_tags(tags)

        tlp, error = self._check_level(tlp, "TLP", 3)
        if error:
            return error
        severity, error = self._check_level(severity, "Severity", 2)
        if error:
            return error

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )

        try:
            return self.thehive.create_alert(alert).text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        """Fetch a single TheHive object by type and id.

        :return: raw response text, or an error string for unknown types
        """
        self.thehive = TheHiveApi(url, apikey)

        field_type = field_type.lower()
        if field_type == "alert":
            # similarity=1 asks TheHive to include similar-case info
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif field_type == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type == "case_template":
            # BUGFIX: was a copy-paste of get_case_tasks; templates have
            # their own endpoint in thehive4py.
            ret = self.thehive.get_case_template(cur_id)
        elif field_type == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info." % field_type

        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        """Mark an alert as read ("closing" it in TheHive terms)."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        """Mark an alert as unread, effectively reopening it."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, alert_id, case_template=None):
        """Promote an alert to a case, optionally using a case template."""
        self.thehive = TheHiveApi(url, apikey)
        response = self.thehive.promote_alert_to_case(alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        """Merge an existing alert into an existing case (raw REST call)."""
        self.thehive = TheHiveApi(url, apikey)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        """Update one field of an alert via the raw REST API.

        A data value starting with "%s" means "append to the existing
        value". Status changes go through the markAsRead/markAsUnread
        endpoints. Returns the HTTP status code as a string.
        """
        if field_type.lower() != "alert":
            return "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info." % field_type

        # BUGFIX: self.thehive was never initialized in this method, so
        # the "%s" prefix path crashed with AttributeError.
        self.thehive = TheHiveApi(url, apikey)

        newdata = {}
        if data.startswith("%s"):
            ticket = self.thehive.get_alert(cur_id)
            if ticket.status_code != 200:
                pass  # best effort: fall through and try anyway

            newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
        else:
            newdata[field] = data

        target = "%s/api/alert/%s" % (url, cur_id)
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % apikey,
        }
        if field == "status":
            if data == "New" or data == "Updated":
                target = "%s/markAsUnread" % target
            elif data == "Ignored":
                target = "%s/markAsRead" % target

            ret = requests.post(target, headers=headers)
        else:
            ret = requests.patch(target, headers=headers, json=newdata)

        return str(ret.status_code)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id, artifact_id):
        """Run a Cortex analyzer against an artifact."""
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.run_analyzer(cortex_id, artifact_id, analyzer_id).text
Пример #9
0
            api.abort(400, "Received data was NOT JSON!")
        try:
            case = request.json
            case_id = case['id']
            log.info("thehive_case: {}".format(json.dumps(case, indent=4)))

            # Instantiate TheHive4py API
            hive_api = TheHiveApi(
                '{proto}://{host}:{port}'.format(
                    proto=("https" if CONFIG["hive_server_use_ssl"] else "http"),
                    host=CONFIG["hive_server"],
                    port=CONFIG["hive_port"]
                ), CONFIG["hive_api_key"])

            # Retrieve Observables in a separate API call (as they're not included in responder)
            observables_response = hive_api.get_case_observables(case_id)

            # Add observables to thehive:case as its own sub-dict
            case['observables'] = observables_response.json()

            strings = []
            for o in case["observables"]:
                # FIXME: Implement backend str type determination.
                strings.append(YaraString("observable_{md5sum}".format(
                    md5sum=md5(o["data"].encode("utf-8")).hexdigest()), o["data"]))

            # Append additional strings if specified in config.
            strings.extend(
                [
                    YaraString(
                        "observable_{md5sum}".format(
Пример #10
0
class TheHiveScheduledSearches:
    """Periodically re-runs QRadar searches for observables on TheHive cases.

    Cases are selected by the presence of the customFields.firstSearched
    date; work is fanned out over a bounded pool of daemon threads fed by
    the module-level queue ``q`` (limit: module-level ``concurrent``).
    """

    def __init__(self, TheHive, QRadar):
        #Retrieve enabled datatypes from config
        self.qr_enabled_datatypes = QRadar['enabled_datatypes']

        #Epoch in milliseconds of the current time; all observable age
        #comparisons below are done in milliseconds
        self.current_time = int(round(time.time() * 1000))

        #Assign The Hive API class
        self.thapi = TheHiveApi(TheHive.get('url', None), TheHive.get('key'),
                                TheHive.get('password', None),
                                TheHive.get('proxies'), TheHive.get('verify'))

    #Generic function to check the response from the hive
    def check_response(self, response):
        """Log status code (and raw error output on >299) of a response."""
        logger.debug('API TheHive - status code: {}'.format(
            response.status_code))
        if response.status_code > 299:
            logger.error('API TheHive - raw error output: {}'.format(
                response.raw.read()))
        logger.debug('Response: %s' % response.text)

    def observable_search(self):
        """Queue a search job for every case carrying a firstSearched date,
        then block until the worker threads have drained the queue."""
        #Search for cases with first_searched
        logger.info('Searching for matching cases')
        self.query = Contains('customFields.firstSearched.date')
        self.response = self.thapi.find_cases(query=self.query)
        logger.debug('Response: %s' % self.response.text)

        #Queue one search_observables job per matching case
        for case_data in self.response.json():
            queue_item = {}
            queue_item['action'] = "search_observables"
            queue_item['data'] = case_data
            #Add case to the queue
            self.thapi_queue(queue_item)
        self.process_queue()
        while q.qsize() > 0:
            logger.info('Current queue size(%i)' % q.qsize())
            time.sleep(60)

    #Define the logic that makes it possible to perform asynchronous requests to The Hive in order to speed up the integration
    def thapi_queue(self, queued_request):
        """Put one {'action': ..., 'data': case} item onto the work queue."""
        logger.info(
            'Adding action: %s to queue for: %s (Current queue length: %i)' %
            (queued_request['action'], queued_request['data']['id'],
             q.qsize()))
        q.put(queued_request)

    def process_queue(self):
        """Start the first worker thread (doWork spawns more as needed)."""
        thread_count = threading.active_count()
        if thread_count <= 1:
            logger.info('Creating thread')
            t = Thread(target=self.doWork)
            t.daemon = True
            t.start()
            logger.debug('Created thread')

    #Define the functionality each worker gets
    def doWork(self):
        """Worker loop: process queued case searches until the queue drains.

        Runs in daemon threads and may spawn additional workers while the
        thread count stays below ``concurrent`` and below the queue size.
        """
        #Keep the thread alive until the queue is empty
        while not q.empty():
            thread_count = threading.active_count()
            #Make sure that the thread count is lower than configured limit and is lower than the queue size
            if thread_count < concurrent and thread_count < q.qsize():
                new_thread_count = thread_count + 1
                logger.info(
                    'Current queue size(%i) allows more threads. Creating additional thread: %i'
                    % (q.qsize(), new_thread_count))
                t = Thread(target=self.doWork)
                t.daemon = True
                t.start()
                logger.debug('Created thread: %i' % new_thread_count)

            #Retrieve a queued item
            queued_item = q.get()

            #Handle a queued item based on its provided action
            if queued_item['action'] == "search_observables":
                logger.info('Working on %s from queue, caseid: %s' %
                            (queued_item['action'], queued_item['data']['id']))

                case_data = queued_item['data']
                logger.debug("event: %s" % case_data)
                #Store CaseID
                caseid = case_data['id']

                #Locals instead of self.* here: several worker threads run
                #this method concurrently and shared attributes raced
                response = self.thapi.get_case_observables(caseid)

                #BUGFIX: searched_for used to be reset inside the loop, so
                #only the LAST observable decided whether the case got its
                #search timestamps updated, and an empty observable list
                #raised NameError. Initialize it once, before the loop.
                searched_for = False
                for observable in response.json():
                    logger.debug("observable: %s" % observable)
                    logger.debug("current_time %s, observable_time %s" %
                                 (self.current_time, observable['startDate']))
                    #Search observables newer than 60 days, or 180 days for
                    #TLP:RED (3). Thresholds are milliseconds; the old
                    #15552000 value was ~4.3 hours, not 6 months.
                    age = self.current_time - observable['startDate']
                    if age < 5184000000 or (observable['tlp'] == 3
                                            and age < 15552000000):
                        if observable['dataType'] in self.qr_enabled_datatypes:
                            supported_observable = observable['_id']

                            #Trigger a search for the supported ioc
                            logger.info(
                                'Launching analyzers for observable: {}'.
                                format(supported_observable))
                            analyzer_response = self.thapi.run_analyzer(
                                "Cortex-intern", supported_observable,
                                "IBMQRadar_Search_Automated_0_1")
                            self.check_response(analyzer_response)
                            searched_for = True

                if searched_for:
                    #Record the search run on the case via custom fields
                    case = Case()
                    case.id = caseid

                    logger.info('Updating case %s' % case.id)

                    #Only the customFields attribute is written back
                    fields = ['customFields']

                    #Preserve firstSearched, bump lastSearched to now
                    case.customFields = CustomFieldHelper()\
                        .add_date('firstSearched', case_data['customFields']['firstSearched']['date'])\
                        .add_date('lastSearched', self.current_time)\
                        .build()

                    #Update the case
                    update_response = self.thapi.update_case(case, fields)
                    self.check_response(update_response)

        logger.info("Queue is empty, nothing left to do")
Пример #11
0
class TheHive(AppBase):
    """Legacy TheHive app (credentials pulled from the ``secret`` module).

    Inherit from the AppBase class to have Redis, logging, and console
    logging set up behind the scenes.
    """
    __version__ = "0.0.3"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        self.thehive = TheHiveApi(secret.url, secret.apikey)
        super().__init__(redis, logger, console_logger)

    async def show_secret(self):
        """Debug helper: echo the configured url and apikey."""
        return "url=%s, apikey=%s" % (secret.url, secret.apikey)

    async def get_case_count(self, title_query):
        """Return the number of cases whose title matches title_query."""
        response = self.thehive.find_cases(query=String("title:'%s'" %
                                                        title_query),
                                           range='all',
                                           sort=[])
        casecnt = len(response.json())
        return casecnt

    async def string_contains(self, field, string_check):
        """True when string_check occurs in the lowercased field.

        NOTE: string_check is used as given (not lowercased) - matching
        the original behavior.
        """
        return string_check in field.lower()

    async def string_startswith(self, field, string_check):
        """True when the lowercased field starts with string_check."""
        return field.lower().startswith(string_check)

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, field_type, cur_id):
        """Fetch a TheHive object by type and id; return it as JSON text.

        BUGFIX: serialization now uses json.dumps instead of the old
        str().replace() chain, which broke on values containing quotes
        and never converted None to null. Also, "case_template" was a
        copy-paste of get_case_tasks.
        """
        import json

        field_type = field_type.lower()
        if field_type == "alert":
            ret = self.thehive.get_alert(cur_id)
        elif field_type == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type == "case_template":
            ret = self.thehive.get_case_template(cur_id)
        elif field_type == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info." % field_type

        return json.dumps(ret.json())

    # Not sure what the data should be
    async def update_field_string(self, field_type, cur_id, field, data):
        """Update one field of an alert via the raw REST API.

        A data value starting with "%s" means "append to the existing
        value". Status changes go through the markAsRead/markAsUnread
        endpoints. Returns the HTTP status code, or 0 for unsupported
        field types.
        """
        if field_type.lower() != "alert":
            return 0

        newdata = {}
        if data.startswith("%s"):
            ticket = self.thehive.get_alert(cur_id)
            if ticket.status_code != 200:
                pass  # best effort: fall through and try anyway

            newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
        else:
            newdata[field] = data

        target = "%s/api/alert/%s" % (secret.url, cur_id)
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % secret.apikey,
        }
        if field == "status":
            if data == "New" or data == "Updated":
                target = "%s/markAsUnread" % target
            elif data == "Ignored":
                target = "%s/markAsRead" % target

            ret = requests.post(target, headers=headers)
        else:
            ret = requests.patch(target, headers=headers, json=newdata)

        return ret.status_code
Пример #12
0
class Offense(object):
    """
        Class used for handling offenses and customers. 
        Uses customer.py to handle each and every customer in the configuration file.
    """

    def __init__(self):
        """Set up customer tracking and, when enabled, TheHive integration.

        When cfg.TheHive is set, creates a TheHive API client against the
        configured host and starts a Cortex log listener. Otherwise only
        the customer bookkeeping attributes exist (note: self.hive is then
        never defined, so TheHive-dependent code must check cfg.TheHive).
        """
        self.customers = []  # Customer objects, rebuilt by add_customers()
        self.db_status = False  # pickledb availability flag
        if cfg.TheHive:
            # NOTE(review): the final dict looks like a proxies mapping with
            # http/https proxying disabled - confirm against the TheHiveApi
            # signature used here (url, user, password, proxies).
            self.hive = TheHiveApi("http://%s" % cfg.hiveip, cfg.hiveusername, 
                            cfg.hivepassword, {"http": "", "https": ""})
            self.cortex_log_path = "log/cortex_analysis.log"
            self.cortex_listener = cortex_listen(self.cortex_log_path)

    # Function only in use when either customer_values.db does not exists or is empty
    def db_setup(self):
        """Load (or recreate) the pickledb customer-values database.

        Ensures database/customer_values.db exists, then loads it with
        pickledb. A corrupt or empty file that fails JSON parsing is
        deleted and recreated from scratch.
        """
        database = "%s/database/customer_values.db" % dir_path
        if not os.path.isfile(database):
            open(database, 'w+').close()

        try:
            # Second arg False: no auto-dump on write; callers dump explicitly
            self.db = pickledb.load(database, False)
        except pickledb.simplejson.scanner.JSONDecodeError:
            # Remove file, and recreate
            os.remove(database)
            logging.info("Creating database")
            self.db = pickledb.load(database, False)
        
    # Creates folders for customers.
    def create_customer_folder(self, customer_name):
        """Create database/customers/<customer_name> for offense backlogging.

        :param customer_name: customer name, used as the directory name
        """
        import errno

        customer_dir = "%s/database/customers/%s" % (dir_path, customer_name)
        try:
            # Create directly instead of exists()+makedirs(): the old
            # check-then-create pair raced with concurrent creation.
            os.makedirs(customer_dir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        
    # Creates database for customer if it doesnt exist and SEC token exists
    def create_db(self, name):
        """Initialize pickledb bookkeeping for a customer.

        Tracks the latest offenses plus counter/status keys per customer.

        :param name: customer name
        :return: True if the customer already existed, False if it was
            just initialized
        """
        self.db_setup()
        self.create_customer_folder(name)
        if name not in self.db.getall():
            self.db.lcreate(name)
            self.db.ladd(name, 0)
            self.db.set(name+"_counter", 0)
            self.db.set(name+"_status_code", 200)
            self.db.set(name+"_code_status", 0)
            self.db.dump()
            # BUGFIX: get_time was logged without calling it (missing
            # parentheses), printing the bound method repr, not the time.
            logging.info("%s Initialized database for %s" % (self.get_time(), name))
            return False
        return True

    # Gets current time for print format.
    def get_time(self):
        """Return the current time as "H:MM:SS", shifted +2 hours.

        Workaround for the host clock running two hours behind; the modulo
        keeps the hour in 0-23 (the old code could emit e.g. "25:10:00").
        """
        hourint = (int(time.strftime("%H")) + 2) % 24
        return "%d:%s" % (hourint, time.strftime("%M:%S"))

    # Reloading the complete customers object for every iteration
    def add_customers(self, customer_json):
        """
			Creates customer object => Loops through each and every one 
			and verifies if they exist or not in the customer list. (self.customers)
        """

        self.customers = []
        # Catches exception related to unbound variables
        try:
            for item in customer_json:
                try:
                    # Verifies Json data
                    if item['SEC'] and len(item['SEC']) is 36:
                        a = Customer(item['name'], item['SEC'], \
                                     item['target'], item['version'], \
                                     item['rules'], item['subnet'], \
                                     item['cert'], item['domain'])
                        logging.info("%s: Customer %s added/reloaded to customer" % (self.get_time(), item['name']))
                        self.create_db(item['name'])
                        self.customers.append(a)
                    else:
                        logging.info("%s: No SEC token found for %s" % (self.get_time(), item['name']))
                except KeyError as e:
                    logging.warning("%s: Bad key: %s" % (self.get_time(), e))
                    continue
        except UnboundLocalError:
            return

    # Checks if the json is valid with expected inputs
    def load_objects(self, customers=None):
        """Reload customer.json if it changed on disk, rebuilding customers.

        Compares the file mtime against the global json_time so new
        customers can be added in real time without a restart.

        :param customers: unused; kept (with a safe default instead of the
            old mutable []) for backward compatibility
        :return: add_customers() result, or None when nothing was reloaded
        """
        global json_time
        file = "%s/database/customer.json" % dir_path

        # Verifies if file has been edited.
        if os.path.getmtime(file) > json_time:
            json_time = os.path.getmtime(file)
            msg = "%s: Reloading %s because of timedifference" % (self.get_time(), file)
            if len(sys.argv) > 1:
                if sys.argv[1] in ("--verbose", "-v"):
                    print(msg)

            self.write_offense_log(msg)

            logging.info("%s: Reloading %s because of timedifference" % (self.get_time(), file))
        else:
            logging.info("%s: No changes made to %s" % (self.get_time(), file))
            return

        try:
            with open(file, 'r') as tmp:
                customer_json = json.loads(tmp.read())
        except IOError as e:
            logging.info("%s: %s" % (self.get_time(), e))
            return
        except ValueError as e:
            # json.loads raises ValueError (JSONDecodeError) on bad content
            logging.info("%s: %s" % (self.get_time(), e))
            return

        # Create customer info
        customer_value = self.add_customers(customer_json)
        return customer_value
        
    # Uses Sveve for SMS sending
    def send_sms(self, message):
        """
	    Originally made to send an SMS with the message variable to a specific number.
        """
        logging.info("%s: %s" % (self.get_time(), "Attempting to send sms"))

        if isinstance(message, dict):
            message = "\n".join(message['categories'])

        passwd=""

        # Measure to not make api calls for SMS service.
        if not passwd:
            logging.info("%s: %s" % (self.get_time(), "Aborting sms sending"))
            return

        username = "******"
        url = "https://sveve.no/SMS/SendMessage?"
        target = ""
        sender = "IT ME"

        tot_url = "%suser=%s&passwd=%s&to=%s&from=%s&msg=%s - %s" % (url, username, passwd, target, sender,  message['id'], message)
        tot_url += "%20SMS"
        logging.info("%s: should send alarm for ****\n%s" % (self.get_time(), tot_url))

        try:
            request = requests.get(tot_url, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,\
                requests.exceptions.ReadTimeout,\
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))

        return 

    # Runs the alarm
    def run_alarm(self, item, customer):
        """
	    Originally used to control on-screen offenses, but later found to be annoying.
        """
        logging.info("%s: New highest offense - %s - customer %s, %s" % \
            (self.get_time(), item['id'], customer.name, item['categories']))

        if self.db.get(customer.name+"_counter") is 0:
            self.db.set(customer.name+"_counter", \
            int(self.db.get(customer.name+"_counter"))+1)
            return

        logging.warning("%s: Sending alarm to %s" % (self.get_time(), customer.name))
        new_data = urllib.quote("Offense #%s: %s" % \
                            (item['id'], "\n".join(item['categories'])))

        # Return to only get one alarm at a time per customer.
        return False

    def reverse_list(self, customer, request):
        """
			Reverses a list. QRadar API > 7.0 wasn't stable.
        """
        tmp_arr = []
        if not customer.new_version:
            for i in range(len(request.json())-1, -1, -1):
                tmp_arr.append(request.json()[i])
            return tmp_arr
        else:
            return request.json()

    # Removes the "Range" header for some specific API calls.
    def remove_range_header(self, customer):
        """
			Removes a specific header. Depends on which API call is used.
        """
        headers = dict.copy(customer.header)

        try:
            del headers["Range"] 
        except KeyError as e:
            logging.warning("%s: Bad key: %s" % (self.get_time(), e))
        return headers

    # If it doesn't exist already
    def find_ip(self, customer, ID, headers, src_dst="src"):
        """Resolve a single QRadar address ID to its IP address.

        :param customer: customer object with a "target" host
        :param ID: address id to look up
        :param headers: unused; kept for interface compatibility
        :param src_dst: "src" for source addresses, "dst" for local
            destination addresses
        :return: the IP string, or False when the lookup fails
        """
        if src_dst == "dst":
            endpoint = "local_destination_addresses"
            search_field = "local_destination_ip"
        else:
            # NOTE(review): "source_address_ids" looks like a field name
            # rather than an endpoint - confirm against the QRadar API.
            endpoint = "source_address_ids"
            search_field = "source_ip"

        target_path = "https://%s/api/siem/%s" % (customer.target, endpoint)
        header = self.remove_range_header(customer)

        try:
            response = requests.get(target_path+"/%s?fields=id%s%s" %
                (str(ID), "%2C", search_field), headers=header, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout,
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
            # BUGFIX: the old code fell through and called .json() on the
            # "" placeholder, raising an uncaught AttributeError.
            return False

        try:
            return response.json()[search_field]
        except (KeyError, ValueError):
            # Missing field, or a body that isn't valid JSON
            return False

    # Gets a list of IDs related to IPs
    def get_reflist(self, customer, ref_name):
        """Fetch a QRadar reference set used to correlate customer.json rules.

        :param customer: customer object with a "target" host
        :param ref_name: name of the reference set
        :return: the requests response, or None when the call failed
        """
        headers = self.remove_range_header(customer)

        ref_list = "https://%s/api/reference_data/sets/%s" % (customer.target, ref_name)

        # BUGFIX: ref_set was left unbound when the request raised, turning
        # a logged network error into an UnboundLocalError at return.
        ref_set = None
        try:
            ref_set = requests.get("%s" % ref_list, headers=headers, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout,
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))

        return ref_set

    def get_network_list(self, network_list):
        """
        Extracts the subnet strings from a reference-set data listing,
        i.e. the networks considered more valuable (e.g. server networks).
        """
        return [entry["value"] for entry in network_list]

    # Returns 
    def get_affected_subnet(self, req, customer, network_list, id_list_name, src_dst):
        """
            Checks if the network found in an offense is part of the actual subnet
        """
        affected_subnet = []
        headers = self.remove_range_header(customer)

        if src_dst == "dst":
            ip_variable = "local_destination_ip"
            base_url = "https://%s/api/siem/local_destination_addresses/" % customer.target
            fields = "?fields=local_destination_ip" 
        elif src_dst == "src":
            ip_variable = "source_ip"
            base_url = "https://%s/api/siem/source_addresses/" % customer.target
            fields = "?fields=source_ip" 

        for ID in req.json()[id_list_name]:
            url = base_url+str(ID)+fields
            cnt = 0


            try:
                ip = requests.get(url, headers=headers, verify=False, timeout=5)
            except requests.exceptions.ConnectionError:
                continue

            try:
                ip = ip.json()[ip_variable]
            except KeyError as e:
                logging.warning("%s: %s" % (self.get_time(), e))
                continue

            for network in network_list:
                try:
                    if ip in netaddr.IPNetwork(network):
                        return ip

                except netaddr.core.AddrFormatError as e:
                    logging.warning("%s: %s" % (self.get_time(), e))
                    cnt += 1

        return False

    # Verifies alarms related to reference lists
    def verify_reflist(self, customer, req):
        """
            Verifies multiple reference set alarms. 
        """

        id_list = ["source_address_ids", "local_destination_address_ids"]
    
        affected_subnet = []

        # List of subnets to check
        for ref_set_list in customer.ref_list:
            ref_set = self.get_reflist(customer, ref_set_list)

            # Works because < 255
            if not ref_set.status_code is 200:
                logging.warning("Cannot access reflist.")
                continue

            try:
                network_list = self.get_network_list(ref_set.json()["data"])
            except KeyError as e:
                logging.warning("%s: %s" % (self.get_time(), e))
                if ref_set.json()["number_of_elements"] is 0:
                    msg = "%s might be empty for %s, no action taken." \
                            % (ref_set_list, customer.name)

                    if len(sys.argv) > 1:
                    	if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":	
                    		print(msg)

                    self.write_offense_log(msg) 
					

                continue

            src_affected_subnet = self.get_affected_subnet(req, customer, \
                    network_list, "source_address_ids", "src")
            if src_affected_subnet:
                #sys.stdout.write("SUBNET %s. " % src_affected_subnet)
                return True

            dst_affected_subnet = self.get_affected_subnet(req, customer, \
                    network_list, "local_destination_address_ids", "dst")

            if dst_affected_subnet:
                return True

        return False

    def check_alarm(self, ID, customer):
        """
        Verifies a single offense ID against the customer's rules.
        Fetches the offense, first checks the reference-list subnets,
        then matches every rule from customer.json; an alarm is run
        when all keys of one rule match.

        Returns:
            True when rule processing completed, False on any early
            bail-out (HTTP failure, non-200, no subnet hit).
        """
        fields = ""

        headers = self.remove_range_header(customer)

        try:
            req = requests.get("https://%s/api/siem/offenses/%s%s" % (customer.target, str(ID), fields),\
                     timeout=5, headers=headers, verify=False)
        except (requests.exceptions.ConnectionError,\
                requests.exceptions.ReadTimeout,\
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
            return False

        if req.status_code != 200:
            logging.warning("%s Unable to retrieve %s" % (self.get_time(), customer.target))
            return False

        # Checks reference lists from database/customer.json
        if customer.ref_list[0]:
            valid = self.verify_reflist(customer, req)
        else:
            return False

        # Skip rule matching if no reference list / subnet match
        if not valid:
            return False

        logging.info("%s: %s" % (self.get_time(), \
            "In subnet range. Verifying rules for %s" % customer.name))

        # Parse the offense once instead of re-parsing on every rule key.
        offense = req.json()

        # Checks rules only if offense contains IP in specified IP range
        rule_counter = 0
        for rules in customer.rules:
            # Iterate keys inside one rule
            for keys, values in rules.iteritems():
                # String-valued rule entries
                if not isinstance(values, int):
                    if values == ".*":
                        rule_counter += 1
                        continue
                    # Checks multiple arguments in same rule split on "|".
                    for split_item in values.split("|"):
                        for categories in offense[keys]:
                            # "!foo" entries veto the whole offense on a hit.
                            if split_item.lower().startswith("!") \
                                and split_item.lower()[1:] in categories.lower():
                                return False

                            if split_item.lower() in categories.lower():
                                rule_counter += 1

                # Integer-valued rule entries: threshold check
                else:
                    if offense[keys] > values:
                        rule_counter += 1
                    else:
                        break

            # Runs alarm if every key of the rule matched.
            # BUG FIX: was "is len(rules)" - identity compare on an int.
            if rule_counter == len(rules):
                msg = "RULES MATCHED. SHOULD SEND ALARM \o/"
                if len(sys.argv) > 1:
                    if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                        print(msg)

                self.write_offense_log(msg)

                logging.info("%s: Rule triggered - sending alarm" % self.get_time())
                self.run_alarm(offense, customer)
                break

            rule_counter = 0
        return True

    # Verify ID here
    def add_new_ID(self, customer, request):
        path = "database/customers/%s/%s" % (customer.name, str(request.json()["id"]))

        if not os.path.exists(path):
            with open(path, "w+") as tmp:
                json.dump(request.json(), tmp)

        logging.info("%s: Added new offense to %s" % (self.get_time(), path))

    # DISCORD SETUP 
    def discord_setup(self, ID, msg):
        alarm_msg = "%s - %s" % (ID, msg)
        call = ["python3.6", "%s/dependencies/chat.py" % dir_path, "\"%s\"" % alarm_msg]
        subprocess.call(" ".join(call), shell=True)
        logging.info("%s: Message sent to discord server." % self.get_time())

    # BEST LOGGER AYY \o/ LMAO
    def write_offense_log(self, data):
        with open("log/offense.log", "a") as tmp:
            try:
                tmp.write("\n%s" % str(data))
            except UnicodeEncodeError as e:
                tmp.write("\nError in parsing data.\n%s" % e)

    # Returns tasklist based on casetitle
    def get_hive_task_data(self, data):
        # Reload every time so it's editable while running.
        with open(cfg.incident_task, "r") as tmp:
            cur_data = json.load(tmp)

        # Is cur_data["description"] in data["description"]:
        for item in json.load(open(cfg.incident_task, "r"))["ruleslist"]:
            if item["description"].lower() in data["description"].lower():
                return item["result"]

    # Checks the normal local subnet ranges. Theres like 7 missing.
    def check_local_subnet(self, ip_address):
        # Returns false if ip not a local address 
        # Yes I know there are more..
        local_ranges = [
            "192.168.0.0/16",
            "172.16.0.0/12",
            "10.0.0.0/8"
        ]

        for item in local_ranges:
            if netaddr.IPAddress(ip_address) in netaddr.IPNetwork(item): 
                return False 

        return True 

    # IP verification lmao
    def verify_offense_source(self, input):
        try:
            netaddr.IPAddress(str(input))
            if not self.check_local_subnet(input):
                return False

            return True
        except netaddr.core.AddrFormatError:
            return False

    # Returns all IPs in an offense by ID
    def get_ip_data(self, customer, data):
        verify_local_ip = [] 

        # Should prolly cache this data.
        # Finds IPs based on and ID - destination
        if data["local_destination_count"] > 0:
            for item in data["local_destination_address_ids"]:
                ip_output = self.find_ip(customer, item, customer.header, "dst")
                if ip_output:
                    if ip_output not in verify_local_ip and self.check_local_subnet(ip_output):
                        verify_local_ip.append(str(ip_output))

        # Finds IPs based on and ID - source 
        if data["source_count"] > 0:
            for item in data["source_address_ids"]:
                ip_output = self.find_ip(customer, item, customer.header)
                if ip_output:
                    if ip_output not in verify_local_ip and self.check_local_subnet(ip_output):
                        verify_local_ip.append(str(ip_output))

        return verify_local_ip

    # Only created for IP currently.
    # Hardcoded for QRadar
    def get_hive_cases(self, customer, data):
        # Offense doesn't return all the IP-addresses.
        verify_local_ip = self.get_ip_data(customer, data)
        find_source = self.verify_offense_source(data["offense_source"])
        
        # Adds offense source if IP observed
        if find_source:
            verify_local_ip.append(str(data["offense_source"]))

        # Returns if no observables found
        # Also means a case will not be created.
        if not verify_local_ip:
            return False

        # Check basic case details first. Customername > Name of offense > category
        # Might be able to search title field for customer name as well. Tags can also be used.
        allcases = self.hive.find_cases(query={"_field": "status", "_value": "Open"})
        customer_caselist = []

        # Finds all the specified customers cases
        for item in allcases.json():
            if customer.name.lower() in item["title"].lower():
                customer_caselist.append(item)

        # Creates a case if no cases are found. Returns list of observed IoCs for case creation
        if not customer_caselist:
            return verify_local_ip 

        use_case = ""
        casename = ""
        # Looks for exact casename match 
        for case in customer_caselist:
            casetitle = case["title"].split(" - ")[1]
            if casetitle == data["description"]:
                use_case = case
                break

        if use_case:
            not_matching = []
            matching_categories = data["categories"]

        # Try to match two categories if exact name match isn't found
        if not use_case:
            # Least amount of categories needed to match
            category_match_number = 2

            category_counter = 0
            for case in customer_caselist:
                matching_categories = []
                not_matching = []
                for category in data["categories"]: 
                    if category in case["tags"]:
                        matching_categories.append(category)
                    else:
                        not_matching.append(category)

                if len(matching_categories) > (category_match_number-1):
                    use_case = case
                    break

        # Will create a new case if observable found and no similar case.
        if not use_case:
            return verify_local_ip 
                 
        # FIX - Hardcoded datatype
        datatype = "ip"
        actual_data = []

        # Finds actual observables for the specified case
        observables = [x["data"] for x in self.hive.get_case_observables(\
            use_case["id"]).json() if x["dataType"] == datatype]

        # Finds if observable exists in previous list
        actual_data = [x for x in verify_local_ip if not x in observables]

        # FIX - check logic here. Might need to add tags etc (offenseID) etc.
        # Only appends data if new observables are detected
        if not actual_data:
            return False

        # Defines what categories to append
        category_breaker = ""
        if not_matching:
            category_breaker = not_matching
        else:
            category_breaker = matching_categories
            
        self.add_observable_data(use_case["id"], actual_data, datatype, data, not_matching) 

        # False to not create another case
        return False

    # Add by caseid and list of specified datatype and a QRadar offense
    def add_observable_data(self, case_id, observables, datatype, data, category):
        observable_items = []
        data_items = []

        tags = [str(data["id"])]
        tags.extend(category)

        for item in observables:
            observable = CaseObservable(
                dataType=datatype,
                data=item,
                tlp=0,
                ioc=True,
                tags=tags,
                message="Possible IoC"
            )

            # Creates the observable
            ret = self.hive.create_case_observable(case_id, observable)
            if ret.ok:
                observable_items.append(ret.json())
                data_items.append(item)
            else:
                continue

        if data_items:
            self.cortex_listener.run_cortex_analyzer(datatype, data_items, observable_items)

    # TheHive case creation
    def create_hive_case(self, customer, data):
        create_hive_bool = self.get_hive_cases(customer, data)

        # Returns if case already merged.
        if not create_hive_bool:
            return False

        # Baseline for creating a case
        title = ("%s: %s - %s" % (customer.name, str(data["id"]), data["description"]))
	static_task = "Why did it happen? Check rule.",
        task_data = self.get_hive_task_data(data)
        tasks = [
            CaseTask(title=static_task)
        ]
        if task_data:
            for item in task_data:
                tasks.append(CaseTask(title=item))

        # Creates a case object
        case = Case(title=title, tlp=0, flag=False, tags=data["categories"], \
                description=data["description"], tasks=tasks)

        # Creates the actual case based on prior info
        ret = self.hive.create_case(case)

        if ret.ok:
            # FIX, datatype is static
            self.add_observable_data(ret.json()["id"], create_hive_bool, \
                "ip", data, data["categories"])
            return True 

        return False

    # Verifies the ID, and returns if it's not a new incident.
    def verify_ID(self, request, customer):
        # In case there are no offenses related to customer. Basically domain management.
        # Attempts to reanalyze in case of failed analysis jobs

        #self.cortex_listener.find_failed_cortex_jobs()

        try:
            if float(customer.version) < 7.0:
                try:
                    json_id = request.json()[len(request.json())-1]['id']
                except (ValueError, IndexError) as e:
                    logging.warning("%s: Customer %s: %s" % (self.get_time(), customer.name, e))
                    return False
                customer.new_version = False
            else:
                json_id = request.json()[0]['id']
        except IndexError:
            logging.info("No offenses for customer.")
            return

        # Use difference between last seen offense and newest.
        last_db = self.db.lget(customer.name, self.db.llen(customer.name)-1)
        cur_array = []
        if json_id > last_db:
            difference = 1

            # Not even a point /o\
            if not json_id-last_db is difference:
                difference = json_id-last_db

            # Looping through incase of earlier crash / multiple offenses in one minute
            for i in range(json_id, last_db, -1):
                cur_var = False 
                if i in self.db.get(customer.name):
                    continue

                # Verifies if the id actually exists
                for item in request.json():
                    if i == item['id']:
                        cur_var = True
                        break

                if not cur_var:
                    continue      

                logging.info("%s: %s: New highest offense found: %d" % (self.get_time(), customer.name, i))

                target = "https://%s/api/siem/offenses/%s" % (customer.target, str(i))
                new_header = self.remove_range_header(customer)

                try:
                    new_req = requests.get(target, headers=new_header, timeout=5, verify=False)
                except requests.exceptions.ConnectionError as e:
                    logging.warning("Internal alarmserver might be down: %s" % e)
                    continue
                except requests.exceptions.ReadTimeout as e:
                    logging.warning("Timeout %s" % e)
                    continue
                # Appends current offense to database/customers/customer/ID in json format. 
                # This is to backtrack 
                ID_ret = self.add_new_ID(customer, new_req)
                new_req = new_req.json()

                try: 
                    # Compatibility issue if missing prerequisites.
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), \
                                     "\n".join(new_req['categories'])))
                except TypeError as e:
                    logging.warning("%s: TypeError: %s" % (self.get_time(), e))
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), "Arbitrary categories"))
                except KeyError as e:
                    logging.warning("%s: KeyError: %s" % (self.get_time(), e))
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), "Arbitrary categories"))

                # Sends a local alarm if an alarmserver is running on the current system. 

                # Prints to screen. Try/catch only in case of errors.
                try:
                    msg = "%s: %s - %s - %s" % (self.get_time(), \
                        str(i).ljust(5), customer.name.ljust(10), ", ".join(new_req['categories']))
                    if len(sys.argv) > 1:
                    	if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":	
                    		print(msg)

                    self.write_offense_log(msg) 
					
                except TypeError as e:
                    logging.warning("%s: TypeError: %s" % (self.get_time(), e))
                except KeyError as e:
                    logging.warning("%s: KeyError: %s" % (self.get_time(), e))

                if cfg.TheHive:
                    self.create_hive_case(customer, new_req) 
                if cfg.discordname and cfg.discordpw:
                    self.discord_setup(str(i), ", ".join(new_req['categories']))

                # verifying if an alarm should be triggered.
                difference = json_id-self.db.llen(customer.name)-1

                # Adds data to the DB
                cur_array.append(i)

                alarm_check = self.check_alarm(i, customer)
                if not alarm_check:
                    continue 

            # Adds all the data to the database
            if cur_array:
                cur_array = sorted(cur_array)

                for items in cur_array:
                    self.db.ladd(customer.name, items)

                
        else:
            return False
	
    # Reload json every time, and check it to prevent failures. verify_json(self, x) 
    def check_connection(self):
        global resetcounter
        for customer in self.customers:
            self.db.dump()
            domain_field = ""
            self.db.set(customer.name+"_counter", int(self.db.get(customer.name+"_counter"))+1)

            # Verifies status codes
            if not self.db.get(customer.name+"_status_code") is 200 \
                and customer.fail_counter % 10 > 0:
                continue

            # Domain management because of some bullshit.
            if customer.domain > 0:
                domain_field = "?filter=domain_id%s%d" % (r'%3D', customer.domain)

            # Makes original request per customer
            try:
                request = requests.get('%s%s' % (customer.target_path, domain_field), \
                    headers=customer.header, timeout=5, verify=False)
            except (requests.exceptions.ConnectionError,\
                    requests.exceptions.ReadTimeout,\
                    AttributeError) as e:
                try:
                    logging.info("%s: Connection failure for %s" % \
                                (self.get_time(), customer.name))
                    continue
                except TypeError as e:
                    logging.warning("%s" % e)
                    self.db.set(customer.name+"_status_code", 401)
                    continue

            # Set previous status code?
            # Legacy, but doesn't hurt nothing \o/
            if request.status_code != 200:
                logging.info("%s: Not 200 for %s - %s" % (self.get_time(), customer.name, \
                            self.db.get(customer.name+"_status_code")))
                self.db.set(customer.name+"_status_code", request.status_code)
                continue
                
            # Sets previous status code in case of shutdown
            self.db.set(customer.name+"_status_code", request.status_code)

            verify_request = self.verify_ID(request, customer)
            if not verify_request: 
                continue
# --- Пример #13 (Example #13 - scraped-example separator, not code) ---
# 0
class Reporter(Responder):
    """
    This Reporter class automates the effort of producing a case-report and optionally all of its associated data,
    such as observables and tasks.
    The primary function of this algorithm, is to take a JSON-structure gathered from TheHive's API, and filter it
    as per the provided filter-parameters given on activation.

    The algorithm assumes, that the dataset that's worked on, is either of any primitive type like int and string or
    a specific data structure list or dict, hence the dict- and list-builders.

    In short, the algorithm generates a new filtered tree-structure in JSON-format, which can be N-wide and N-deep.
    """
    def __init__(self):
        Responder.__init__(self)

        # Whitelists of fields to keep when filtering the raw API JSON.
        self.case_data_filter = [
            "endDate", "startDate", "title", "createdAt", "caseId", "pap",
            "tlp", "severity", "owner", "createdBy", "updatedBy", "summary",
            "tags", "resolutionStatus", "impactStatus", "status",
            "customFields"
        ]
        self.case_observables_filter = [
            "data", "dataType", "sighted", "tags", "createdAt", "createdBy",
            "pap", "tlp", "ioc", "startDate", "status"
        ]
        self.case_tasks_filter = [
            "caseTasks", "updatedBy", "createdAt", "flag", "description",
            "title", "createdBy", "updatedAt", "order", "status", "group"
        ]

        # Responder configuration; mandatory parameters raise through
        # get_param in the same order as before.
        self.api_key = self.get_param('config.api_key', None, 'Missing API-key')
        self.https_address = self.get_param('config.https_address', 'localhost')
        self.https_port = self.get_param('config.https_port', 9000, 'Missing thehive port')
        self.smtp_host = self.get_param('config.smtp_host', 'localhost')
        self.smtp_port = self.get_param('config.smtp_port', '25')
        self.mail_from = self.get_param('config.from', None, 'Missing sender email address')

        self.api = TheHiveApi(
            "https://{}:{}".format(self.https_address, self.https_port), self.api_key)

    def get_case_data(self, case_id):
        """
        Contacts the TheHive-API and fetches the case body. (Maybe not useful
        once the program is integrated with Cortex, as the JSON-object given
        upon a call of the responder matches the data fetched here.)

        Args:
            case_id (str): id of the case to be gathered from

        Returns:
            dict: {"caseData": <case JSON>}; exits the process on failure.
        """
        response = self.api.get_case(case_id=case_id)

        # Bail out early on anything but a clean 200.
        if response.status_code != 200:
            print(f'ko: {response.status_code}/{response.text}')
            sys.exit(0)

        return {"caseData": response.json()}

    def get_case_observables(self, case_id: str):
        """
        Contacts the TheHive-API and fetches the observables of a case.

        Args:
            case_id (str): id of the case to be gathered from

        Returns:
            dict: {"caseObservables": <observables JSON>}; exits on failure.
        """
        response = self.api.get_case_observables(case_id=case_id)

        # Bail out early on anything but a clean 200.
        if response.status_code != 200:
            print(f'ko: {response.status_code}/{response.text}')
            sys.exit(0)

        return {"caseObservables": response.json()}

    def get_case_tasks(self, case_id: str):
        """
        Contacts the TheHive-API and fetches the tasks of a case.

        Args:
            case_id (str): id of the case to be gathered from

        Returns:
            dict: {"caseTasks": <tasks JSON>}; exits on failure.
        """
        response = self.api.get_case_tasks(case_id=case_id)

        # Bail out early on anything but a clean 200.
        if response.status_code != 200:
            print(f'ko: {response.status_code}/{response.text}')
            sys.exit(0)

        return {"caseTasks": response.json()}

    def dict_builder(self,
                     data: dict,
                     filter: list,
                     result_obj: object = None,
                     path: list = None):
        """
        1 of 2 algorithms to recursively generate a JSON-structure, filtered
        by the provided filters given upon activation of this module in
        Cortex. Handles dict nodes; list_builder handles list nodes.

        Args:
            data (dict): new node of values; each value may be a new node
                (list or dict) or a leaf (primitive type)
            filter (list): target keys the new JSON should contain
            result_obj: the new JSON-object holding the current progress
            path: keeps track of the depth of the new JSON-object, to
                maintain the original object-structure

        Examples:
            FILTER_CANDIDATES = ["caseData", "title", "description"] - this
            takes all the data from the caseData dict and will not go deeper
            to search for "title"/"description", since they are a subset.

        Returns:
            dict: the filtered structure with underlying levels built
            recursively
        """

        if path is None:
            path = []
        if result_obj is None:
            result_obj = {}

        for key, value in data.items():
            # Idiomatic "in" checks instead of direct __contains__ calls.
            if key in filter or filter in path:
                extended_path = self.extend_path(key, path)
                self.add_primitive_value(result_obj, '<br>', extended_path,
                                         value, 50)
                path.remove(key)

            elif isinstance(value, (dict, list)):
                extended_path = self.extend_path(key, path)
                if isinstance(value, dict):
                    exec(f'result_obj{extended_path} = {{}}')
                    self.dict_builder(value, filter, result_obj, path)
                else:
                    exec(f'result_obj{extended_path} = []')
                    self.list_builder(value, filter, result_obj, path)

                self.check_for_empty_object(key, extended_path, result_obj,
                                            path)

        return result_obj

    def list_builder(self,
                     data: list,
                     filter: list,
                     result_obj: object = None,
                     path: list = None):
        """
        1 of 2 algorithms to recursively generate a JSON-structure, filtered
        by the provided filters given upon activation of this module in
        Cortex. Handles list nodes; dict_builder handles dict nodes.

        Args:
            data (list): new node of values; each value may be a new node
                (list or dict) or a leaf (primitive type)
            filter (list): target values the new JSON should contain
            result_obj: the new JSON-object holding the current progress
            path: keeps track of the depth of the new JSON-object, to
                maintain the original object-structure

        Returns:
            dict: the filtered structure with underlying levels built
            recursively
        """
        if path is None:
            path = []
        if result_obj is None:
            result_obj = {}

        for value in data:
            # Idiomatic "in" checks instead of direct __contains__ calls.
            if value in filter or filter in path:
                key_chain = self.build_path(path)
                self.add_primitive_value(result_obj, '<br>', key_chain, value,
                                         50)

            else:
                if isinstance(value, dict):
                    exec(f'result_obj{self.build_path(path)}.append({{}})')
                elif isinstance(value, list):
                    exec(f'result_obj{self.build_path(path)}.append([])')

                # Index of the container appended above (or of the current
                # tail element when value is a primitive).
                base_list = f'result_obj{self.build_path(path)}'
                base_index = eval(f'len({base_list}) - 1')
                extended_path = self.extend_path(base_index, path)

                if isinstance(value, dict):
                    self.dict_builder(value, filter, result_obj, path)
                elif isinstance(value, list):
                    self.list_builder(value, filter, result_obj, path)

                self.check_for_empty_object(base_index, extended_path,
                                            result_obj, path)

        return result_obj

    def extend_path(self, key, path: list):
        """
        Descends one level in the tree-like JSON-object: records *key* in
        *path* (mutated in place) and returns the matching operator chain.

        Args:
            key: name (or list index) of the new node being entered.
            path: previous nodes, enabling chained index-operators.

        Returns:
            str: stringified index-operator chain including *key*.
        """
        path += [key]
        return self.build_path(path)

    def add_primitive_value(self,
                            result_obj,
                            string_separator: str,
                            extended_path: str,
                            value,
                            string_width=64):
        """
        Adds a new leaf-type value to the JSON-tree structure.
        Long strings get *string_separator* inserted as a line break.

        Args:
            result_obj: the JSON-object being built; mutated in place.
            string_separator: separator inserted into long strings.
            extended_path: chained index-operators locating the leaf.
            value: the primitive value to store.
            string_width: wrap threshold for string values.
        """
        if len(str(value)) > string_width and isinstance(value, str):
            formatted_value = self.insert(string_separator, value,
                                          string_width)
            # BUG FIX: reference the local by name instead of interpolating
            # its contents into the exec string, which broke on values
            # containing quotes or backslashes.
            exec(f"result_obj{extended_path} = formatted_value")
        else:
            exec(f"result_obj{extended_path} = value")

    @staticmethod
    def build_path(path: list):
        """
        Builds a chain of index operators, based on the elements in the provided path list.
        Args:
            path: list of index values.

        Returns:
            str: stringified chain of index operators
        """

        eval_string = ""
        for element in path:
            if isinstance(element, int):
                eval_string += f"[{element}]"
            else:
                eval_string += f"[\'{element}\']"

        return eval_string

    @staticmethod
    def insert(separator, string, line_width=64):
        """
             Helpermethod used to format strings of text.

             Args:
                 separator: operator to insert into text.
                 string: the text to add the elements to.
                 line_width: insert after how many characters.

             Returns:
                 str: formatted strings with the added elements in every interval.
             """
        words = iter(string.split())
        lines = []
        current = next(words)

        for word in words:
            if len(current) + 1 + len(word) > line_width:
                lines.append(current)
                current = word
            else:
                current += " " + word

        lines.append(current)
        result = separator.join(lines)
        return result

    @staticmethod
    def check_for_empty_object(key, key_chain: str, result_obj: object,
                               path: list):
        """
        Helpermethod for cleaning up empty objects in current level.

        Deletes the container addressed by *key_chain* from *result_obj*
        when it ended up empty, then removes *key* from *path* so the
        caller is back at the parent level.

        Args:
            key: current tail element of the key-chain to be removed.
            key_chain: generated chain of index brackets — a string such
                as "['a'][0]", produced by build_path.
            result_obj: current build of JSON-object.
            path: current level in tree structure; mutated in place.
        """

        # eval/exec resolve against the local name `result_obj`; key_chain
        # is trusted, internally generated — never pass user input here.
        if len(eval(f'result_obj{key_chain}')) == 0:
            exec(f'del result_obj{key_chain}')
        # NOTE(review): list.remove drops the FIRST occurrence of `key`,
        # which is not necessarily the tail when the same key appears at
        # several levels of `path` — confirm duplicates cannot occur.
        path.remove(key)

    def send_mail(self, report_name, report_body):
        """
        E-mail the generated HTML report for the current case or alert.

        The recipient address is taken from a 'mail:'-prefixed tag
        (thehive:case) or a 'mail' artifact (thehive:alert); when none is
        found the responder aborts via self.error().

        Args:
            report_name: base name for the attached HTML file.
            report_body: rendered HTML document to attach.
        """

        mail_to = None
        if self.data_type == 'thehive:case':
            # Search recipient address in tags
            tags = self.get_param('data.tags', None,
                                  'recipient address not found in tags')
            mail_tags = [t[5:] for t in tags if t.startswith('mail:')]
            if mail_tags:
                mail_to = mail_tags.pop()
            else:
                # Fixed copy-pasted message: this branch searched tags,
                # not observables.
                self.error('recipient address not found in tags')
        elif self.data_type == 'thehive:alert':
            # Search recipient address in artifacts
            artifacts = self.get_param(
                'data.artifacts', None,
                'recipient address not found in observables')
            mail_artifacts = [
                a['data'] for a in artifacts
                if a.get('dataType') == 'mail' and 'data' in a
            ]
            if mail_artifacts:
                mail_to = mail_artifacts.pop()
            else:
                self.error('recipient address not found in observables')
        else:
            self.error('Invalid dataType')

        msg = EmailMessage()
        msg['Subject'] = f'Conscia Incident and Response - Case# {self.get_param("data.caseId", None, "Missing case")}'
        msg['From'] = self.mail_from
        msg['To'] = mail_to
        msg.set_content('Case Report')

        msg.add_attachment(report_body,
                           subtype='html',
                           filename=f'{report_name}.html')

        # SMTP over implicit TLS; host/port come from responder config.
        with smtplib.SMTP_SSL(self.smtp_host, self.smtp_port) as smtp:
            smtp.send_message(msg)

        self.report({'message': 'message sent'})

    def operations(self, raw):
        """Post-report operations: tag the case as having been reported."""
        tag_case = self.build_operation('AddTagToCase', tag='report sent')
        return [tag_case]

    def run(self):
        """
        Responder entry point: collect case data, observables and tasks,
        render the HTML report template and e-mail the result.
        """
        Responder.run(self)

        case_id = self.get_param("data._id")

        # Build one flat dict holding the filtered case, observable and
        # task sections the template expects.
        filtered = self.dict_builder(self.get_case_data(case_id),
                                     self.case_data_filter)
        for fetch, data_filter in (
                (self.get_case_observables, self.case_observables_filter),
                (self.get_case_tasks, self.case_tasks_filter)):
            filtered.update(self.dict_builder(fetch(case_id), data_filter))

        with open("templates/report_template_plus_jinja.html", "r") as fh:
            report_template = Template(fh.read())

        html_report = report_template.render(
            data=filtered["caseData"],
            observables=filtered["caseObservables"],
            tasks=filtered["caseTasks"])

        self.send_mail("test_case", html_report)