Example #1
def main():
    api = TheHiveApi(url, api_key)
    cases = api.find_cases(range="all")
    cases = cases.json()

    workbook = xlsxwriter.Workbook(excel_path, {'strings_to_urls': False})
    worksheet = workbook.add_worksheet()

    bold = workbook.add_format({"bold": True})
    worksheet.write("A1", "TheHive ID", bold)
    worksheet.write("B1", "CaseID", bold)
    worksheet.write("C1", "Status", bold)
    worksheet.write("D1", "Title", bold)
    worksheet.write("E1", "Date", bold)
    worksheet.write("F1", "Owner", bold)
    worksheet.write("G1", "TLP", bold)
    worksheet.write("H1", "Tag", bold)

    row = 1
    for item in cases:
        item_id = item["id"] if "id" in item.keys() else item["_id"]
        worksheet.write(row, 0, item_id)
        worksheet.write(row, 1, item["caseId"])
        worksheet.write(row, 2, item["status"])
        worksheet.write(row, 3, item["title"])
        worksheet.write(
            row,
            4,
            datetime.datetime.fromtimestamp(
                item["startDate"] / 1000).strftime("%Y-%m-%d %H:%M:%S"),
        )
        worksheet.write(row, 5, item["owner"])
        worksheet.write(row, 6, item["tlp"])
        worksheet.write(row, 7, ",".join(item["tags"]))
        row += 1

    worksheet2 = workbook.add_worksheet()

    worksheet2.write("A1", "TheHive ID")
    worksheet2.write("B1", "Observable DataType")
    worksheet2.write("C1", "Observable Value")
    worksheet2.write("D1", "IOC")
    row = 1
    for item in cases:
        item_id = item["id"] if "id" in item.keys() else item["_id"]
        obs = api.get_case_observables(item_id,
                                       query=query,
                                       sort=["-startDate", "+ioc"],
                                       range="all")
        obs = obs.json()
        for ob in obs:
            worksheet2.write(row, 0, item["caseId"])
            worksheet2.write(row, 1, ob["dataType"])
            if ob["dataType"] != "file":
                worksheet2.write(row, 2, ob["data"])
            else:
                worksheet2.write(row, 2, ",".join(ob["attachment"]["hashes"]))
            worksheet2.write(row, 3, True if ob["ioc"] == 1 else False)
            row += 1
    workbook.close()
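Example #1 relies on module-level names and imports that are not shown in the snippet. A minimal setup sketch, assuming it lives in the same module as main(); the URL, API key, output path and empty query are placeholders, not part of the original:

import datetime

import xlsxwriter
from thehive4py.api import TheHiveApi

# Placeholder configuration assumed by main(); substitute real values.
url = "http://127.0.0.1:9000"       # TheHive base URL (assumed)
api_key = "YOUR_API_KEY"            # API key (assumed)
excel_path = "thehive_cases.xlsx"   # output workbook path (assumed)
query = {}                          # empty observable query, i.e. match everything (assumed)

if __name__ == "__main__":
    main()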
Example #2
def main():
    api = TheHiveApi(url, api_key)

    session = requests.Session()
    session.headers.update({"Authorization": "Bearer {}".format(api_key)})

    obs = api.get_case_observables(caseId,
                                   query={},
                                   sort=["-startDate", "+ioc"],
                                   range="all")
    obs = obs.json()
    for ob in obs:
        r = session.get(
            '{}/api/case/artifact/{}/similar?range=all&sort=-startDate'.format(
                url, ob['id']))
        data = r.json()
        if len(data) > 0:
            print(ob['data'], len(r.json()), 'results')
            titles = []
            for case in data:
                #print(case)
                cases = api.find_cases(query=Id(case['_parent']), range="all")
                print("\t - {} [{}]".format(cases.json()[0]['title'],
                                            case['_parent']))
            print()
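Example #2 likewise depends on module-level configuration plus the Id query helper and requests. A hedged sketch of that assumed context; every value below is a placeholder:

import requests
from thehive4py.api import TheHiveApi
from thehive4py.query import Id

url = "http://127.0.0.1:9000"    # TheHive base URL (assumed)
api_key = "YOUR_API_KEY"         # API key (assumed)
caseId = "AWxxxxxxxxxxxxxxxx"    # internal id of the case whose observables are checked (assumed)

if __name__ == "__main__":
    main()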
Example #3
def find_case_id(title, query, range, sort):
    hive_address = ''.join(settings.stored_hive_address[0])
    hive_api = ''.join(settings.stored_api_key[0])
    #Define the connection to thehive installation (including the generated API key).
    api = TheHiveApi(hive_address, hive_api, None, {'http': '', 'https': ''})

    response = api.find_cases(query=query, range=range, sort=sort)

    if response.status_code == 200:
        test = json.dumps(response.json(), indent=4, sort_keys=True)
        resp = json.loads(test)
        try:
            full_case_id = resp[0]['id']
            print(
                str(datetime.datetime.now()) + "  Case: " + str(query) +
                " found. Extracted full case id number (" + str(full_case_id) +
                ").")
            return full_case_id
        except IndexError:
            print(
                str(datetime.datetime.now()) + "  Case: " + str(query) +
                "has not been found.")
    else:
        #        print('ko: {}/{}'.format(response.status_code, response.text))
        sys.exit(0)
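find_case_id() takes a thehive4py query object plus the range and sort paging arguments, and reads the server address and key from a settings module. A hedged call sketch; the Eq query and paging values are illustrative only, and the title argument is accepted but not used inside the function body:

from thehive4py.query import Eq

# Look up a case by its numeric case number and keep the internal id for later calls.
full_id = find_case_id(
    "Lookup by case number",   # title: accepted but unused by the function body
    Eq("caseId", 1234),        # illustrative query on the case number (assumed)
    "0-1",                     # only the first hit is needed
    ["-createdAt"],            # newest first
)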
Example #4
def process_events(config, events):
    """
    Process events returned by Cyrating.
    """
    thapi = TheHiveApi(config.get('url', None), config.get('key'),
                       config.get('password', None), config.get('proxies'))

    for a in events:
        print("Working with " + a.get('name'))
        query = And(String("title:\"Reputation alert from Cyrating for \""),
                    Child('case_artifact', And(Eq('data', a.get('name')))))
        #query = String("title:\"Reputation alert from Cyrating for \"" + generate_title_from_event(a) + "\"")
        logging.debug('API TheHive - case: {}'.format(a.get('name')))
        response = thapi.find_cases(query=query, range='all', sort=[])
        logging.debug('API TheHive - Find case - status code: {}'.format(
            response.status_code))
        if response.status_code > 299:
            logging.debug('API TheHive - raw error output: {}'.format(
                response.raw.read()))
            continue
        case = response.json()

        if len(case) == 0:
            print("Create new case")
            create_th_case(thapi, a)
        else:
            print("Previous cases found - updating")
            for c in case:
                print(" - " + c.get('id') + " (" + c.get('title') + ")")
                update_case(thapi, c, a)
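process_events() builds its search with the thehive4py query DSL (And, String, Child, Eq) and expects a config mapping plus a list of Cyrating events; create_th_case() and update_case() are defined elsewhere in the same script. A hedged driving sketch; the config values and the event shape beyond the name field are assumptions:

from thehive4py.query import And, Child, Eq, String  # helpers used by process_events

config = {
    "url": "http://127.0.0.1:9000",  # TheHive base URL (assumed)
    "key": "YOUR_API_KEY",           # API key (assumed)
    "password": None,
    "proxies": None,
}

# Only the "name" field is required by the search built above.
events = [{"name": "example.org"}]

process_events(config, events)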
Example #5
def main():
    """Returns global dictionary data object

    Calls The Hives API then checks if a result was returned.
    If a result was returned, check the results and time since the result was created.
    """
    data = {}
    api = TheHiveApi(server_address, api_credentials)
    r = api.find_cases(query=Eq('status', 'Open'), range='all', sort=[])
    if r.status_code == 200:
        i = 0
        data = {}
        while i < len(r.json()):
            check_date = datetime.date.today() - datetime.timedelta(days=7)
            if (r.json()[i]['createdAt'] / 1000) < time.mktime(
                    check_date.timetuple()):
                tasks = api.get_case_tasks(r.json()[i]['id'])
                inc, cnt = 0, 0
                while inc < len(tasks.json()):
                    if (tasks.json()[inc]['status']
                            == ('Waiting')) or (tasks.json()[inc]['status']
                                                == ('InProgress')):
                        cnt += 1
                    inc += 1
                data[(i)] = {
                    'id':
                    r.json()[i]['id'],
                    'owner':
                    r.json()[i]['owner'],
                    'createdAt': (time.strftime(
                        '%m/%d/%Y %H:%M:%S',
                        time.gmtime(r.json()[i]['createdAt'] / 1000.))),
                    'totalTasks':
                    len(tasks.json()),
                    'pendingTasks':
                    cnt
                }
            i += 1
    build(data)
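The cutoff test above compares TheHive's millisecond createdAt epoch with a Unix timestamp for "seven days ago". A standalone sketch of just that conversion, with an illustrative value:

import datetime
import time

created_at_ms = 1_700_000_000_000  # createdAt as returned by TheHive, in milliseconds (illustrative)

check_date = datetime.date.today() - datetime.timedelta(days=7)
cutoff = time.mktime(check_date.timetuple())  # seconds since the epoch, local midnight a week ago

print((created_at_ms / 1000) < cutoff)  # True when the case is older than a week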
Example #6
def search(title, query, range, sort):
    # This function can be used to perform specific searches and may be helpful for designing custom searches.
    # To use it, follow these example calls:
    #search("Case of title containing 'test hey now allstar'", String("title:'test hey now allstar'"), 'all', [])
    #search("Case of description containing albertid", String("description:" +AlbertID), 'all', [])
    #for full test cases visit this link: https://github.com/TheHive-Project/TheHive4py/tree/master/samples

    api = TheHiveApi(config['thehiveURL'], config['thehiveUser'],
                     config['thehivePassword'], {
                         'http': '',
                         'https': ''
                     })
    print(title)
    print('-----------------------------')
    response = api.find_cases(query=query, range=range, sort=sort)

    if response.status_code == 200:
        print("Success")

        print(json.dumps(response.json(), indent=4, sort_keys=True))
        print('')
    else:
        print('ko: {}/{}'.format(response.status_code, response.text))
        sys.exit(0)
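The commented examples above show how search() is meant to be invoked. A self-contained version of the first call, with the imports and a placeholder config it assumes (values are not from the original):

import json
import sys

from thehive4py.api import TheHiveApi
from thehive4py.query import String

config = {
    "thehiveURL": "http://127.0.0.1:9000",   # assumed
    "thehiveUser": "analyst@example.org",    # assumed
    "thehivePassword": "changeme",           # assumed
}

# Mirrors the first commented example: cases whose title contains the phrase.
search("Case of title containing 'test hey now allstar'",
       String("title:'test hey now allstar'"), "all", [])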
Example #7
class TheHive(AppBase):
    """
    An example of a Walkoff App.
    Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes.
    """

    __version__ = "1.1.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    # async def run_analyzer(self, apikey, url, title_query):
    #    self.thehive = TheHiveApi(url, apikey, cert=False)

    #    response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
    #    return response.text

    def __connect_thehive(self, url, apikey, organisation, version=None):
        # Forward the API version only when a caller asks for one (e.g. TheHive 4),
        # so callers that pass version=4 no longer raise a TypeError here.
        kwargs = {"cert": False}
        if organisation:
            kwargs["organisation"] = organisation
        if version:
            kwargs["version"] = version
        self.thehive = TheHiveApi(url, apikey, **kwargs)

    async def search_case_title(self, apikey, url, organisation, title_query):
        self.__connect_thehive(url, apikey, organisation)

        response = self.thehive.find_cases(query=ContainsString(
            "title", title_query),
                                           range="all",
                                           sort=[])

        return response.text

    async def custom_search(self,
                            apikey,
                            url,
                            organisation,
                            search_for,
                            custom_query,
                            range="all"):
        self.__connect_thehive(url, apikey, organisation)

        try:
            custom_query = json.loads(custom_query)
        except:
            # raise IOError("Invalid JSON payload received.")
            pass

        if search_for == "alert":
            response = self.thehive.find_alerts(query=custom_query,
                                                range="all",
                                                sort=[])
        else:
            response = self.thehive.find_cases(query=custom_query,
                                               range="all",
                                               sort=[])

        if (response.status_code == 200 or response.status_code == 201
                or response.status_code == 202):
            return response.text
        else:
            raise IOError(response.text)

    async def add_case_artifact(
        self,
        apikey,
        url,
        organisation,
        case_id,
        data,
        datatype,
        tags=None,
        tlp=None,
        ioc=None,
        sighted=None,
        description="",
    ):
        self.__connect_thehive(url, apikey, organisation)

        tlp = int(tlp) if tlp else 2
        # Guard against the None defaults before calling .lower()
        ioc = ioc.lower() == "true" if isinstance(ioc, str) else bool(ioc)
        sighted = sighted.lower() == "true" if isinstance(sighted, str) else bool(sighted)
        if not description:
            description = "Created by shuffle"

        if not tags:
            tags = []
        elif ", " in tags:
            tags = tags.split(", ")
        elif "," in tags:
            tags = tags.split(",")
        else:
            tags = [tags]  # keep a single tag instead of dropping it

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            tags=tags,
            message=description,
        )

        return self.thehive.create_case_observable(case_id, item).text

    async def search_alert_title(self,
                                 apikey,
                                 url,
                                 organisation,
                                 title_query,
                                 search_range="0-25"):
        self.__connect_thehive(url, apikey, organisation)

        # Could be "all" too
        if search_range == "":
            search_range = "0-25"

        response = self.thehive.find_alerts(query=ContainsString(
            "title", title_query),
                                            range=search_range,
                                            sort=[])

        return response.text

    async def create_case(
        self,
        apikey,
        url,
        organisation,
        template,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        self.__connect_thehive(url, apikey, organisation)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 0-2, not %s" % tlp

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            return "Severity needs to be a number from 0-2, not %d" % tlp

        Casetemplate = template if template else None

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            template=Casetemplate,
        )

        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        organisation,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
        artifacts="",
    ):
        self.__connect_thehive(url, apikey, organisation)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp

            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        all_artifacts = []
        if artifacts != "":
            # print("ARTIFACTS: %s" % artifacts)
            if isinstance(artifacts, str):
                # print("ITS A STRING!")
                try:
                    artifacts = json.loads(artifacts)
                except:
                    print("[ERROR] Error in parsing artifacts!")

            # print("ART HERE: %s" % artifacts)
            # print("ART: %s" % type(artifacts))
            if isinstance(artifacts, list):
                print("ITS A LIST!")
                for item in artifacts:
                    print("ITEM: %s" % item)
                    try:
                        artifact = thehive4py.models.AlertArtifact(
                            dataType=item["data_type"],
                            data=item["data"],
                        )

                        try:
                            # AlertArtifact is an object, not a dict: set the attribute
                            artifact.message = item["message"]
                        except:
                            pass

                        if item["data_type"] == "ip":
                            try:
                                if item["is_private_ip"]:
                                    message += " IP is private."
                            except:
                                pass

                        all_artifacts.append(artifact)
                    except KeyError as e:
                        print("Error in artifacts: %s" % e)

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
            artifacts=all_artifacts,
        )

        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(
        self,
        apikey,
        url,
        organisation,
        alert_id,
        dataType,
        data,
        message=None,
        tlp="2",
        ioc="False",
        sighted="False",
        ignoreSimilarity="False",
        tags=None,
    ):
        self.__connect_thehive(url, apikey, organisation, version=4)

        if tlp:
            tlp = int(tlp)
        else:
            tlp = 2

        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"

        if tags:
            tags = [x.strip() for x in tags.split(",")]
        else:
            tags = []

        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags,
        )

        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

        if ret.status_code > 299:
            raise ConnectionError(ret.text)

        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, organisation, field_type, cur_id):
        self.__connect_thehive(url, apikey, organisation)

        newstr = ""
        ret = ""
        if field_type.lower() == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif field_type.lower() == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type.lower() == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type.lower() == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type.lower() == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "case_template":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type.lower() == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type.lower() == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)

        return ret.text

    async def close_alert(self, apikey, url, organisation, alert_id):
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, organisation, alert_id):
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self,
                                     apikey,
                                     url,
                                     organisation,
                                     alert_id,
                                     case_template=None):
        self.__connect_thehive(url, apikey, organisation)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, organisation, alert_id,
                                    case_id):
        self.__connect_thehive(url, apikey, organisation)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, organisation, field_type, cur_id,
                           field, data):
        # Connect first so self.thehive is initialised for the get_alert call below.
        self.__connect_thehive(url, apikey, organisation)

        # This is kinda silly but..
        if field_type.lower() == "alert":
            newdata = {}

            if data.startswith("%s"):
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    pass

                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data

            # Bleh
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url

                ret = requests.post(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                    json=newdata,
                )

            return str(ret.status_code)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def delete_alert_artifact(self, apikey, url, organisation,
                                    artifact_id):
        self.__connect_thehive(url, apikey, organisation, version=4)
        return self.thehive.delete_alert_artifact(artifact_id).text

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, organisation, cortex_id,
                           analyzer_id, artifact_id):
        self.__connect_thehive(url, apikey, organisation)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self,
                              apikey,
                              url,
                              organisation,
                              task_id,
                              message,
                              filedata={}):
        if filedata["success"] == False:
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, organisation,
                                          case_id, tags, filedata):
        if filedata["success"] == False:
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
            verify=False,
        )
        return response.text

    # Get all artifacts of a given case
    async def get_case_artifacts(
        self,
        apikey,
        url,
        organisation,
        case_id,
        dataType,
    ):
        self.__connect_thehive(url, apikey, organisation)

        query = And(Eq("dataType", dataType)) if dataType else {}

        # Call the API
        response = self.thehive.get_case_observables(
            case_id, query=query, sort=["-startDate", "+ioc"], range="all")

        # Display the result
        if response.status_code == 200:
            # Get response data (renamed so it does not shadow the built-in list)
            observables = response.json()

            # Display response data
            return (json.dumps(observables, indent=4, sort_keys=True)
                    if observables else json.dumps(
                        {
                            "status": 200,
                            "message": "No observable results"
                        },
                        indent=4,
                        sort_keys=True,
                    ))
        else:
            return f"Failure: {response.status_code}/{response.text}"

    async def close_case(
        self,
        apikey,
        url,
        organisation,
        id,
        resolution_status="",
        impact_status="",
        summary="",
    ):

        self.__connect_thehive(url, apikey, organisation)
        case = self.thehive.case(id)
        case.status = "Resolved"
        case.summary = summary
        case.resolutionStatus = resolution_status
        case.impactStatus = impact_status

        result = self.thehive.update_case(
            case,
            fields=[
                "status",
                "summary",
                "resolutionStatus",
                "impactStatus",
            ],
        )

        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Update TheHive Case
    async def update_case(
        self,
        apikey,
        url,
        organisation,
        id,
        title="",
        description="",
        severity=None,
        owner="",
        flag=None,
        tlp=None,
        pap=None,
        tags="",
        status="",
        custom_fields=None,
        custom_json=None,
    ):
        self.__connect_thehive(url, apikey, organisation)

        # Get current case data and update fields if new data exists
        case = self.thehive.get_case(id).json()
        print(case)

        case_title = title if title else case["title"]
        case_description = description if description else case["description"]
        case_severity = int(severity) if severity else case["severity"]
        case_owner = owner if owner else case["owner"]
        case_flag = ((False if flag.lower() == "false" else True)
                     if flag else case["flag"])
        case_tlp = int(tlp) if tlp else case["tlp"]
        case_pap = int(pap) if pap else case["pap"]
        case_tags = tags.split(",") if tags else case["tags"]

        case_status = status if status else case["status"]
        case_customFields = case["customFields"]

        # Prepare the customfields
        customfields = CustomFieldHelper()
        if case_customFields:
            for key, value in case_customFields.items():
                if list(value)[0] == "integer":
                    customfields.add_integer(key, list(value.items())[0][1])
                elif list(value)[0] == "string":
                    customfields.add_string(key, list(value.items())[0][1])
                elif list(value)[0] == "boolean":
                    customfields.add_boolean(key, list(value.items())[0][1])
                elif list(value)[0] == "float":
                    customfields.add_float(key, list(value.items())[0][1])
                else:
                    print(
                        f'The value type "{value}" of the field {key} is not supported by this function.'
                    )

        custom_fields = json.loads(custom_fields) if custom_fields else {}
        for key, value in custom_fields.items():
            if type(value) == int:
                customfields.add_integer(key, value)
            elif type(value) == str:
                customfields.add_string(key, value)
            elif type(value) == bool:
                customfields.add_boolean(key, value)
            elif type(value) == float:
                customfields.add_float(key, value)
            else:
                print(
                    f'The value type "{value}" of the field {key} is not supported by this function.'
                )

        customfields = customfields.build()

        custom_json = json.loads(custom_json) if custom_json else {}

        # Prepare the fields to be updated
        case = Case(
            id=id,
            title=case_title,
            description=case_description,
            severity=case_severity,
            owner=case_owner,
            flag=case_flag,
            tlp=case_tlp,
            pap=case_pap,
            tags=case_tags,
            status=case_status,
            customFields=customfields,
            json=custom_json,
        )

        # resolutionStatus=case_resolutionStatus,

        result = self.thehive.update_case(
            case,
            fields=[
                "title",
                "description",
                "severity",
                "owner",
                "flag",
                "tlp",
                "pap",
                "tags",
                "customFields",
                "status",
            ],
        )

        return json.dumps(result.json(), indent=4, sort_keys=True)

    # Get TheHive Organisations
    async def get_organisations(
        self,
        apikey,
        url,
        organisation,
    ):
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }

        response = requests.get(
            f"{url}/api/organisation",
            headers=headers,
            verify=False,
        )

        return response.text

    # Create TheHive Organisation
    async def create_organisation(
        self,
        apikey,
        url,
        organisation,
        name,
        description,
    ):
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }

        data = {"name": f"{name}", "description": f"{description}"}

        response = requests.post(
            f"{url}/api/organisation",
            headers=headers,
            json=data,
            verify=False,
        )

        return response.text

    # Create User in TheHive
    async def create_user(
        self,
        apikey,
        url,
        organisation,
        login,
        name,
        profile,
    ):
        headers = {
            "Authorization": f"Bearer {apikey}",
            "Content-Type": "application/json",
        }

        data = {
            "login": f"{login}",
            "name": f"{name}",
            "profile": f"{profile}",
            "organisation": f"{organisation}",
        }

        response = requests.post(
            f"{url}/api/v1/user",
            headers=headers,
            json=data,
            verify=False,
        )

        return response.text
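The Shuffle/Walkoff app above is normally instantiated by the framework, which supplies the redis and logger objects and awaits each action. A hedged smoke-test sketch that drives one action directly; the None stand-ins for the runtime objects and the connection details are assumptions:

import asyncio

# Stand-ins for the runtime objects AppBase normally receives (assumed to be acceptable).
app = TheHive(redis=None, logger=None, console_logger=None)

result = asyncio.run(
    app.search_case_title(
        apikey="YOUR_API_KEY",        # assumed
        url="http://127.0.0.1:9000",  # assumed
        organisation=None,            # fall back to the default organisation
        title_query="phishing",
    )
)
print(result)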
Example #8
class TheHive:
    def __init__(self):
        # Instantiate the connector helper from config
        config_file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/config.yml"
        config = (yaml.load(open(config_file_path), Loader=yaml.SafeLoader)
                  if os.path.isfile(config_file_path) else {})
        self.helper = OpenCTIConnectorHelper(config)
        # Extra config
        self.thehive_url = get_config_variable("THEHIVE_URL",
                                               ["thehive", "url"], config)
        self.thehive_api_key = get_config_variable("THEHIVE_API_KEY",
                                                   ["thehive", "api_key"],
                                                   config)
        self.thehive_check_ssl = get_config_variable("THEHIVE_CHECK_SSL",
                                                     ["thehive", "check_ssl"],
                                                     config, False, True)
        self.thehive_organization_name = get_config_variable(
            "THEHIVE_ORGANIZATION_NAME", ["thehive", "organization_name"],
            config)
        self.thehive_import_from_date = get_config_variable(
            "THEHIVE_IMPORT_FROM_DATE",
            ["thehive", "import_from_date"],
            config,
            False,
            datetime.utcfromtimestamp(int(
                time.time())).strftime("%Y-%m-%d %H:%M:%S"),
        )
        self.update_existing_data = get_config_variable(
            "CONNECTOR_UPDATE_EXISTING_DATA",
            ["connector", "update_existing_data"],
            config,
        )
        self.identity = self.helper.api.identity.create(
            type="Organization",
            name=self.thehive_organization_name,
            description=self.thehive_organization_name,
        )
        self.thehive_api = TheHiveApi(self.thehive_url,
                                      self.thehive_api_key,
                                      cert=self.thehive_check_ssl)

    def generate_case_bundle(self, case):
        markings = []
        if case["tlp"] == 0:
            markings.append(TLP_WHITE)
        if case["tlp"] == 1:
            markings.append(TLP_GREEN)
        if case["tlp"] == 2:
            markings.append(TLP_AMBER)
        if case["tlp"] == 3:
            markings.append(TLP_RED)
        if len(markings) == 0:
            markings.append(TLP_WHITE)
        bundle_objects = []
        incident = StixXOpenCTIIncident(
            id=OpenCTIStix2Utils.generate_random_stix_id("x-opencti-incident"),
            name=case["title"],
            description=case["description"],
            first_seen=datetime.utcfromtimestamp(
                int(case["createdAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"),
            last_seen=datetime.utcfromtimestamp(
                int(case["updatedAt"]) / 1000).strftime("%Y-%m-%dT%H:%M:%SZ"),
            object_marking_refs=markings,
            labels=case["tags"] if "tags" in case else [],
            created_by_ref=self.identity["standard_id"],
        )
        bundle_objects.append(incident)
        # Get observables
        observables = self.thehive_api.get_case_observables(
            case_id=case["id"]).json()
        for observable in observables:
            if observable["dataType"] == "hash":
                if len(observable["data"]) == 32:
                    data_type = "file_md5"
                elif len(observable["data"]) == 40:
                    data_type = "file_sha1"
                elif len(observable["data"]) == 64:
                    data_type = "file_sha256"
                else:
                    data_type = "unknown"
            else:
                data_type = observable["dataType"]
            observable_key = OBSERVABLES_MAPPING.get(data_type)
            if observable_key is not None:
                stix_observable = SimpleObservable(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "x-opencti-simple-observable"),
                    key=observable_key,
                    value=observable["data"],
                    description=observable["message"],
                    x_opencti_score=80 if observable["ioc"] else 50,
                    object_marking_refs=markings,
                    labels=observable["tags"] if "tags" in observable else [],
                    created_by_ref=self.identity["standard_id"],
                    x_opencti_create_indicator=observable["ioc"],
                )
                stix_observable_relation = Relationship(
                    id=OpenCTIStix2Utils.generate_random_stix_id(
                        "relationship"),
                    relationship_type="related-to",
                    created_by_ref=self.identity["standard_id"],
                    source_ref=stix_observable.id,
                    target_ref=incident.id,
                    object_marking_refs=markings,
                )
                bundle_objects.append(stix_observable)
                bundle_objects.append(stix_observable_relation)
                if observable["sighted"]:
                    fake_indicator_id = (
                        "indicator--c1034564-a9fb-429b-a1c1-c80116cc8e1e")
                    stix_sighting = Sighting(
                        id=OpenCTIStix2Utils.generate_random_stix_id(
                            "sighting"),
                        first_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] /
                                1000)).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        last_seen=datetime.utcfromtimestamp(
                            int(observable["startDate"] / 1000 +
                                3600)).strftime("%Y-%m-%dT%H:%M:%SZ"),
                        where_sighted_refs=[self.identity["standard_id"]],
                        sighting_of_ref=fake_indicator_id,
                        custom_properties={
                            "x_opencti_sighting_of_ref": stix_observable.id
                        },
                    )
                    bundle_objects.append(stix_sighting)
        bundle = Bundle(objects=bundle_objects).serialize()
        return bundle

    def run(self):
        self.helper.log_info("Starting TheHive Connector...")
        while True:
            try:
                # Get the current timestamp and check
                timestamp = int(time.time())
                current_state = self.helper.get_state()
                if current_state is not None and "last_case_date" in current_state:
                    last_case_date = current_state["last_case_date"]
                    self.helper.log_info(
                        "Connector last_case_date: " +
                        datetime.utcfromtimestamp(last_case_date).strftime(
                            "%Y-%m-%d %H:%M:%S"))
                else:
                    last_case_date = parse(
                        self.thehive_import_from_date).timestamp()
                    self.helper.log_info("Connector has no last_case_date")

                self.helper.log_info("Get cases since last run (" +
                                     datetime.utcfromtimestamp(last_case_date).
                                     strftime("%Y-%m-%d %H:%M:%S") + ")")
                query = Or(
                    Gt("updatedAt", int(last_case_date * 1000)),
                    Child("case_task",
                          Gt("createdAt", int(last_case_date * 1000))),
                    Child("case_artifact",
                          Gt("createdAt", int(last_case_date * 1000))),
                )
                cases = self.thehive_api.find_cases(query=query,
                                                    sort="updatedAt",
                                                    range="0-100").json()
                now = datetime.utcfromtimestamp(timestamp)
                friendly_name = "TheHive run @ " + now.strftime(
                    "%Y-%m-%d %H:%M:%S")
                work_id = self.helper.api.work.initiate_work(
                    self.helper.connect_id, friendly_name)
                try:
                    for case in cases:
                        stix_bundle = self.generate_case_bundle(case)
                        self.helper.send_stix2_bundle(
                            stix_bundle,
                            update=self.update_existing_data,
                            work_id=work_id,
                        )
                except Exception as e:
                    self.helper.log_error(str(e))
                # Store the current timestamp as a last run
                message = "Connector successfully run, storing last_run as " + str(
                    timestamp)
                self.helper.log_info(message)
                self.helper.api.work.to_processed(work_id, message)
                current_state = self.helper.get_state()
                if current_state is None:
                    current_state = {"last_case_date": timestamp}
                else:
                    current_state["last_case_date"] = timestamp
                self.helper.set_state(current_state)
                time.sleep(60)
            except (KeyboardInterrupt, SystemExit):
                self.helper.log_info("Connector stop")
                exit(0)
            except Exception as e:
                self.helper.log_error(str(e))
                time.sleep(60)
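The OpenCTI connector above is usually started from a small entrypoint that instantiates the class and calls run(). The original file's __main__ block is not shown here, so this is only a hedged sketch of the common pattern:

if __name__ == "__main__":
    try:
        connector = TheHive()
        connector.run()
    except Exception as e:
        print(e)
        exit(1)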
Example #9
class TheHive(AppBase):
    """
    An example of a Walkoff App.
    Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes.
    """

    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    # async def run_analyzer(self, apikey, url, title_query):
    #    self.thehive = TheHiveApi(url, apikey, cert=False)

    #    response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
    #    return response.text

    async def search_cases(self, apikey, url, title_query):
        self.thehive = TheHiveApi(url, apikey, cert=False)

        response = self.thehive.find_cases(query=ContainsString(
            "title", title_query),
                                           range="all",
                                           sort=[])
        return response.text

    async def search_query(self, apikey, url, search_for, custom_query):
        self.thehive = TheHiveApi(url, apikey, cert=False)

        try:
            query = json.loads(custom_query)
        except:
            raise IOError("Invalid JSON payload received.")

        if search_for == "alert":
            response = self.thehive.find_alerts(query=query,
                                                range="all",
                                                sort=[])
        else:
            response = self.thehive.find_cases(query=query,
                                               range="all",
                                               sort=[])

        if response.status_code == 200:
            return response.text
        else:
            raise IOError(response.text)

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        self.thehive = TheHiveApi(url, apikey, cert=False)

        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=["Shuffle"],
            message="Created by shuffle",
        )

        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self,
                            apikey,
                            url,
                            title_query,
                            search_range="0-25"):
        self.thehive = TheHiveApi(url, apikey, cert=False)

        # Could be "all" too
        if search_range == "":
            search_range = "0-25"

        response = self.thehive.find_alerts(query=ContainsString(
            "title", title_query),
                                            range=search_range,
                                            sort=[])
        return response.text

    async def create_case(self,
                          apikey,
                          url,
                          title,
                          description="",
                          tlp=1,
                          severity=1,
                          tags=""):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 0-2, not %s" % tlp

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            return "Severity needs to be a number from 0-2, not %d" % tlp

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )

        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(
        self,
        apikey,
        url,
        type,
        source,
        sourceref,
        title,
        description="",
        tlp=1,
        severity=1,
        tags="",
    ):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Wutface fix
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp

            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 1-3, not %s" % severity

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 3 or severity < 1:
            return "Severity needs to be a number from 1-3, not %d" % severity

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )

        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert_artifact(self,
                                    apikey,
                                    url,
                                    alert_id,
                                    dataType,
                                    data,
                                    message=None,
                                    tlp="2",
                                    ioc="False",
                                    sighted="False",
                                    ignoreSimilarity="False",
                                    tags=None):
        self.thehive = TheHiveApi(url, apikey, cert=False, version=4)

        if tlp:
            tlp = int(tlp)
        else:
            tlp = 2

        ioc = ioc.lower().strip() == "true"
        sighted = sighted.lower().strip() == "true"
        ignoreSimilarity = ignoreSimilarity.lower().strip() == "true"

        if tags:
            tags = [x.strip() for x in tags.split(",")]
        else:
            tags = []

        alert_artifact = thehive4py.models.AlertArtifact(
            dataType=dataType,
            data=data,
            message=message,
            tlp=tlp,
            ioc=ioc,
            sighted=sighted,
            ignoreSimilarity=ignoreSimilarity,
            tags=tags)

        try:
            ret = self.thehive.create_alert_artifact(alert_id, alert_artifact)
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e
        if ret.status_code > 299:
            raise ConnectionError(ret.text)

        return ret.text

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id):
        self.thehive = TheHiveApi(url, apikey, cert=False)

        newstr = ""
        ret = ""
        if field_type.lower() == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1")
        elif field_type.lower() == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type.lower() == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type.lower() == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type.lower() == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "case_template":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type.lower() == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type.lower() == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info."
                % field_type)

        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self,
                                     apikey,
                                     url,
                                     alert_id,
                                     case_template=None):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        response = self.thehive.promote_alert_to_case(
            alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        # Connect first so self.thehive is initialised for the get_alert call below.
        self.thehive = TheHiveApi(url, apikey, cert=False)

        # This is kinda silly but..
        if field_type.lower() == "alert":
            newdata = {}

            if data.startswith("%s"):
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    pass

                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data

            # Bleh
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url

                ret = requests.post(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        "Content-Type": "application/json",
                        "Authorization": "Bearer %s" % apikey,
                    },
                    json=newdata,
                )

            return str(ret.status_code)
        else:
            return (
                "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info."
                % field_type)

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id,
                           artifact_id):
        self.thehive = TheHiveApi(url, apikey, cert=False)
        return self.thehive.run_analyzer(cortex_id, artifact_id,
                                         analyzer_id).text

    # Creates a task log in TheHive with file
    async def create_task_log(self,
                              apikey,
                              url,
                              task_id,
                              message,
                              filedata={}):
        if filedata["success"] == False:
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        data = {"_json": """{"message": "%s"}""" % message}
        response = requests.post(
            "%s/api/case/task/%s/log" % (url, task_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text

    # Creates an observable as a file in a case
    async def create_case_file_observable(self, apikey, url, case_id, tags,
                                          filedata):
        if filedata["success"] == False:
            return "No file to upload. Skipping message."

        headers = {
            "Authorization": "Bearer %s" % apikey,
        }

        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]

        files = {}
        if len(filedata["data"]) > 0:
            files = {
                "attachment": (filedata["filename"], filedata["data"]),
            }

        outerarray = {"dataType": "file", "tags": tags}
        data = {"_json": """%s""" % json.dumps(outerarray)}
        response = requests.post(
            "%s/api/case/%s/artifact" % (url, case_id),
            headers=headers,
            files=files,
            data=data,
        )
        return response.text
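
# A minimal standalone sketch (not part of the example above) of the same
# multipart upload that create_task_log() performs: TheHive expects the JSON
# payload in the "_json" form field and the file in the "attachment" part.
# url, apikey and task_id are placeholders.
import json
import requests

def upload_task_log_sketch(url, apikey, task_id, message, filename, filebytes):
    return requests.post(
        "%s/api/case/task/%s/log" % (url, task_id),
        headers={"Authorization": "Bearer %s" % apikey},
        data={"_json": json.dumps({"message": message})},
        files={"attachment": (filename, filebytes)},
    )
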
Example #10
0
class TheHive(AppBase):
    """
    An example of a Walkoff App.
    Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes.
    """
    __version__ = "1.0.0"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        super().__init__(redis, logger, console_logger)

    #async def run_analyzer(self, apikey, url, title_query):
    #    self.thehive = TheHiveApi(url, apikey)

    #    response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
    #    return response.text


    async def search_cases(self, apikey, url, title_query):
        self.thehive = TheHiveApi(url, apikey)

        response = self.thehive.find_cases(query=String("title:'%s'" % title_query), range='all', sort=[])
        return response.text

    async def add_observable(self, apikey, url, case_id, data, datatype, tags):
        self.thehive = TheHiveApi(url, apikey)

        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                # A single tag should still end up as a one-element list
                tags = [tags]
        else:
            tags = []

        item = thehive4py.models.CaseObservable(
            dataType=datatype,
            data=data,
            tlp=1,
            ioc=False,
            sighted=False,
            tags=["Shuffle"] + tags,
            message="Created by shuffle",
        )

        return self.thehive.create_case_observable(case_id, item).text

    async def search_alerts(self, apikey, url, title_query, search_range="0-25"):
        self.thehive = TheHiveApi(url, apikey)

        # Could be "all" too
        if search_range == "":
            search_range = "0-25"

        response = self.thehive.find_alerts(query=String("title:'%s'" % title_query), range=search_range, sort=[])
        return response.text

    async def create_case(self, apikey, url, title, description="", tlp=1, severity=1, tags=""):
        self.thehive = TheHiveApi(url, apikey)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Fall back to sane defaults when the fields are empty
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-3, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 0-2, not %s" % severity

            severity = int(severity)

        if tlp > 3 or tlp < 0:
            return "TLP needs to be a number from 0-3, not %d" % tlp
        if severity > 2 or severity < 0:
            return "Severity needs to be a number from 0-2, not %d" % severity

        case = thehive4py.models.Case(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
        )

        try:
            ret = self.thehive.create_case(case)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    async def create_alert(self, apikey, url, type, source, sourceref, title, description="", tlp=1, severity=1, tags=""):
        self.thehive = TheHiveApi(url, apikey)
        if tags:
            if ", " in tags:
                tags = tags.split(", ")
            elif "," in tags:
                tags = tags.split(",")
            else:
                tags = [tags]
        else:
            tags = []

        # Fall back to sane defaults when the fields are empty
        if not tlp:
            tlp = 1
        if not severity:
            severity = 1

        if isinstance(tlp, str):
            if not tlp.isdigit():
                return "TLP needs to be a number from 0-2, not %s" % tlp
            tlp = int(tlp)
        if isinstance(severity, str):
            if not severity.isdigit():
                return "Severity needs to be a number from 0-2, not %s" % severity

            severity = int(severity)

        if tlp > 2 or tlp < 0:
            return "TLP needs to be a number from 0-2, not %d" % tlp
        if severity > 2 or severity < 0:
            return "Severity needs to be a number from 0-2, not %d" % severity

        alert = thehive4py.models.Alert(
            title=title,
            tlp=tlp,
            severity=severity,
            tags=tags,
            description=description,
            type=type,
            source=source,
            sourceRef=sourceref,
        )

        try:
            ret = self.thehive.create_alert(alert)
            return ret.text
        except requests.exceptions.ConnectionError as e:
            return "ConnectionError: %s" % e

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, apikey, url, field_type, cur_id): 
        self.thehive = TheHiveApi(url, apikey)

        newstr = ""
        ret = ""
        if field_type.lower() == "alert":
            ret = self.thehive.get_alert(cur_id + "?similarity=1") 
        elif field_type.lower() == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type.lower() == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type.lower() == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type.lower() == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "case_template":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type.lower() == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type.lower() == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return "%s is not implemented. See https://github.com/frikky/shuffle-apps for more info." % field_type

        return ret.text

    async def close_alert(self, apikey, url, alert_id):
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_read(alert_id).text

    async def reopen_alert(self, apikey, url, alert_id):
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.mark_alert_as_unread(alert_id).text

    async def create_case_from_alert(self, apikey, url, alert_id, case_template=None):
        self.thehive = TheHiveApi(url, apikey)
        response = self.thehive.promote_alert_to_case(alert_id=alert_id, case_template=case_template)
        return response.text

    async def merge_alert_into_case(self, apikey, url, alert_id, case_id):
        self.thehive = TheHiveApi(url, apikey)
        req = url + f"/api/alert/{alert_id}/merge/{case_id}"
        ret = requests.post(req, auth=self.thehive.auth)
        return ret.text

    # Not sure what the data should be
    async def update_field(self, apikey, url, field_type, cur_id, field, data):
        self.thehive = TheHiveApi(url, apikey)

        # Only alerts are supported for now
        if field_type.lower() == "alert":
            newdata = {}

            # "%s" at the start of the data means "prepend the existing value",
            # so the current alert is fetched first.
            if data.startswith("%s"):
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    return "Failed to get alert %s: %s" % (cur_id, ticket.text)

                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data

            # Status changes use dedicated markAsRead/markAsUnread endpoints;
            # every other field is PATCHed directly on the alert.
            url = "%s/api/alert/%s" % (url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored": 
                    url = "%s/markAsRead" % url

                ret = requests.post(
                    url,
                    headers={
                        'Content-Type': 'application/json',
                        'Authorization': 'Bearer %s' % apikey
                    }
                )
            else:
                ret = requests.patch(
                    url,
                    headers={
                        'Content-Type': 'application/json',
                        'Authorization': 'Bearer %s' % apikey
                    }, 
                    json=newdata,
                )

            return str(ret.status_code)
        else:
            return "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info." % field_type

    # https://github.com/TheHive-Project/TheHiveDocs/tree/master/api/connectors/cortex
    async def run_analyzer(self, apikey, url, cortex_id, analyzer_id, artifact_id):
        self.thehive = TheHiveApi(url, apikey)
        return self.thehive.run_analyzer(cortex_id, artifact_id, analyzer_id).text
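
# The comma-separated tag parsing above is repeated in several actions; this is
# a small equivalent helper sketch. Assumption: a single tag should become a
# one-element list rather than being dropped.
def parse_tags(tags):
    if not tags:
        return []
    if ", " in tags:
        return tags.split(", ")
    if "," in tags:
        return tags.split(",")
    return [tags]

# parse_tags("malware, phishing") -> ["malware", "phishing"]
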
Example #11
0
        print("fubard")
    update_sirp(data)


def update_sirp(data):
    """Auto Closes The Hive cases that meet criteria

    Posts case closure
    """
    i = 0
    while i < len(data):
        if data[i]["SentinelResolved"] is True:
            API.case.update(
                data[i]["sirpId"],
                status="Resolved",
                resolutionStatus="Other",
                summary="Resolved at Sentinel One Console, autoclosed",
                tags=["SentinelOne API"],
            )
        else:
            pass
        i += 1


RESPONSE = API.find_cases(query=And(Eq("status", "Open"),
                                    Eq("owner", "sentinelone")),
                          range="all",
                          sort=[])
check_status(RESPONSE)
sys.exit()
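
# Sketch of the query composition used above: And/Eq from thehive4py.query
# combine into a single filter for open cases owned by a given user. The api
# instance and owner value are placeholders.
from thehive4py.query import And, Eq

def open_cases_for_owner(api, owner):
    # Matches cases that are both Open and assigned to the given owner
    return api.find_cases(query=And(Eq("status", "Open"), Eq("owner", owner)),
                          range="all", sort=[])
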
Example #12
0
class TheHiveScheduledSearches:
    def __init__(self, TheHive, QRadar):
        #Retrieve enabled datatypes from config
        self.qr_enabled_datatypes = QRadar['enabled_datatypes']

        #Add epoch in milliseconds of current time to a variable
        self.current_time = int(round(time.time() * 1000))

        #Assign The Hive API class
        self.thapi = TheHiveApi(TheHive.get('url', None), TheHive.get('key'),
                                TheHive.get('password', None),
                                TheHive.get('proxies'), TheHive.get('verify'))

    #Generic function to check the response from the hive
    def check_response(self, response):
        logger.debug('API TheHive - status code: {}'.format(
            response.status_code))
        if response.status_code > 299:
            logger.error('API TheHive - raw error output: {}'.format(
                response.raw.read()))
        logger.debug('Response: %s' % response.text)

    def observable_search(self):
        #Search for cases with first_searched
        logger.info('Searching for matching cases')
        self.query = Contains('customFields.firstSearched.date')
        self.response = self.thapi.find_cases(query=self.query)
        logger.debug('Response: %s' % self.response.text)

        #Compare first_searched and last_searched. If longer than 60 days. Do not search.
        for case_data in self.response.json():
            queue_item = {}
            #Create observable queue item
            queue_item['action'] = "search_observables"
            queue_item['data'] = case_data
            #Add case to the queue
            self.thapi_queue(queue_item)
        self.process_queue()
        while q.qsize() > 0:
            logger.info('Current queue size(%i)' % q.qsize())
            time.sleep(60)

    #Define the logic that makes it possible to perform asynchronous requests to The Hive in order to speed up the integration
    def thapi_queue(self, queued_request):

        #Build up the queue
        logger.info(
            'Adding action: %s to queue for: %s (Current queue length: %i)' %
            (queued_request['action'], queued_request['data']['id'],
             q.qsize()))
        q.put(queued_request)

    def process_queue(self):
        #Create the first thread
        thread_count = threading.active_count()
        if thread_count <= 1:
            logger.info('Creating thread')
            t = Thread(target=self.doWork)
            t.daemon = True
            t.start()
            logger.debug('Created thread')

    #Define the functionality each workers gets
    def doWork(self):
        #Build a loop that keeps the thread alive until queue is empty
        while not q.empty():
            #Build up the threads
            thread_count = threading.active_count()
            #Make sure that the thread count is lower than configured limit and is lower than the queue size
            if thread_count < concurrent and thread_count < q.qsize():
                new_thread_count = thread_count + 1
                logger.info(
                    'Current queue size(%i) allows more threads. Creating additional thread: %i'
                    % (q.qsize(), new_thread_count))
                t = Thread(target=self.doWork)
                t.daemon = True
                t.start()
                logger.debug('Created thread: %i' % new_thread_count)

            #Retrieve a queued item
            queued_item = q.get()

            #Handle a queued item based on its provided action
            if queued_item['action'] == "search_observables":
                logger.info('Working on %s from queue, caseid: %s' %
                            (queued_item['action'], queued_item['data']['id']))

                case_data = queued_item['data']
                logger.debug("event: %s" % case_data)
                #Store CaseID
                caseid = case_data['id']

                #If the case is within scope of the search range. Perform the search
                #if (case_data['customFields']['lastSearched']['date'] - case_data['customFields']['firstSearched']['date']) < 5184000000:
                #logger.info('Observables in case {} have not yet been searched for longer than two months. Starting analyzers'.format(case_data['id']))
                self.response = self.thapi.get_case_observables(caseid)

                #Perform a search for ioc's per case in the RS search results (Trigger Cortex Analyzer)
                #Track whether any observable in this case was actually searched for
                searched_for = False
                for observable in self.response.json():
                    logger.debug("observable: %s" % observable)
                    logger.debug("current_time %s, observable_time %s" %
                                 (self.current_time, observable['startDate']))
                    #Check if observables are not older than 2 months (5184000000 ms),
                    #or 6 months (15552000000 ms) for TLP:RED
                    if (((self.current_time - observable['startDate']) <
                         5184000000)
                            or (observable['tlp'] == 3 and
                                ((self.current_time - observable['startDate'])
                                 < 15552000000))):
                        self.searchtype = observable['dataType']
                        if self.searchtype in self.qr_enabled_datatypes:
                            self.supported_observable = observable['_id']

                            #Trigger a search for the supported ioc
                            logger.info(
                                'Launching analyzers for observable: {}'.
                                format(self.supported_observable))
                            self.response = self.thapi.run_analyzer(
                                "Cortex-intern", self.supported_observable,
                                "IBMQRadar_Search_Automated_0_1")
                            self.check_response(self.response)
                            searched_for = True

                if searched_for:
                    #Add customFields firstSearched and lastSearched
                    #Create a Case object carrying only the fields that will be updated
                    self.case = Case()

                    #Add the case id to the object
                    self.case.id = caseid

                    #Debug output
                    logger.info('Updating case %s' % self.case.id)

                    #Define which fields need to get updated
                    fields = ['customFields']

                    #Retrieve all required attributes from the alert and add them as custom fields to the case
                    self.customFields = CustomFieldHelper()\
                        .add_date('firstSearched', case_data['customFields']['firstSearched']['date'])\
                        .add_date('lastSearched', self.current_time)\
                        .build()

                    #Add custom fields to the case object
                    self.case.customFields = self.customFields

                    #Update the case
                    self.response = self.thapi.update_case(self.case, fields)
                    self.check_response(self.response)

        logger.info("Queue is empty, nothing left to do")
Example #13
0
class TheHive(AppBase):
    """
    An example of a Walkoff App.
    Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes.
    """
    __version__ = "0.0.3"
    app_name = "thehive"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        self.thehive = TheHiveApi(secret.url, secret.apikey)
        super().__init__(redis, logger, console_logger)

    async def show_secret(self):
        return "url=%s, apikey=%s" % (secret.url, secret.apikey)

    async def get_case_count(self, title_query):
        response = self.thehive.find_cases(query=String("title:'%s'" %
                                                        title_query),
                                           range='all',
                                           sort=[])
        casecnt = len(response.json())
        return casecnt

    async def string_contains(self, field, string_check):
        if string_check in field.lower():
            return True

        return False

    async def string_startswith(self, field, string_check):
        if field.lower().startswith(string_check):
            return True

        return False

    # Gets an item based on input. E.g. field_type = Alert
    async def get_item(self, field_type, cur_id):
        newstr = ""
        ret = ""
        if field_type.lower() == "alert":
            ret = self.thehive.get_alert(cur_id)
        elif field_type.lower() == "case":
            ret = self.thehive.get_case(cur_id)
        elif field_type.lower() == "case_observables":
            ret = self.thehive.get_case_observables(cur_id)
        elif field_type.lower() == "case_task":
            ret = self.thehive.get_case_task(cur_id)
        elif field_type.lower() == "case_tasks":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "case_template":
            ret = self.thehive.get_case_tasks(cur_id)
        elif field_type.lower() == "linked_cases":
            ret = self.thehive.get_linked_cases(cur_id)
        elif field_type.lower() == "task_log":
            ret = self.thehive.get_task_log(cur_id)
        elif field_type.lower() == "task_logs":
            ret = self.thehive.get_task_logs(cur_id)
        else:
            return "%s is not implemented. See https://github.com/frikky/walkoff-integrations for more info." % field_type

        # Crude conversion of the Python repr into JSON-style quoting and booleans
        newstr = str(ret.json()).replace("\'", "\"")
        newstr = newstr.replace("True", "true")
        newstr = newstr.replace("False", "false")
        return newstr

    # Not sure what the data should be
    async def update_field_string(self, field_type, cur_id, field, data):
        # Only alerts are supported for now
        if field_type.lower() == "alert":
            newdata = {}

            # "%s" at the start of the data means "prepend the existing value",
            # so the current alert is fetched first.
            if data.startswith("%s"):
                ticket = self.thehive.get_alert(cur_id)
                if ticket.status_code != 200:
                    return ticket.status_code

                newdata[field] = "%s%s" % (ticket.json()[field], data[2:])
            else:
                newdata[field] = data

            # Status changes use dedicated markAsRead/markAsUnread endpoints;
            # every other field is PATCHed directly on the alert.
            url = "%s/api/alert/%s" % (secret.url, cur_id)
            if field == "status":
                if data == "New" or data == "Updated":
                    url = "%s/markAsUnread" % url
                elif data == "Ignored":
                    url = "%s/markAsRead" % url

                ret = requests.post(url,
                                    headers={
                                        'Content-Type':
                                        'application/json',
                                        'Authorization':
                                        'Bearer %s' % secret.apikey
                                    })
            else:
                ret = requests.patch(
                    url,
                    headers={
                        'Content-Type': 'application/json',
                        'Authorization': 'Bearer %s' % secret.apikey
                    },
                    json=newdata,
                )

            return ret.status_code
        else:
            return 0
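
# Compact sketch of the status routing used in update_field_string() above:
# "New"/"Updated" map to markAsUnread, "Ignored" maps to markAsRead, and any
# other field change is a plain PATCH. url, apikey and alert_id are placeholders.
import requests

def set_alert_status_sketch(url, apikey, alert_id, status):
    headers = {"Content-Type": "application/json",
               "Authorization": "Bearer %s" % apikey}
    base = "%s/api/alert/%s" % (url, alert_id)
    if status in ("New", "Updated"):
        return requests.post("%s/markAsUnread" % base, headers=headers)
    if status == "Ignored":
        return requests.post("%s/markAsRead" % base, headers=headers)
    return requests.patch(base, headers=headers, json={"status": status})
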
Example #14
0
class Offense(object):
    """
        Class used for handling offenses and customers. 
        Uses customer.py to handle each and every customer in the configuration file.
    """

    def __init__(self):
        self.customers = []
        self.db_status = False
        if cfg.TheHive:
            self.hive = TheHiveApi("http://%s" % cfg.hiveip, cfg.hiveusername, 
                            cfg.hivepassword, {"http": "", "https": ""})
            self.cortex_log_path = "log/cortex_analysis.log"
            self.cortex_listener = cortex_listen(self.cortex_log_path)

    # Function only in use when either customer_values.db does not exists or is empty
    def db_setup(self):
        """
	    Creates db for a customer if it doesn't exist.	
        """
        database = "%s/database/customer_values.db" % dir_path
        if not os.path.isfile(database):
            open(database, 'w+').close()

        try:
            self.db = pickledb.load(database, False)
        except pickledb.simplejson.scanner.JSONDecodeError:
            # Remove file, and recreate
            os.remove(database)
            logging.info("Creating database")
            self.db = pickledb.load(database, False)
        
    # Creates folders for customers.
    def create_customer_folder(self, customer_name):
        """
	    Creates a directory for a customer to save offenses. Used for backlogging.
        """
        customer_dir = "%s/database/customers/%s" % (dir_path, customer_name )
        if not os.path.exists(customer_dir):
            os.makedirs(customer_dir)

        
    # Creates database for customer if it doesnt exist and SEC token exists
    def create_db(self, name):
        """
	    Uses pickledb to keep track of latest offenses.
        """
        self.db_setup()
        self.create_customer_folder(name)
        if not name in self.db.getall():
            self.db.lcreate(name)
            self.db.ladd(name, 0)
            self.db.set(name+"_counter", 0)
            self.db.set(name+"_status_code", 200)
            self.db.set(name+"_code_status", 0)
            self.db.dump()
            logging.info("%s Initialized database for %s" % (self.get_time, name))
            return False
        return True

    # Gets current time for print format.
    def get_time(self):
		# Workaround for wrong time
        hourstr = time.strftime("%H")
        hourint = int(hourstr)+2
        return "%d:%s" % (hourint, time.strftime("%M:%S"))

    # Reloading the complete customers object for every iteration
    def add_customers(self, customer_json):
        """
			Creates customer object => Loops through each and every one 
			and verifies if they exist or not in the customer list. (self.customers)
        """

        self.customers = []
        # Catches exception related to unbound variables
        try:
            for item in customer_json:
                try:
                    # Verifies Json data
                    if item['SEC'] and len(item['SEC']) == 36:
                        a = Customer(item['name'], item['SEC'], \
                                     item['target'], item['version'], \
                                     item['rules'], item['subnet'], \
                                     item['cert'], item['domain'])
                        logging.info("%s: Customer %s added/reloaded to customer" % (self.get_time(), item['name']))
                        self.create_db(item['name'])
                        self.customers.append(a)
                    else:
                        logging.info("%s: No SEC token found for %s" % (self.get_time(), item['name']))
                except KeyError as e:
                    logging.warning("%s: Bad key: %s" % (self.get_time(), e))
                    continue
        except UnboundLocalError:
            return

    # Checks if the json is valid with expected inputs
    def load_objects(self, customers = []):
        """
			Verifies if the JSON interpreted contains errors and if it should be refreshed or not.
			THis function exists to make real-time addition of new customers possible.
        """
        global json_time
        file = "%s/database/customer.json" % dir_path

        # Verifies if file has been edited.
        if os.path.getmtime(file) > json_time:
            json_time = os.path.getmtime(file)
            msg = "%s: Reloading %s because of timedifference" % (self.get_time(), file)
            if len(sys.argv) > 1:
                if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                    print(msg)

            self.write_offense_log(msg) 

            logging.info("%s: Reloading %s because of timedifference" % (self.get_time(), file))
        else:
            logging.info("%s: No changes made to %s" % (self.get_time(), file))
            return

        try:
            with open(file, 'r') as tmp: 
                #self.verify_json(open(file, 'r'))
                customer_json = json.loads(tmp.read())
        except IOError as e:
            logging.info("%s: %s" % (self.get_time(), e))
            return
        except ValueError as e:
            logging.info("%s: %s" % (self.get_time(), e))
            return 

        # Create customer info 
        customer_value = self.add_customers(customer_json)
        return customer_value
        
    # Uses Sveve for SMS sending
    def send_sms(self, message):
        """
	    Originally made to send an SMS with the message variable to a specific number.
        """
        logging.info("%s: %s" % (self.get_time(), "Attempting to send sms"))

        # Keep the offense id before flattening the message to a plain string
        msg_id = ""
        if isinstance(message, dict):
            msg_id = message.get("id", "")
            message = "\n".join(message['categories'])

        passwd=""

        # Measure to not make api calls for SMS service.
        if not passwd:
            logging.info("%s: %s" % (self.get_time(), "Aborting sms sending"))
            return

        username = "******"
        url = "https://sveve.no/SMS/SendMessage?"
        target = ""
        sender = "IT ME"

        tot_url = "%suser=%s&passwd=%s&to=%s&from=%s&msg=%s - %s" % (url, username, passwd, target, sender,  message['id'], message)
        tot_url += "%20SMS"
        logging.info("%s: should send alarm for ****\n%s" % (self.get_time(), tot_url))

        try:
            request = requests.get(tot_url, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,\
                requests.exceptions.ReadTimeout,\
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))

        return 

    # Runs the alarm
    def run_alarm(self, item, customer):
        """
	    Originally used to control on-screen offenses, but later found to be annoying.
        """
        logging.info("%s: New highest offense - %s - customer %s, %s" % \
            (self.get_time(), item['id'], customer.name, item['categories']))

        if self.db.get(customer.name+"_counter") is 0:
            self.db.set(customer.name+"_counter", \
            int(self.db.get(customer.name+"_counter"))+1)
            return

        logging.warning("%s: Sending alarm to %s" % (self.get_time(), customer.name))
        new_data = urllib.quote("Offense #%s: %s" % \
                            (item['id'], "\n".join(item['categories'])))

        # Return to only get one alarm at a time per customer.
        return False

    def reverse_list(self, customer, request):
        """
			Reverses a list. QRadar API > 7.0 wasn't stable.
        """
        tmp_arr = []
        if not customer.new_version:
            for i in range(len(request.json())-1, -1, -1):
                tmp_arr.append(request.json()[i])
            return tmp_arr
        else:
            return request.json()

    # Removes the "Range" header for some specific API calls.
    def remove_range_header(self, customer):
        """
			Removes a specific header. Depends on which API call is used.
        """
        headers = dict.copy(customer.header)

        try:
            del headers["Range"] 
        except KeyError as e:
            logging.warning("%s: Bad key: %s" % (self.get_time(), e))
        return headers

    # If it doesn't exist already
    def find_ip(self, customer, ID, headers, src_dst="src"):
        """
			Finds and IP based on ID.
			Almost same as above, but not in bulk.
        """
        search_field = ""
        find_ip = ""

        if src_dst == "dst":
            src_dst = "local_destination_addresses" 
            search_field = "local_destination_ip"
        else:
            src_dst = "source_address_ids" 
            search_field = "source_ip"

        target_path = "https://%s/api/siem/%s" % (customer.target, src_dst)
        header = self.remove_range_header(customer)

        try:
            find_ip = requests.get(target_path+"/%s?fields=id%s%s" % \
                (str(ID), "%2C", search_field), headers=header, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,\
                requests.exceptions.ReadTimeout,\
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))

        try:
            ret_val = find_ip.json()[search_field]
        except (KeyError, UnboundLocalError, AttributeError) as e:
            ret_val = False

        return ret_val

    # Gets the a list of IDs related to IPs 
    def get_reflist(self, customer, ref_name):
        """
            Gets the actual data used to correlate with customer.json rules.
        """
        fields = ""
        headers = self.remove_range_header(customer)
        
        ref_list = "https://%s/api/reference_data/sets/%s" % (customer.target, ref_name) 

        ref_set = None
        try:
            ref_set = requests.get("%s" % ref_list, headers=headers, timeout=5, verify=False)
        except (requests.exceptions.ConnectionError,\
                requests.exceptions.ReadTimeout,\
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))

        return ref_set

    def get_network_list(self, network_list):
        """
	    Finds the list of networks that are more valuable (e.g. server network)
        """
        arr = []
        for subnet in network_list:
            arr.append(subnet["value"])

        return arr

    # Returns 
    def get_affected_subnet(self, req, customer, network_list, id_list_name, src_dst):
        """
            Checks if the network found in an offense is part of the actual subnet
        """
        affected_subnet = []
        headers = self.remove_range_header(customer)

        if src_dst == "dst":
            ip_variable = "local_destination_ip"
            base_url = "https://%s/api/siem/local_destination_addresses/" % customer.target
            fields = "?fields=local_destination_ip" 
        elif src_dst == "src":
            ip_variable = "source_ip"
            base_url = "https://%s/api/siem/source_addresses/" % customer.target
            fields = "?fields=source_ip" 

        for ID in req.json()[id_list_name]:
            url = base_url+str(ID)+fields
            cnt = 0


            try:
                ip = requests.get(url, headers=headers, verify=False, timeout=5)
            except requests.exceptions.ConnectionError:
                continue

            try:
                ip = ip.json()[ip_variable]
            except KeyError as e:
                logging.warning("%s: %s" % (self.get_time(), e))
                continue

            for network in network_list:
                try:
                    if ip in netaddr.IPNetwork(network):
                        return ip

                except netaddr.core.AddrFormatError as e:
                    logging.warning("%s: %s" % (self.get_time(), e))
                    cnt += 1

        return False

    # Verifies alarms related to reference lists
    def verify_reflist(self, customer, req):
        """
            Verifies multiple reference set alarms. 
        """

        id_list = ["source_address_ids", "local_destination_address_ids"]
    
        affected_subnet = []

        # List of subnets to check
        for ref_set_list in customer.ref_list:
            ref_set = self.get_reflist(customer, ref_set_list)

            # Skip reference sets that could not be fetched
            if ref_set is None or ref_set.status_code != 200:
                logging.warning("Cannot access reflist.")
                continue

            try:
                network_list = self.get_network_list(ref_set.json()["data"])
            except KeyError as e:
                logging.warning("%s: %s" % (self.get_time(), e))
                if ref_set.json()["number_of_elements"] is 0:
                    msg = "%s might be empty for %s, no action taken." \
                            % (ref_set_list, customer.name)

                    if len(sys.argv) > 1:
                        if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                            print(msg)

                    self.write_offense_log(msg)


                continue

            src_affected_subnet = self.get_affected_subnet(req, customer, \
                    network_list, "source_address_ids", "src")
            if src_affected_subnet:
                #sys.stdout.write("SUBNET %s. " % src_affected_subnet)
                return True

            dst_affected_subnet = self.get_affected_subnet(req, customer, \
                    network_list, "local_destination_address_ids", "dst")

            if dst_affected_subnet:
                return True

        return False

    def check_alarm(self, ID, customer):
        """
            Verifies an ID, if it's new etc. Bulk loads and checks if the lowest number 
            is greater than the oldest saved one.
            The horrible forloop verifies if rules are matched based on rules in customer.json
        """
        fields = ""
        valid = True 

        headers = self.remove_range_header(customer)

        try:
            req = requests.get("https://%s/api/siem/offenses/%s%s" % (customer.target, str(ID), fields),\
                     timeout=5, headers=headers, verify=False) 
        except (requests.exceptions.ConnectionError,\
                requests.exceptions.ReadTimeout,\
                AttributeError) as e:
            logging.warning("%s: %s" % (self.get_time(), e))
            return False

        if req.status_code != 200:
            logging.warning("%s Unable to retrieve %s" % (self.get_time(), customer.target))
            return False

        # Checks reference lists from database/customer.json
        if customer.ref_list[0]:
            valid = self.verify_reflist(customer, req) 
        else:
            return False
    
        # Skips if reference list match
        # Can add alarm sending in this one
        
        if not valid:
            return False

        logging.info("%s: %s" % (self.get_time(), \
            "In subnet range. Verifying rules for %s" % customer.name))

        # Checks rules only if offense contains IP in specified IP range
        rule_counter = 0
        for rules in customer.rules: 
            # Iter keys inside rule
            for keys, values in rules.iteritems():
                # Do stuff if not integer values
                if not isinstance(values, int):
                    if values == ".*":
                        rule_counter += 1
                        continue
                    # Checks multiple arguments in same rule split on "|". 
                    for split_item in values.split("|"):
                        for categories in req.json()[keys]:
                            if split_item.lower().startswith("!") \
                                and split_item.lower()[1:] in categories.lower():
                                return False
                                #rule_counter -= 1

                            if split_item.lower() in categories.lower(): 
                                rule_counter += 1

                # INT CHECK
                else:
                    if req.json()[keys] > values:
                        rule_counter += 1
                    else:
                        break

            # Runs alarm if counter is high enough. 
            if rule_counter == len(rules):
                msg = "RULES MATCHED. SHOULD SEND ALARM \o/"
                if len(sys.argv) > 1:
                    if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                        print(msg)

                self.write_offense_log(msg)

                logging.info("%s: Rule triggered - sending alarm" % self.get_time())
                self.run_alarm(req.json(), customer)
                break

            rule_counter = 0
        return True

    # Verify ID here
    def add_new_ID(self, customer, request):
        path = "database/customers/%s/%s" % (customer.name, str(request.json()["id"]))

        if not os.path.exists(path):
            with open(path, "w+") as tmp:
                json.dump(request.json(), tmp)

        logging.info("%s: Added new offense to %s" % (self.get_time(), path))

    # DISCORD SETUP 
    def discord_setup(self, ID, msg):
        alarm_msg = "%s - %s" % (ID, msg)
        call = ["python3.6", "%s/dependencies/chat.py" % dir_path, "\"%s\"" % alarm_msg]
        subprocess.call(" ".join(call), shell=True)
        logging.info("%s: Message sent to discord server." % self.get_time())

    # Appends offense messages to log/offense.log
    def write_offense_log(self, data):
        with open("log/offense.log", "a") as tmp:
            try:
                tmp.write("\n%s" % str(data))
            except UnicodeEncodeError as e:
                tmp.write("\nError in parsing data.\n%s" % e)

    # Returns tasklist based on casetitle
    def get_hive_task_data(self, data):
        # Reload every time so it's editable while running.
        with open(cfg.incident_task, "r") as tmp:
            cur_data = json.load(tmp)

        # Is cur_data["description"] in data["description"]:
        for item in json.load(open(cfg.incident_task, "r"))["ruleslist"]:
            if item["description"].lower() in data["description"].lower():
                return item["result"]

    # Checks the common RFC1918 private subnet ranges (other special ranges are not covered)
    def check_local_subnet(self, ip_address):
        # Returns False if the IP is in a private range, True otherwise
        local_ranges = [
            "192.168.0.0/16",
            "172.16.0.0/12",
            "10.0.0.0/8"
        ]

        for item in local_ranges:
            if netaddr.IPAddress(ip_address) in netaddr.IPNetwork(item): 
                return False 

        return True 

    # Verifies that the offense source is a public IP address
    def verify_offense_source(self, input):
        try:
            netaddr.IPAddress(str(input))
            if not self.check_local_subnet(input):
                return False

            return True
        except netaddr.core.AddrFormatError:
            return False

    # Returns all IPs in an offense by ID
    def get_ip_data(self, customer, data):
        verify_local_ip = [] 

        # Should probably cache this data.
        # Finds IPs based on an ID - destination
        if data["local_destination_count"] > 0:
            for item in data["local_destination_address_ids"]:
                ip_output = self.find_ip(customer, item, customer.header, "dst")
                if ip_output:
                    if ip_output not in verify_local_ip and self.check_local_subnet(ip_output):
                        verify_local_ip.append(str(ip_output))

        # Finds IPs based on an ID - source
        if data["source_count"] > 0:
            for item in data["source_address_ids"]:
                ip_output = self.find_ip(customer, item, customer.header)
                if ip_output:
                    if ip_output not in verify_local_ip and self.check_local_subnet(ip_output):
                        verify_local_ip.append(str(ip_output))

        return verify_local_ip

    # Only created for IP currently.
    # Hardcoded for QRadar
    def get_hive_cases(self, customer, data):
        # Offense doesn't return all the IP-addresses.
        verify_local_ip = self.get_ip_data(customer, data)
        find_source = self.verify_offense_source(data["offense_source"])
        
        # Adds offense source if IP observed
        if find_source:
            verify_local_ip.append(str(data["offense_source"]))

        # Returns if no observables found
        # Also means a case will not be created.
        if not verify_local_ip:
            return False

        # Check basic case details first. Customername > Name of offense > category
        # Might be able to search title field for customer name as well. Tags can also be used.
        allcases = self.hive.find_cases(query={"_field": "status", "_value": "Open"})
        customer_caselist = []

        # Finds all the specified customers cases
        for item in allcases.json():
            if customer.name.lower() in item["title"].lower():
                customer_caselist.append(item)

        # Creates a case if no cases are found. Returns list of observed IoCs for case creation
        if not customer_caselist:
            return verify_local_ip 

        use_case = ""
        casename = ""
        # Looks for exact casename match 
        for case in customer_caselist:
            casetitle = case["title"].split(" - ")[1]
            if casetitle == data["description"]:
                use_case = case
                break

        if use_case:
            not_matching = []
            matching_categories = data["categories"]

        # Try to match two categories if exact name match isn't found
        if not use_case:
            # Least amount of categories needed to match
            category_match_number = 2

            category_counter = 0
            for case in customer_caselist:
                matching_categories = []
                not_matching = []
                for category in data["categories"]: 
                    if category in case["tags"]:
                        matching_categories.append(category)
                    else:
                        not_matching.append(category)

                if len(matching_categories) > (category_match_number-1):
                    use_case = case
                    break

        # Will create a new case if observable found and no similar case.
        if not use_case:
            return verify_local_ip 
                 
        # FIX - Hardcoded datatype
        datatype = "ip"
        actual_data = []

        # Finds actual observables for the specified case
        observables = [x["data"] for x in self.hive.get_case_observables(\
            use_case["id"]).json() if x["dataType"] == datatype]

        # Finds if observable exists in previous list
        actual_data = [x for x in verify_local_ip if not x in observables]

        # FIX - check logic here. Might need to add tags etc (offenseID) etc.
        # Only appends data if new observables are detected
        if not actual_data:
            return False

        # Defines what categories to append
        category_breaker = ""
        if not_matching:
            category_breaker = not_matching
        else:
            category_breaker = matching_categories
            
        self.add_observable_data(use_case["id"], actual_data, datatype, data, category_breaker)

        # False to not create another case
        return False

    # Add by caseid and list of specified datatype and a QRadar offense
    def add_observable_data(self, case_id, observables, datatype, data, category):
        observable_items = []
        data_items = []

        tags = [str(data["id"])]
        tags.extend(category)

        for item in observables:
            observable = CaseObservable(
                dataType=datatype,
                data=item,
                tlp=0,
                ioc=True,
                tags=tags,
                message="Possible IoC"
            )

            # Creates the observable
            ret = self.hive.create_case_observable(case_id, observable)
            if ret.ok:
                observable_items.append(ret.json())
                data_items.append(item)
            else:
                continue

        if data_items:
            self.cortex_listener.run_cortex_analyzer(datatype, data_items, observable_items)

    # TheHive case creation
    def create_hive_case(self, customer, data):
        create_hive_bool = self.get_hive_cases(customer, data)

        # Returns if case already merged.
        if not create_hive_bool:
            return False

        # Baseline for creating a case
        title = ("%s: %s - %s" % (customer.name, str(data["id"]), data["description"]))
	static_task = "Why did it happen? Check rule.",
        task_data = self.get_hive_task_data(data)
        tasks = [
            CaseTask(title=static_task)
        ]
        if task_data:
            for item in task_data:
                tasks.append(CaseTask(title=item))

        # Creates a case object
        case = Case(title=title, tlp=0, flag=False, tags=data["categories"], \
                description=data["description"], tasks=tasks)

        # Creates the actual case based on prior info
        ret = self.hive.create_case(case)

        if ret.ok:
            # FIX, datatype is static
            self.add_observable_data(ret.json()["id"], create_hive_bool, \
                "ip", data, data["categories"])
            return True 

        return False

    # Verifies the ID, and returns if it's not a new incident.
    def verify_ID(self, request, customer):
        # In case there are no offenses related to customer. Basically domain management.
        # Attempts to reanalyze in case of failed analysis jobs

        #self.cortex_listener.find_failed_cortex_jobs()

        try:
            if float(customer.version) < 7.0:
                try:
                    json_id = request.json()[len(request.json())-1]['id']
                except (ValueError, IndexError) as e:
                    logging.warning("%s: Customer %s: %s" % (self.get_time(), customer.name, e))
                    return False
                customer.new_version = False
            else:
                json_id = request.json()[0]['id']
        except IndexError:
            logging.info("No offenses for customer.")
            return

        # Use difference between last seen offense and newest.
        last_db = self.db.lget(customer.name, self.db.llen(customer.name)-1)
        cur_array = []
        if json_id > last_db:
            difference = 1

            # Recompute the real gap if more than one offense arrived since the last check
            if json_id - last_db != difference:
                difference = json_id - last_db

            # Looping through incase of earlier crash / multiple offenses in one minute
            for i in range(json_id, last_db, -1):
                cur_var = False 
                if i in self.db.get(customer.name):
                    continue

                # Verifies if the id actually exists
                for item in request.json():
                    if i == item['id']:
                        cur_var = True
                        break

                if not cur_var:
                    continue      

                logging.info("%s: %s: New highest offense found: %d" % (self.get_time(), customer.name, i))

                target = "https://%s/api/siem/offenses/%s" % (customer.target, str(i))
                new_header = self.remove_range_header(customer)

                try:
                    new_req = requests.get(target, headers=new_header, timeout=5, verify=False)
                except requests.exceptions.ConnectionError as e:
                    logging.warning("Internal alarmserver might be down: %s" % e)
                    continue
                except requests.exceptions.ReadTimeout as e:
                    logging.warning("Timeout %s" % e)
                    continue
                # Appends current offense to database/customers/customer/ID in json format. 
                # This is to backtrack 
                ID_ret = self.add_new_ID(customer, new_req)
                new_req = new_req.json()

                try: 
                    # Compatibility issue if missing prerequisites.
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), \
                                     "\n".join(new_req['categories'])))
                except TypeError as e:
                    logging.warning("%s: TypeError: %s" % (self.get_time(), e))
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), "Arbitrary categories"))
                except KeyError as e:
                    logging.warning("%s: KeyError: %s" % (self.get_time(), e))
                    new_data = urllib.quote("Offense #%s: %s" % (str(i), "Arbitrary categories"))

                # Sends a local alarm if an alarmserver is running on the current system. 

                # Prints to screen. Try/catch only in case of errors.
                try:
                    msg = "%s: %s - %s - %s" % (self.get_time(), \
                        str(i).ljust(5), customer.name.ljust(10), ", ".join(new_req['categories']))
                    if len(sys.argv) > 1:
                        if sys.argv[1] == "--verbose" or sys.argv[1] == "-v":
                            print(msg)

                    self.write_offense_log(msg)

                except TypeError as e:
                    logging.warning("%s: TypeError: %s" % (self.get_time(), e))
                except KeyError as e:
                    logging.warning("%s: KeyError: %s" % (self.get_time(), e))

                if cfg.TheHive:
                    self.create_hive_case(customer, new_req) 
                if cfg.discordname and cfg.discordpw:
                    self.discord_setup(str(i), ", ".join(new_req['categories']))

                # verifying if an alarm should be triggered.
                difference = json_id-self.db.llen(customer.name)-1

                # Adds data to the DB
                cur_array.append(i)

                alarm_check = self.check_alarm(i, customer)
                if not alarm_check:
                    continue 

            # Adds all the data to the database
            if cur_array:
                cur_array = sorted(cur_array)

                for items in cur_array:
                    self.db.ladd(customer.name, items)

                
        else:
            return False
	
    # Reload json every time, and check it to prevent failures. verify_json(self, x) 
    def check_connection(self):
        global resetcounter
        for customer in self.customers:
            self.db.dump()
            domain_field = ""
            self.db.set(customer.name+"_counter", int(self.db.get(customer.name+"_counter"))+1)

            # Verifies status codes
            if not self.db.get(customer.name+"_status_code") is 200 \
                and customer.fail_counter % 10 > 0:
                continue

            # Optional domain filtering for multi-domain QRadar deployments
            if customer.domain > 0:
                domain_field = "?filter=domain_id%s%d" % (r'%3D', customer.domain)

            # Makes original request per customer
            try:
                request = requests.get('%s%s' % (customer.target_path, domain_field), \
                    headers=customer.header, timeout=5, verify=False)
            except (requests.exceptions.ConnectionError,\
                    requests.exceptions.ReadTimeout,\
                    AttributeError) as e:
                try:
                    logging.info("%s: Connection failure for %s" % \
                                (self.get_time(), customer.name))
                    continue
                except TypeError as e:
                    logging.warning("%s" % e)
                    self.db.set(customer.name+"_status_code", 401)
                    continue

            # Store the latest status code for the next iteration
            # Legacy, but harmless
            if request.status_code != 200:
                logging.info("%s: Not 200 for %s - %s" % (self.get_time(), customer.name, \
                            self.db.get(customer.name+"_status_code")))
                self.db.set(customer.name+"_status_code", request.status_code)
                continue
                
            # Sets previous status code in case of shutdown
            self.db.set(customer.name+"_status_code", request.status_code)

            verify_request = self.verify_ID(request, customer)
            if not verify_request: 
                continue
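
# Quick sketch exercising the netaddr logic from check_local_subnet() above in
# isolation; the ranges mirror the ones hard-coded in the class and the sample
# addresses in the comment are arbitrary.
import netaddr

LOCAL_RANGES = ["192.168.0.0/16", "172.16.0.0/12", "10.0.0.0/8"]

def is_public(ip_address):
    # True when the address is outside every private range, i.e. worth adding
    # as an observable; mirrors check_local_subnet() returning True
    return not any(netaddr.IPAddress(ip_address) in netaddr.IPNetwork(net)
                   for net in LOCAL_RANGES)

# is_public("10.1.2.3") -> False, is_public("8.8.8.8") -> True

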
class HiveManagement:
    def __init__(
        self,
        config_file='C:\\automation-hunting\\the-hive\\conf\\thehive-provider.yaml'
    ):

        self.hive_url = None
        self.api_key = None
        self.alert_tags = None
        self.source = None
        self.alert_type = None
        self.case_tags = None
        self.ioc_tags = None

        if not self.get_config_data(config_file):
            raise Exception('Invalid Configuration File')

        self.api = TheHiveApi(self.hive_url, self.api_key)

    def get_config_data(self, yaml_file):
        with open(yaml_file, 'r') as ymlfile:
            cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)

        valid = False
        if self.validate_cfg_yml(cfg):
            self.hive_url = cfg['hive']['hive_url']
            self.api_key = cfg['hive']['api_key']
            self.alert_tags = cfg['hive']['alert_tags']
            self.source = cfg['hive']['source']
            self.alert_type = cfg['hive']['alert_type']
            self.case_tags = cfg['hive']['case_tags']
            self.ioc_tags = cfg['hive']['ioc_tags']
            valid = True
        return valid

    @staticmethod
    def validate_cfg_yml(cfg):
        if 'hive' not in cfg:
            print('Missing "hive" section in configuration')
            return False
        else:
            if 'hive_url' not in cfg['hive'] or 'api_key' not in cfg['hive']:
                return False
        return True

    def create_alarm(self,
                     title,
                     source_ref=None,
                     description='N/A',
                     alert_type='external',
                     source='LogRhythm',
                     iocs=None,
                     additional_fields=None,
                     additional_tags=None,
                     tlp=TLP.AMBER,
                     pap=PAP.AMBER,
                     severity=HiveSeverity.MEDIUM):

        if source_ref is None:
            source_ref = str(uuid.uuid4())[0:6]

        alert_tags = self.alert_tags.copy()
        if additional_tags is not None:
            for additional_tag in additional_tags:
                alert_tags.append(additional_tag)

        custom_fields_helper = CustomFieldHelper()
        if additional_fields is not None:
            for field in additional_fields:
                custom_fields_helper.add_string(field['name'], field['value'])
        custom_fields = custom_fields_helper.build()

        artifacts = list()
        if iocs is not None:
            for ioc in iocs:
                artifacts.append(
                    AlertArtifact(dataType=ioc['type'].value,
                                  data=ioc['value']))

        hive_alert = Alert(title=title,
                           tlp=tlp.value,
                           tags=alert_tags,
                           description=description,
                           type=alert_type,
                           source=source,
                           sourceRef=source_ref,
                           pap=pap.value,
                           artifacts=artifacts,
                           customFields=custom_fields,
                           severity=severity.value)

        response = self.api.create_alert(hive_alert)
        if response.status_code == 201:
            print('Alert created successfully')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)

        return response.json()

    def create_case(self,
                    title,
                    tasks=None,
                    tlp=TLP.AMBER,
                    pap=PAP.AMBER,
                    severity=HiveSeverity.MEDIUM,
                    additional_fields=None,
                    additional_tags=None,
                    flag=False,
                    description='N/A'):

        case_tags = self.case_tags.copy()
        if additional_tags is not None:
            for additional_tag in additional_tags:
                case_tags.append(additional_tag)

        custom_fields_helper = CustomFieldHelper()
        if additional_fields is not None:
            for field in additional_fields:
                custom_fields_helper.add_string(field['name'], field['value'])
        custom_fields = custom_fields_helper.build()

        new_tasks = list()
        if tasks is not None:
            for task in tasks:
                new_tasks.append(CaseTask(title=task))

        hive_case = Case(title=title,
                         tlp=tlp.value,
                         pap=pap.value,
                         description=description,
                         tags=case_tags,
                         severity=severity.value,
                         flag=flag,
                         customFields=custom_fields,
                         tasks=new_tasks)

        response = self.api.create_case(hive_case)
        if response.status_code == 201:
            print('Case created successfully')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)

        return response.json()

    def create_case_observable(self,
                               data_type: HiveDataType,
                               value: list,
                               tlp=TLP.AMBER,
                               ioc=True,
                               additional_tags=None,
                               description='LogRhythm IoC'):

        ioc_tags = self.ioc_tags.copy()
        if additional_tags is not None:
            for additional_tag in additional_tags:
                ioc_tags.append(additional_tag)

        # thehive4py's CaseObservable expects the camelCase keyword `dataType`
        # (as with AlertArtifact above); a `data_type` keyword would be silently ignored.
        hive_observable = CaseObservable(dataType=data_type.value,
                                         data=value,
                                         tlp=tlp.value,
                                         ioc=ioc,
                                         tags=ioc_tags,
                                         message=description)

        return hive_observable

    def add_observable_to_case(self, case_id, observable: CaseObservable):
        response = self.api.create_case_observable(case_id, observable)
        if response.status_code == 201:
            print('Observable successfully added to the case')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)

    def search_case(self,
                    title=None,
                    tlp: TLP = None,
                    pap: PAP = None,
                    severity: HiveSeverity = None,
                    or_operator=False):
        if title is None and tlp is None and pap is None and severity is None:
            print('Can\'t search without a filter')
            return None

        operators = list()
        if title is not None:
            operators.append(String('title: ' + urllib.parse.quote(title)))
        if tlp is not None:
            operators.append(Gte('tlp', tlp.value))
        if pap is not None:
            operators.append(Gte('pap', pap.value))
        if severity is not None:
            operators.append(Gte('severity', severity.value))

        if len(operators) == 1:
            query = operators[0]
        else:
            if or_operator:
                query = Or(operators)
            else:
                query = And(operators)

        response = self.api.find_cases(query=query, range='all', sort=[])
        if response.status_code == 200:
            print('Search completed successfully')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)

        return response.json()

    def promote_alert(self, alert_id):
        response = self.api.promote_alert_to_case(alert_id)
        if response.status_code == 201:
            print('Alert successfully promoted to a case')
            print(json.dumps(response.json(), indent=4, sort_keys=True))
        else:
            print('Error')
            print(response.text)

        return response.json()
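
A minimal usage sketch for the class above, assuming a local thehive-provider.yaml shaped like the fields read in get_config_data() and assuming the HiveDataType/HiveSeverity enums expose IP and HIGH members (these names, the tag values and the sample IP are illustrative, not part of the original code):

# Expected YAML layout, inferred from get_config_data():
#   hive:
#     hive_url: http://localhost:9000
#     api_key: YOUR_API_KEY
#     alert_tags: [logrhythm]
#     source: LogRhythm
#     alert_type: external
#     case_tags: [automation]
#     ioc_tags: [ioc]

hive = HiveManagement(config_file='thehive-provider.yaml')

# Raise an alert carrying a single IP indicator (HiveDataType.IP and
# HiveSeverity.HIGH are assumed enum members).
alert = hive.create_alarm(title='Suspicious outbound traffic',
                          description='Beaconing detected by LogRhythm',
                          iocs=[{'type': HiveDataType.IP, 'value': '203.0.113.10'}],
                          severity=HiveSeverity.HIGH)

# Promote the alert to a case and attach the same indicator as an observable
# ('id' is the field returned by TheHive 3.x; it may differ in other versions).
case = hive.promote_alert(alert['id'])
observable = hive.create_case_observable(HiveDataType.IP, ['203.0.113.10'])
hive.add_observable_to_case(case['id'], observable)
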
Пример #16
0
            i += 1
    else:
        print('fubard')
    update_sirp(data)

def update_sirp(data):
    """Auto-closes TheHive cases that meet the criteria.

    Posts the case closure back to TheHive.
    """
    for case in data:
        if case['SentinelResolved'] is True:
            try:
                API.case.update(case['sirpId'],
                                status='Resolved',
                                resolutionStatus='Other',
                                summary='Case resolved at the SentinelOne console, auto-closed',
                                tags=['SentinelOne API'])
            except Exception:
                # Skip cases that fail to update and keep processing the rest.
                pass

RESPONSE = API.find_cases(query=And(Eq('status', 'Open'), Eq('owner', 'sentinelone')),
                          range='all',
                          sort=[])
check_status(RESPONSE)
exit()
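
The check_status() helper is truncated in this example, but update_sirp() only needs a list of dicts carrying a sirpId and a SentinelResolved flag, so a hypothetical call (the case ids below are placeholders, not real TheHive ids) might look like this:

# Hypothetical input for update_sirp(): one dict per open TheHive case, with the
# case id and whether the matching SentinelOne threat was already resolved.
sample_data = [
    {'sirpId': 'example_case_1', 'SentinelResolved': True},   # would be auto-closed
    {'sirpId': 'example_case_2', 'SentinelResolved': False},  # left untouched
]
update_sirp(sample_data)
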