def dns_requests(self, dns_requests):
    self._dns_requests = []

    try:
        for request in dns_requests:
            # Only accept DnsRequest objects.
            if isinstance(request, DnsRequest):
                # Skip the request if the queried domain is whitelisted.
                if not self.whitelister.is_domain_whitelisted(request.request):
                    # If the answer is an IP address, keep the request only
                    # if that IP is not whitelisted.
                    if RegexHelpers.is_ip(request.answer):
                        if not self.whitelister.is_ip_whitelisted(request.answer):
                            self._dns_requests.append(request)
                    # Likewise if the answer is a domain.
                    elif RegexHelpers.is_domain(request.answer):
                        if not self.whitelister.is_domain_whitelisted(request.answer):
                            self._dns_requests.append(request)
                    # Anything else (including an empty answer) is kept.
                    else:
                        self._dns_requests.append(request)
    except TypeError:
        # A non-iterable value leaves the list empty.
        pass
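
For context, a minimal sketch of how this setter is usually wired to a property; the @property getter and the decorator pairing are assumptions based on the leading-underscore attribute and are not shown in the snippet above.

# Assumed property pairing (illustrative only; not part of the original snippet).
@property
def dns_requests(self):
    return self._dns_requests

@dns_requests.setter
def dns_requests(self, dns_requests):
    # ... whitelist-filtering logic shown above ...
    ...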
Example #2
def generate_url_indicators(url_list, whitelister=None):
    indicators = []

    # In case we were given a string (a single URL), add it
    # to a list for consistent processing.
    if isinstance(url_list, str):
        url_list = [url_list]

    # Parse the URLs so that we can create Indicators and also prevent
    # "duplicate" URLs like http://blah.com/ and http://blah.com
    for url in url_list:
        if RegexHelpers.is_url(url):
            # Strip off the ending slash if it's there.
            if url.endswith("/"):
                url = url[:-1]

            parsed_url = urlsplit(url)

            # Is the netloc an IP address?
            if RegexHelpers.is_ip(parsed_url.netloc):
                netloc_type = "Address - ipv4-addr"
            # If the netloc is not an IP, it must be a domain.
            else:
                netloc_type = "URI - Domain Name"

            # Make an Indicator for the URI host.
            try:
                ind = Indicator(parsed_url.netloc, netloc_type)
                ind.add_tags("uri_host")
                ind.add_relationships(url)
                indicators.append(ind)
            except ValueError:
                pass

            # Make an Indicator for the full URL.
            try:
                ind = Indicator(url, "URI - URL")
                ind.add_relationships(parsed_url.netloc)
                indicators.append(ind)
            except ValueError:
                pass

            # Make an Indicator for the path (if there is one).
            if parsed_url.path and parsed_url.path != "/":
                try:
                    ind = Indicator(parsed_url.path, "URI - Path")
                    ind.add_tags(["uri_path", parsed_url.netloc])
                    ind.add_relationships([url, parsed_url.netloc])
                    indicators.append(ind)
                except ValueError:
                    pass

            # Make an Indicator for the path including any ? query items.
            if parsed_url.path and parsed_url.path != "/" and parsed_url.query:
                try:
                    uri_path = parsed_url.path + "?" + parsed_url.query

                    ind = Indicator(uri_path, "URI - Path")
                    ind.add_tags(["uri_path", parsed_url.netloc])
                    ind.add_relationships([url, parsed_url.netloc])
                    indicators.append(ind)
                except ValueError:
                    pass

    return indicators
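
A hypothetical usage sketch for generate_url_indicators; the import line assumes the function lives in the project's Indicator module (it is called as Indicator.generate_url_indicators later on this page), and the example URLs and printed output are placeholders.

from Indicator import generate_url_indicators   # assumed import path

urls = [
    "http://198.51.100.7/gate.php?id=1",
    "http://blah.com/",   # trailing slash is stripped, so this normalizes to the same value as the next entry
    "http://blah.com",
]

# Each returned object is an Indicator for the full URL, its host, or its path.
for ind in generate_url_indicators(urls):
    print(ind)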
    def update_sandbox_analysis(self, sandbox_dict):
        self.logger.debug("Updating Sandbox Analysis section.")

        # Get a working copy of the sandbox analysis section.
        #sandbox_analysis = self.get_section("sandbox_analysis")

        # Create the parent div tag.
        div = self.new_tag("div")

        # Continue if we were given a sandbox dictionary.
        if sandbox_dict:
            # Add the header tag.
            header = self.new_tag("h2", parent=div)
            header.string = "Sandbox Analysis"

            for hash in sandbox_dict:
                # Get a single deduped version of the reports.
                dedup_report = BaseSandboxParser.dedup_reports(
                    sandbox_dict[hash])

                # Add a header for the sample's filename.
                header = self.new_tag("h3", parent=div)
                header.string = dedup_report.filename

                ####################
                ##                ##
                ##  SANDBOX URLS  ##
                ##                ##
                ####################
                self.logger.debug("Updating sandbox URLs for " + hash)

                # Make the new sub-section.
                sandbox_urls_section_id = "sandbox_urls_" + hash
                sandbox_urls_section = self.make_section(
                    sandbox_urls_section_id, parent=div)

                # Create a new parent div for the sub-section.
                sandbox_urls_div = self.new_tag("div")

                # Add a header tag for the URLs.
                header = self.new_tag("h4", parent=sandbox_urls_div)
                header.string = "Sandbox URLs"

                # Add an unordered list for the reports.
                ul = self.new_tag("ul", parent=sandbox_urls_div)

                # Add list items for each report.
                for report in sandbox_dict[hash]:
                    li = self.new_tag("li", parent=ul)
                    li.string = report.sandbox_display_name + " = "
                    link = self.new_tag("a", parent=li)
                    link["href"] = report.sandbox_url
                    link.string = report.filename

                if sandbox_dict[hash][0].sha256:
                    li = self.new_tag("li", parent=ul)
                    link = self.new_tag("a", parent=li)
                    link["href"] = ("https://virustotal.com/en/file/" +
                                    sandbox_dict[hash][0].sha256 + "/analysis/")
                    link.string = "VirusTotal"

                # Update the sub-section.
                self.update_section(sandbox_urls_div,
                                    old_section_soup=sandbox_urls_section)

                ###################
                ##               ##
                ##  SCREENSHOTS  ##
                ##               ##
                ###################
                # Only continue if there are actually some screenshots.
                if any(report.screenshot_path
                       for report in sandbox_dict[hash]):
                    self.logger.debug("Updating screenshots for " + hash)

                    # Make the new sub-section.
                    screenshot_section_id = "screenshot_" + hash
                    screenshot_section = self.make_section(
                        screenshot_section_id, parent=div)

                    # Create a new parent div for the sub-section.
                    screenshots_div = self.new_tag("div")

                    # Add a header tag for the screenshots.
                    header = self.new_tag("h4", parent=screenshots_div)
                    header.string = "Screenshots"

                    for report in sandbox_dict[hash]:
                        if report.screenshot_path:
                            screenshot_name = os.path.basename(
                                report.screenshot_path)

                            # Upload the screenshot as an attachment if it doesn't already exist.
                            if not self.attachment_exists(screenshot_name):
                                self.attach_file(report.screenshot_path)

                            # If the screenshot attachment exists, add an img tag for it.
                            if self.attachment_exists(screenshot_name):
                                title_p = self.new_tag("p",
                                                       parent=screenshots_div)
                                title_p["style"] = "color:#009000; font-weight:bold;"
                                title_p.string = report.sandbox_display_name + " - " + report.sandbox_vm_name

                                img_p = self.new_tag("p",
                                                     parent=screenshots_div)
                                img = self.new_tag("img", parent=img_p)
                                img["width"] = "1000"
                                img["height"] = "562"
                                src = ("/download/attachments/" + str(self.get_page_id()) + "/" +
                                       screenshot_name + "?effects=border-simple,blur-border,tape")
                                img["src"] = src

                    self.update_section(screenshots_div,
                                        old_section_soup=screenshot_section)

                ###############
                ##           ##
                ##  MUTEXES  ##
                ##           ##
                ###############
                # Only continue if there are actually some mutexes.
                if dedup_report.mutexes:
                    self.logger.debug("Updating mutexes for " + hash)

                    # Make the new sub-section.
                    mutexes_section_id = "mutexes_" + hash
                    mutex_section = self.make_section(mutexes_section_id,
                                                      parent=div)

                    # Create a new parent div for the sub-section.
                    mutexes_div = self.new_tag("div")

                    # Add a header tag for the mutexes.
                    header = self.new_tag("h4", parent=mutexes_div)
                    header.string = "Mutexes"

                    # Add a pre tag to hold them.
                    pre = self.new_tag("pre", parent=mutexes_div)
                    pre["style"] = "border:1px solid gray;padding:5px;"
                    pre.string = ""

                    for mutex in dedup_report.mutexes:
                        pre.string += mutex + "\n"

                    self.update_section(mutexes_div,
                                        old_section_soup=mutex_section)

                #####################
                ##                 ##
                ##  DROPPED FILES  ##
                ##                 ##
                #####################
                # Only continue if there are actually any dropped files.
                if dedup_report.dropped_files:
                    self.logger.debug("Updating dropped files for " + hash)

                    # Make the new sub-section.
                    dropped_section_id = "dropped_" + hash
                    dropped_section = self.make_section(dropped_section_id,
                                                        parent=div)

                    # Create a new parent div for the sub-section.
                    dropped_div = self.new_tag("div")

                    # Add a header tag for the dropped files.
                    header = self.new_tag("h4", parent=dropped_div)
                    header.string = "Dropped Files"

                    # Create a new table tag.
                    table = self.new_tag("table", parent=dropped_div)

                    # Set up the table header row.
                    thead = self.new_tag("thead", parent=table)
                    tr = self.new_tag("tr", parent=thead)
                    titles = [
                        "VirusTotal", "Filename", "Path", "Size", "Type",
                        "MD5", "SHA256"
                    ]
                    for title in titles:
                        th = self.new_tag("th", parent=tr)
                        th.string = title

                    # Set up the table body rows.
                    tbody = self.new_tag("tbody", parent=table)
                    for file in dedup_report.dropped_files:
                        tr = self.new_tag("tr", parent=tbody)

                        td = self.new_tag("td", parent=tr)
                        if file.sha256:
                            url = self.new_tag("a", parent=td)
                            vt_url = "https://virustotal.com/en/file/" + file.sha256 + "/analysis/"
                            url["href"] = vt_url
                            url.string = "VT"

                        td = self.new_tag("td", parent=tr)
                        td.string = file.filename

                        td = self.new_tag("td", parent=tr)
                        td.string = file.path

                        td = self.new_tag("td", parent=tr)
                        td.string = file.size

                        td = self.new_tag("td", parent=tr)
                        td.string = file.type

                        td = self.new_tag("td", parent=tr)
                        td.string = file.md5

                        td = self.new_tag("td", parent=tr)
                        td.string = file.sha256

                    # Update the sub-section.
                    self.update_section(dropped_div,
                                        old_section_soup=dropped_section)

                ####################
                ##                ##
                ##  DNS REQUESTS  ##
                ##                ##
                ####################
                # Only continue if there are actually any DNS requests.
                if dedup_report.dns_requests:
                    self.logger.debug("Updating DNS requests for " + hash)

                    # Make the new sub-section.
                    dns_section_id = "dns_" + hash
                    dns_section = self.make_section(dns_section_id, parent=div)

                    # Create a new parent div for the sub-section.
                    dns_div = self.new_tag("div")

                    # Add a header tag for the DNS requests.
                    header = self.new_tag("h4", parent=dns_div)
                    header.string = "DNS Requests"

                    # Create a new table tag.
                    table = self.new_tag("table", parent=dns_div)

                    # Set up the table header row.
                    thead = self.new_tag("thead", parent=table)
                    tr = self.new_tag("tr", parent=thead)
                    titles = [
                        "VirusTotal", "Request", "Type", "VirusTotal",
                        "Answer", "Answer Type"
                    ]
                    for title in titles:
                        th = self.new_tag("th", parent=tr)
                        th.string = title

                    # Set up the table body rows.
                    tbody = self.new_tag("tbody", parent=table)
                    for request in dedup_report.dns_requests:
                        tr = self.new_tag("tr", parent=tbody)

                        td = self.new_tag("td", parent=tr)
                        url = self.new_tag("a", parent=td)
                        vt_url = "https://virustotal.com/en/domain/" + request.request + "/information/"
                        url["href"] = vt_url
                        url.string = "VT"

                        td = self.new_tag("td", parent=tr)
                        td.string = request.request

                        td = self.new_tag("td", parent=tr)
                        td.string = request.type

                        td = self.new_tag("td", parent=tr)
                        if request.answer:
                            if RegexHelpers.is_ip(request.answer):
                                vt_url = "https://virustotal.com/en/ip-address/" + request.answer + "/information/"
                            else:
                                vt_url = "https://virustotal.com/en/domain/" + request.answer + "/information/"

                            url = self.new_tag("a", parent=td)
                            url["href"] = vt_url
                            url.string = "VT"

                        td = self.new_tag("td", parent=tr)
                        td.string = request.answer

                        td = self.new_tag("td", parent=tr)
                        td.string = request.answer_type

                    # Update the sub-section.
                    self.update_section(dns_div, old_section_soup=dns_section)

                #####################
                ##                 ##
                ##  HTTP REQUESTS  ##
                ##                 ##
                #####################
                # Only continue if there are actually any HTTP requests.
                if dedup_report.http_requests:
                    self.logger.debug("Updating HTTP requests for " + hash)

                    # Make the new sub-section.
                    http_section_id = "http_" + hash
                    http_section = self.make_section(http_section_id,
                                                     parent=div)

                    # Create a new parent div for the sub-section.
                    http_div = self.new_tag("div")

                    # Add a header tag for the HTTP requests.
                    header = self.new_tag("h4", parent=http_div)
                    header.string = "HTTP Requests"

                    # Create a new table tag.
                    table = self.new_tag("table", parent=http_div)

                    # Set up the table header row.
                    thead = self.new_tag("thead", parent=table)
                    tr = self.new_tag("tr", parent=thead)
                    titles = [
                        "VirusTotal", "Method", "Host", "URI", "Port",
                        "User-Agent"
                    ]
                    for title in titles:
                        th = self.new_tag("th", parent=tr)
                        th.string = title

                    # Set up the table body rows.
                    tbody = self.new_tag("tbody", parent=table)
                    for request in dedup_report.http_requests:
                        tr = self.new_tag("tr", parent=tbody)

                        td = self.new_tag("td", parent=tr)
                        url = self.new_tag("a", parent=td)
                        full_url = "http://" + request.host + request.uri
                        url_hash = hashlib.sha256(
                            full_url.encode()).hexdigest()
                        vt_url = "https://virustotal.com/en/url/" + url_hash + "/analysis/"
                        url["href"] = vt_url
                        url.string = "VT"

                        td = self.new_tag("td", parent=tr)
                        td.string = request.method

                        td = self.new_tag("td", parent=tr)
                        td.string = request.host

                        td = self.new_tag("td", parent=tr)
                        td.string = request.uri

                        td = self.new_tag("td", parent=tr)
                        td.string = request.port

                        td = self.new_tag("td", parent=tr)
                        td.string = request.user_agent

                    # Update the sub-section.
                    self.update_section(http_div,
                                        old_section_soup=http_section)

                #######################
                ##                   ##
                ##  CONTACTED HOSTS  ##
                ##                   ##
                #######################
                # Only continue if there are actually any contacted hosts.
                if dedup_report.contacted_hosts:
                    self.logger.debug("Updating contacted hosts for " + hash)

                    # Make the new sub-section.
                    hosts_section_id = "hosts_" + hash
                    hosts_section = self.make_section(hosts_section_id,
                                                      parent=div)

                    # Create a new parent div for the sub-section.
                    hosts_div = self.new_tag("div")

                    # Add a header tag for the contacted hosts.
                    header = self.new_tag("h4", parent=hosts_div)
                    header.string = "Contacted Hosts"

                    # Create a new table tag.
                    table = self.new_tag("table", parent=hosts_div)

                    # Set up the table header row.
                    thead = self.new_tag("thead", parent=table)
                    tr = self.new_tag("tr", parent=thead)
                    titles = [
                        "VirusTotal", "Tor Node", "Address", "Port",
                        "Protocol", "Location", "Associated Domains"
                    ]
                    for title in titles:
                        th = self.new_tag("th", parent=tr)
                        th.string = title

                    # Set up the table body rows.
                    tbody = self.new_tag("tbody", parent=table)
                    for host in dedup_report.contacted_hosts:
                        tr = self.new_tag("tr", parent=tbody)

                        td = self.new_tag("td", parent=tr)
                        url = self.new_tag("a", parent=td)
                        vt_url = "https://virustotal.com/en/ip-address/" + host.ipv4 + "/information/"
                        url["href"] = vt_url
                        url.string = "VT"

                        td = self.new_tag("td", parent=tr)
                        if self.whitelister.is_tor_node(host.ipv4):
                            td.string = "True"

                        td = self.new_tag("td", parent=tr)
                        td.string = host.ipv4

                        td = self.new_tag("td", parent=tr)
                        td.string = host.port

                        td = self.new_tag("td", parent=tr)
                        td.string = host.protocol

                        td = self.new_tag("td", parent=tr)
                        td.string = host.location

                        td = self.new_tag("td", parent=tr)
                        td.string = host.associated_domains_string

                    # Update the sub-section.
                    self.update_section(hosts_div,
                                        old_section_soup=hosts_section)

                #####################
                ##                 ##
                ##  PROCESS TREES  ##
                ##                 ##
                #####################
                # Only continue if there are actually some process trees.
                if dedup_report.process_tree_list:
                    self.logger.debug("Updating process tree for " + hash)

                    # Make the new sub-section.
                    process_section_id = "process_" + hash
                    process_section = self.make_section(process_section_id,
                                                        parent=div)

                    # Create a new parent div for the sub-section.
                    process_div = self.new_tag("div")

                    # Add a header tag for the process tree.
                    header = self.new_tag("h4", parent=process_div)
                    header.string = "Process Tree"

                    # Add a pre tag to hold the process trees.
                    pre = self.new_tag("pre", parent=process_div)
                    pre["style"] = "border:1px solid gray;padding:5px;"
                    pre.string = ""

                    for tree in dedup_report.process_tree_list:
                        pre.string += tree + "\n"

                    self.update_section(process_div,
                                        old_section_soup=process_section)

        self.update_section(div, old_section_id="sandbox_analysis")
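
A hypothetical call sketch for update_sandbox_analysis; page and the report objects are placeholders, but the shape of sandbox_dict (a sample hash mapping to the list of parsed sandbox reports for that sample) follows the loop above.

# Hypothetical usage: keys are sample hashes, values are lists of parsed sandbox
# reports (the method dedupes each list with BaseSandboxParser.dedup_reports).
sandbox_dict = {
    "d41d8cd98f00b204e9800998ecf8427e": [cuckoo_report, vxstream_report],  # placeholder reports
}
page.update_sandbox_analysis(sandbox_dict)   # "page" is the wiki-page wrapper instance (assumed)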
def ipv4(self, ip):
    # Only store the value if it is a valid IP address.
    if RegexHelpers.is_ip(ip):
        self.__ipv4 = ip
    else:
        self.__ipv4 = ""
    def extract_indicators(self, check_whitelist=True):
        # Make an Indicator for the sample's MD5 hash.
        if RegexHelpers.is_md5(self.md5):
            try:
                ind = Indicator.Indicator(self.md5, "Hash - MD5")
                ind.add_tags("sandboxed_sample")
                self.iocs.append(ind)
            except ValueError:
                pass
        
        # Make an Indicator for the sample's SHA1 hash.
        if RegexHelpers.is_sha1(self.sha1):
            try:
                ind = Indicator.Indicator(self.sha1, "Hash - SHA1")
                ind.add_tags("sandboxed_sample")
                self.iocs.append(ind)
            except ValueError:
                pass
            
        # Make an Indicator for the sample's SHA256 hash.
        if RegexHelpers.is_sha256(self.sha256):
            try:
                ind = Indicator.Indicator(self.sha256, "Hash - SHA256")
                ind.add_tags("sandboxed_sample")
                self.iocs.append(ind)
            except ValueError:
                pass
            
        # Make Indicators for any contacted hosts.
        for host in self.contacted_hosts:
            # Make an Indicator for the IP itself.
            if RegexHelpers.is_ip(host.ipv4):
                try:
                    ind = Indicator.Indicator(host.ipv4, "Address - ipv4-addr")
                    ind.add_tags("contacted_host")
                    if host.protocol and host.port:
                        ind.add_tags(host.protocol + " " + host.port)
                    elif host.protocol and not host.port:
                        ind.add_tags(host.protocol)
                    self.iocs.append(ind)
                except ValueError:
                    pass

                # Make Indicators for any associated domains.
                for domain in host.associated_domains:
                    if RegexHelpers.is_domain(domain["domain"]):
                        try:
                            ind = Indicator.Indicator(domain["domain"], "URI - Domain Name")
                            ind.add_tags("associated_to_" + host.ipv4)
                            ind.add_relationships(host.ipv4)
                            self.iocs.append(ind)
                        except ValueError:
                            pass

        # Make Indicators for any DNS requests.
        for request in self.dns_requests:
            # Make an Indicator for the requested domain.
            if RegexHelpers.is_domain(request.request):
                try:
                    ind = Indicator.Indicator(request.request, "URI - Domain Name")
                    ind.add_tags("dns_request")
                    # If the DNS answer is an IP, add a tag for it and
                    # also create an Indicator for it.
                    if RegexHelpers.is_ip(request.answer):
                        ind.add_tags(request.answer)

                        try:
                            ip_ind = Indicator.Indicator(request.answer, "Address - ipv4-addr")
                            ip_ind.add_tags(["dns_response", request.request])
                            self.iocs.append(ip_ind)
                        except ValueError:
                            pass

                    self.iocs.append(ind)
                except ValueError:
                    pass
                
        # Make Indicators for any dropped files.
        # TODO: Add back in the ability to only make Indicators for "interesting"
        # dropped files, based on file type or file extension.
        for file in self.dropped_files:
            # Make an Indicator for the file path.
            try:
                ind = Indicator.Indicator(file.path, "Windows - FilePath")
                ind.add_tags("dropped_file")
                ind.add_relationships(file.filename)
                self.iocs.append(ind)
            except ValueError:
                pass

            # Make an Indicator for the file name.
            try:
                ind = Indicator.Indicator(file.filename, "Windows - FileName")
                ind.add_tags("dropped_file")
                ind.add_relationships([file.path, file.md5, file.sha1, file.sha256])
                self.iocs.append(ind)
            except ValueError:
                pass

            # Make an Indicator for the MD5 hash.
            if RegexHelpers.is_md5(file.md5):
                try:
                    ind = Indicator.Indicator(file.md5, "Hash - MD5")
                    ind.add_tags([file.filename, "dropped_file"])
                    ind.add_relationships([file.filename, file.path, file.sha1, file.sha256])
                    self.iocs.append(ind)
                except ValueError:
                    pass

            # Make an Indicator for the SHA1 hash.
            if RegexHelpers.is_sha1(file.sha1):
                try:
                    ind = Indicator.Indicator(file.sha1, "Hash - SHA1")
                    ind.add_tags([file.filename, "dropped_file"])
                    ind.add_relationships([file.filename, file.path, file.md5, file.sha256])
                    self.iocs.append(ind)
                except ValueError:
                    pass

            # Make an Indicator for the SHA256 hash.
            if RegexHelpers.is_sha256(file.sha256):
                try:
                    ind = Indicator.Indicator(file.sha256, "Hash - SHA256")
                    ind.add_tags([file.filename, "dropped_file"])
                    ind.add_relationships([file.filename, file.path, file.md5, file.sha1])
                    self.iocs.append(ind)
                except ValueError:
                    pass
                    
        # Make Indicators for any HTTP requests.
        for request in self.http_requests:
            # Check if the host is a domain or IP.
            if RegexHelpers.is_ip(request.host):
                indicator_type = "Address - ipv4-addr"
            # Otherwise it must be a domain.
            else:
                indicator_type = "URI - Domain Name"

            # Make an Indicator for the host.
            try:
                ind = Indicator.Indicator(request.host, indicator_type)
                ind.add_tags(["http_request", request.method])
                if request.method == "POST":
                    ind.add_tags("c2")
                self.iocs.append(ind)
            except ValueError:
                pass

            # Make an Indicator for the URI path.
            if request.uri != "/":
                try:
                    ind = Indicator.Indicator(request.uri, "URI - Path")
                    ind.add_tags(["http_request", request.method, request.host])
                    if request.method == "POST":
                        ind.add_tags("c2")
                    ind.add_relationships(request.host)
                    self.iocs.append(ind)
                except ValueError:
                    pass

            # Make an Indicator for the full URL.
            try:
                url = "http://" + request.host + request.uri
                ind = Indicator.Indicator(url, "URI - URL")
                ind.add_tags(["http_request", request.method])
                if request.method == "POST":
                    ind.add_tags("c2")
                ind.add_relationships([request.host, request.uri])
                self.iocs.append(ind)
            except ValueError:
                pass

            # Make an Indicator for the User-Agent.
            try:
                ind = Indicator.Indicator(request.user_agent, "URI - HTTP - UserAgent")
                ind.add_tags(["http_request", request.method, request.host])
                if request.method == "POST":
                    ind.add_tags("c2")
                ind.add_relationships([request.host, request.uri])
                self.iocs.append(ind)
            except ValueError:
                pass
                
        # Make Indicators for any memory URLs. Currently, only VxStream
        # has this memory URL feature.
        indicator_list = Indicator.generate_url_indicators(self.memory_urls)

        # Add some extra tags to the generated indicators and
        # then add them to our main IOC list.
        for ind in indicator_list:
            ind.add_tags("url_in_memory")
            self.iocs.append(ind)
                
        # Make Indicators for any URLs found in the sample's strings.
        indicator_list = Indicator.generate_url_indicators(self.strings_urls)

        # Add some extra tags to the generated indicators and
        # then add them to our main IOC list.
        for ind in indicator_list:
            ind.add_tags("url_in_strings")
            self.iocs.append(ind)

        # Make Indicators for any URLs found in the sample's process tree.
        indicator_list = Indicator.generate_url_indicators(self.process_tree_urls)

        # Add some extra tags to the generated indicators and
        # then add them to our main IOC list.
        for ind in indicator_list:
            ind.add_tags("url_in_process_tree")
            self.iocs.append(ind)

        # Make Indicators for any mutexes.
        for mutex in self.mutexes:
            try:
                ind = Indicator.Indicator(mutex, "Windows - Mutex")
                ind.add_tags("mutex_created")
                self.iocs.append(ind)
            except ValueError:
                pass
                
        # Run the IOCs through the whitelists if requested.
        if check_whitelist:
            self.iocs = Indicator.run_whitelist(self.iocs)
            
        # Finally merge the IOCs so we don't have any duplicates.
        self.iocs = Indicator.merge_duplicate_indicators(self.iocs)
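
A short hypothetical usage of extract_indicators; parser stands in for an already-parsed sandbox report object (for example a BaseSandboxParser subclass instance), which is an assumption since only the method body is shown above.

# Hypothetical usage: run after the sandbox report has been parsed.
parser.extract_indicators()            # populates parser.iocs, whitelisted and deduped
for ioc in parser.iocs:
    print(ioc)

# Pass check_whitelist=False to skip the whitelist pass and keep every indicator.
parser.extract_indicators(check_whitelist=False)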