    def ioc_to_tag(self,
                   data: bytes,
                   patterns: PatternMatch,
                   res: Optional[ResultSection] = None,
                   taglist: bool = False,
                   check_length: bool = False,
                   strs_max_size: int = 0,
                   st_max_length: int = 300) -> Dict[str, Set[str]]:
        """Searches data for patterns and adds as AL tag to result output.

        Args:
            data: Data to be searched.
            patterns: FrankenStrings Patterns() object.
            res: AL result.
            taglist: True if tag list should be returned.
            check_length: True if length of string should be compared to st_max_length.
            strs_max_size: Maximum size of the extracted string set; if exceeded, only network IOCs are searched.
            st_max_length: Maximum length of a string from data that can be searched.

        Returns: Tag list as a dictionary (always empty if taglist is False).
        """

        tags: Dict[str, Set[str]] = {}

        min_length = self.st_min_length if check_length else 4

        strs: Set[bytes] = set()
        just_network = False

        # Flare-FLOSS ascii string extract
        for ast in strings.extract_ascii_strings(data, n=min_length):
            if not check_length or len(ast.s) < st_max_length:
                strs.add(ast.s)
        # Flare-FLOSS unicode string extract
        for ust in strings.extract_unicode_strings(data, n=min_length):
            if not check_length or len(ust.s) < st_max_length:
                strs.add(ust.s)

        if check_length and len(strs) > strs_max_size:
            just_network = True

        for s in strs:
            st_value: Dict[str, Iterable[bytes]] = patterns.ioc_match(
                s, bogon_ip=True, just_network=just_network)
            for ty, val in st_value.items():
                if taglist and ty not in tags:
                    tags[ty] = set()
                for v in val:
                    if ty == 'network.static.domain' and not is_valid_domain(
                            v.decode('utf-8')):
                        continue
                    if ty == 'network.email.address' and not is_valid_email(
                            v.decode('utf-8')):
                        continue
                    if len(v) < 1001:
                        if res:
                            res.add_tag(ty, safe_str(v))
                        if taglist:
                            tags[ty].add(safe_str(v))
        return tags
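A minimal usage sketch for the method above, written as if inside another method of the same Assemblyline service; the no-argument PatternMatch() construction and the sample bytes are illustrative assumptions, not taken from the source:

        # Illustrative sketch: `self` is the service instance that defines
        # `st_min_length`; PatternMatch() with no arguments is an assumption.
        patterns = PatternMatch()
        section = ResultSection("Extracted IOCs")
        data = b"connect to http://c2.example.com and mail admin@example.com"

        tags = self.ioc_to_tag(data, patterns, res=section, taglist=True,
                               check_length=True, strs_max_size=100,
                               st_max_length=300)
        for tag_type, values in tags.items():
            print(tag_type, sorted(values))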
def _validate_tag(
    result_section: ResultSection,
    tag: str,
    value: Any,
    safelist: Optional[Dict[str, Dict[str, List[str]]]] = None
) -> bool:
    """
    This method validates the value relative to the tag type before adding the value as a tag to the ResultSection.
    :param result_section: The ResultSection that the tag will be added to
    :param tag: The tag type that the value will be tagged under
    :param value: The item that will be tagged under the tag type
    :param safelist: The safelist containing matches and regexes. The product of a service using self.get_api_interface().get_safelist().
    :return: Whether the tag was successfully added
    """
    if safelist is None:
        safelist = {}

    regex = _get_regex_for_tag(tag)
    if regex and not match(regex, value):
        return False

    if "ip" in tag and not is_valid_ip(value):
        return False

    if "domain" in tag:
        if not is_valid_domain(value):
            return False
        elif value in FALSE_POSITIVE_DOMAINS_FOUND_IN_PATHS:
            return False
        elif isinstance(value, str) and value.split(".")[-1] in COMMON_FILE_EXTENSIONS:
            return False

    if is_tag_safelisted(value, [tag], safelist):
        return False

    # if "uri" is in the tag, let's try to extract its domain/ip and tag it.
    if "uri_path" not in tag and "uri" in tag:
        # First try to get the domain
        valid_domain = False
        domain = search(DOMAIN_REGEX, value)
        if domain:
            domain = domain.group()
            valid_domain = _validate_tag(result_section, "network.dynamic.domain", domain, safelist)
        # Then try to get the IP
        valid_ip = False
        ip = search(IP_REGEX, value)
        if ip:
            ip = ip.group()
            valid_ip = _validate_tag(result_section, "network.dynamic.ip", ip, safelist)

        if value not in [domain, ip] and (valid_domain or valid_ip):
            result_section.add_tag(tag, safe_str(value))
        else:
            return False
    else:
        result_section.add_tag(tag, safe_str(value))

    return True
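A brief usage sketch for _validate_tag; the safelist layout ({"match": ..., "regex": ...}) mirrors what get_safelist() typically returns, but the tag type, URI and safelist contents below are assumptions for illustration:

# Illustrative call; all values are made up.
section = ResultSection("Network IOCs")
safelist = {"match": {"network.dynamic.domain": ["safe.example.com"]}}
if _validate_tag(section, "network.dynamic.uri", "https://test.example.org/payload", safelist):
    print("URI tagged, along with its extracted domain")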
Example #3
    def tag_analyze(self, value, section):
        if is_valid_ip(value):
            section.add_tag(TAG_TYPE["NET_IP"], value, TAG_WEIGHT.LOW)

        if is_valid_email(value):
            section.add_tag(TAG_TYPE["NET_EMAIL"], value, TAG_WEIGHT.LOW)

        if is_valid_domain(value):
            section.add_tag(TAG_TYPE["NET_DOMAIN"], value, TAG_WEIGHT.LOW)
Example #4
    def check(self, value, **kwargs):
        if not value:
            return None

        if not self.validation_regex.match(value):
            raise ValueError(f"[{self.name or self.parent_name}] '{value}' not match the "
                             f"validator: {self.validation_regex.pattern}")

        if not is_valid_domain(value):
            raise ValueError(f"[{self.name or self.parent_name}] '{value}' has a non-valid TLD.")

        return value.lower()
Example #5
    def check(self, value, **kwargs):
        if not value:
            return None

        match = self.validation_regex.match(value)
        if not match:
            raise ValueError(f"[{self.name or self.parent_name}] '{value}' not match the "
                             f"validator: {self.validation_regex.pattern}")

        if not is_valid_domain(match.group(2)) and not is_valid_ip(match.group(2)):
            raise ValueError(f"[{self.name or self.parent_name}] '{match.group(2)}' in URI '{value}'"
                             " is not a valid Domain or IP.")

        return match.group(0).replace(match.group(1), match.group(1).lower())
Example #6
    def check(self, value, **kwargs):
        if not value:
            return None

        match = self.validation_regex.match(value)
        if not match:
            raise ValueError(f"[{self.name or self.parent_name}] '{value}' not match the "
                             f"validator: {self.validation_regex.pattern}")

        if not is_valid_domain(match.group(1)):
            raise ValueError(f"[{self.name or self.parent_name}] '{match.group(1)}' in email '{value}'"
                             " is not a valid Domain.")

        return value.lower()
Example #7
    def find_network_indicators(apktool_out_dir: str, result: Result):
        # Whitelist
        skip_list = [
            "android.intent",
            "com.google",
            "com.android",
        ]

        indicator_whitelist = [
            'google.to',
            'google.ttl',
            'google.delay',
            'google_tagmanager.db',
            'gtm_urls.db',
            'gtm.url',
            'google_analytics_v4.db',
            'Theme.Dialog.Alert',
            'popupLocationInfo.gravity',
            'popupLocationInfo.displayId',
            'popupLocationInfo.left',
            'popupLocationInfo.top',
            'popupLocationInfo.right',
            'popupLocationInfo.bottom',
            'googleads.g.doubleclick.net',
            'ad.doubleclick.net',
            '.doubleclick.net',
            '.googleadservices.com',
            '.googlesyndication.com',
            'android.hardware.type.watch',
            'mraid.js',
            'google_inapp_purchase.db',
            'mobileads.google.com',
            'share_history.xml',
            'activity_choser_model_history.xml',
            'FragmentPager.SavedState{',
            'android.remoteinput.results',
            'android.people',
            'android.picture',
            'android.icon',
            'android.text',
            'android.title',
            'android.title.big',
            'FragmentTabHost.SavedState{',
            'libcore.icu.ICU',
        ]

        file_list = []

        # Indicators
        url_list = []
        domain_list = []
        ip_list = []
        email_list = []

        # Build dynamic whitelist
        smali_dir = os.path.join(apktool_out_dir, "smali")
        for root, dirs, files in os.walk(smali_dir):
            if not files:
                continue
            else:
                skip_list.append(root.replace(smali_dir + "/", "").replace("/", "."))

            for cdir in dirs:
                skip_list.append(os.path.join(root, cdir).replace(smali_dir + "/", "").replace("/", "."))

        asset_dir = os.path.join(apktool_out_dir, "assets")
        if os.path.exists(asset_dir):
            for root, dirs, files in os.walk(asset_dir):
                if not files:
                    continue
                else:
                    for asset_file in files:
                        file_list.append(asset_file)
        skip_list = list(set(skip_list))

        # Find indicators
        proc = Popen(['grep', '-ER', r'(([[:alpha:]](-?[[:alnum:]])*)\.)*[[:alpha:]](-?[[:alnum:]])+\.[[:alpha:]]{2,}',
                      smali_dir], stdout=PIPE, stderr=PIPE)
        grep, _ = proc.communicate()
        for line in safe_str(grep).splitlines():
            file_path, line = line.split(":", 1)

            if "const-string" in line or "Ljava/lang/String;" in line:
                data = line.split("\"", 1)[1].split("\"")[0]
                data_low = data.lower()
                data_split = data.split(".")
                if data in file_list:
                    continue
                elif data in indicator_whitelist:
                    continue
                elif data.startswith("/"):
                    continue
                elif data_low.startswith("http://") or data_low.startswith('ftp://') or data_low.startswith('https://'):
                    url_list.append(data)
                elif len(data_split[0]) < len(data_split[-1]) and len(data_split[-1]) > 3:
                    continue
                elif data.startswith('android.') and data_low != data:
                    continue
                elif "/" in data and "." in data and data.index("/") < data.index("."):
                    continue
                elif " " in data:
                    continue
                elif data_split[0] in ['com', 'org', 'net', 'java']:
                    continue
                elif data_split[-1].lower() in ['so', 'properties', 'zip', 'read', 'id', 'store',
                                                'name', 'author', 'sh', 'soccer', 'fitness', 'news', 'video']:
                    continue
                elif data.endswith("."):
                    continue
                else:
                    do_skip = False
                    for skip in skip_list:
                        if data.startswith(skip):
                            do_skip = True
                            break

                    if do_skip:
                        continue

                    data = data.strip(".")

                    if is_valid_domain(data):
                        domain_list.append(data)
                    elif is_valid_ip(data):
                        ip_list.append(data)
                    elif is_valid_email(data):
                        email_list.append(data)

        url_list = list(set(url_list))
        for url in url_list:
            dom_ip = url.split("//")[1].split("/")[0]
            if ":" in dom_ip:
                dom_ip = dom_ip.split(":")[0]

            if is_valid_ip(dom_ip):
                ip_list.append(dom_ip)
            elif is_valid_domain(dom_ip):
                domain_list.append(dom_ip)

        ip_list = list(set(ip_list))
        domain_list = list(set(domain_list))
        email_list = list(set(email_list))

        if url_list or ip_list or domain_list or email_list:
            res_net = ResultSection("Network indicator(s) found", parent=result, heuristic=Heuristic(3))

            if url_list:
                res_url = ResultSection("Found urls in the decompiled code", parent=res_net)
                count = 0
                for url in url_list:
                    count += 1
                    if count <= 20:
                        res_url.add_line(url)
                    res_url.add_tag('network.static.uri', url)
                if count > 20:
                    res_url.add_line(f"and {count - 20} more...")

            if ip_list:
                res_ip = ResultSection("Found IPs in the decompiled code", parent=res_net)
                count = 0
                for ip in ip_list:
                    count += 1
                    if count <= 20:
                        res_ip.add_line(ip)
                    res_ip.add_tag('network.static.ip', ip)
                if count > 20:
                    res_ip.add_line(f"and {count - 20} more...")

            if domain_list:
                res_domain = ResultSection("Found domains in the decompiled code", parent=res_net)
                count = 0
                for domain in domain_list:
                    count += 1
                    if count <= 20:
                        res_domain.add_line(domain)
                    res_domain.add_tag('network.static.domain', domain)
                if count > 20:
                    res_domain.add_line(f"and {count - 20} more...")

            if email_list:
                res_email = ResultSection("Found email addresses in the decompiled code", parent=res_net)
                count = 0
                for email in email_list:
                    count += 1
                    if count <= 20:
                        res_email.add_line(email)
                    res_email.add_tag('network.email.address', email)
                if count > 20:
                    res_email.add_line(f"and {count - 20} more...")
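For reference, a sketch of the POSIX ERE passed to grep above, rewritten as a Python regex; the character-class translation ([[:alpha:]] -> [A-Za-z], [[:alnum:]] -> [A-Za-z0-9]) is my own approximation of what grep -E matches:

import re

DOMAIN_LIKE = re.compile(
    r"(([A-Za-z](-?[A-Za-z0-9])*)\.)*[A-Za-z](-?[A-Za-z0-9])+\.[A-Za-z]{2,}"
)

for candidate in ('const-string v0, "ads.example.net"', 'no domain here'):
    m = DOMAIN_LIKE.search(candidate)
    print(candidate, "->", m.group(0) if m else None)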
Example #8
    def ioc_to_tag(self,
                   data,
                   patterns,
                   res=None,
                   taglist=False,
                   check_length=False,
                   strs_max_size=0,
                   st_max_length=300):
        """Searches data for patterns and adds as AL tag to result output.

        Args:
            data: Data to be searched.
            patterns: FrankenStrings Patterns() object.
            res: AL result.
            taglist: True if tag list should be returned.
            check_length: True if length of string should be compared to st_max_length.
            strs_max_size: Maximum size of the extracted string set; if exceeded, only network IOCs are searched.
            st_max_length: Maximum length of a string from data that can be searched.

        Returns:
            If tag list has been requested, returns tag list as dictionary. Otherwise returns None.

        """

        tags = {}

        if check_length:
            ml = self.st_min_length
        else:
            ml = 4

        strs = set()
        jn = False

        # Flare-FLOSS ascii string extract
        for ast in strings.extract_ascii_strings(data, n=ml):
            if check_length:
                if len(ast.s) < st_max_length:
                    strs.add(ast.s)
            else:
                strs.add(ast.s)
        # Flare-FLOSS unicode string extract
        for ust in strings.extract_unicode_strings(data, n=ml):
            if check_length:
                if len(ust.s) < st_max_length:
                    strs.add(ust.s)
            else:
                strs.add(ust.s)

        if check_length:
            if len(strs) > strs_max_size:
                jn = True

        if len(strs) > 0:
            for s in strs:
                st_value = patterns.ioc_match(s,
                                              bogon_ip=True,
                                              just_network=jn)
                if len(st_value) > 0:
                    for ty, val in st_value.items():
                        if taglist and ty not in tags:
                            tags[ty] = set()
                        for v in val:
                            if ty == 'network.static.domain':
                                if not is_valid_domain(v.decode('utf-8')):
                                    continue
                            if ty == 'network.email.address':
                                if not is_valid_email(v.decode('utf-8')):
                                    continue
                            if len(v) < 1001:
                                if res is not None:
                                    res.add_tag(ty, safe_str(v))
                                if taglist:
                                    tags[ty].add(safe_str(v))
        if taglist:
            return tags
        else:
            return