def target(self, queue):
    # get all decompiled files that contain usage of TelephonyManager
    files = common.text_scan(common.java_files, self.telephonyManagerRegex)
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.getName(), percent=round(count * 100 / len(files)))
        # get decompiled file body
        fileName = f[1]
        with open(fileName, 'r') as fi:
            fileBody = fi.read()
        # report if the file contains an inline call, then move on to the next file
        if PluginUtil.contains(self.inlineRegex, fileBody):
            PluginUtil.reportInfo(fileName, self.PhoneIdentifierIssueDetails(fileName), res)
            continue
        # report if any TelephonyManager variable invokes calls to get phone identifiers
        for varName in PluginUtil.returnGroupMatches(self.varNameRegex, 2, fileBody):
            if PluginUtil.contains(r'%s\.(getLine1Number|getDeviceId)\(.*?\)' % varName, fileBody):
                PluginUtil.reportInfo(fileName, self.PhoneIdentifierIssueDetails(fileName), res)
                break
    queue.put(res)
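# A minimal, self-contained sketch of the two-stage regex approach used above:
# first capture variable names bound to TelephonyManager, then look for
# getDeviceId()/getLine1Number() calls on those variables. The patterns below
# are illustrative assumptions; the plugin's actual inlineRegex and varNameRegex
# are defined elsewhere and may differ.
import re

def find_phone_identifier_usage(file_body):
    # assumed pattern: "TelephonyManager tm = ..." captures "tm" in group 2
    var_name_regex = r'(TelephonyManager)\s+(\w+)\s*='
    hits = []
    for match in re.finditer(var_name_regex, file_body):
        var_name = match.group(2)
        call_regex = r'%s\.(getLine1Number|getDeviceId)\(.*?\)' % var_name
        if re.search(call_regex, file_body):
            hits.append(var_name)
    return hits

# example usage on a decompiled snippet
sample = 'TelephonyManager tm = getSystemService(TELEPHONY_SERVICE); String id = tm.getDeviceId();'
print(find_phone_identifier_usage(sample))  # ['tm']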
def target(self, queue):
    files = common.java_files
    global file_path
    api_key_list = []
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        file_path = str(f)
        with open(file_path, 'r') as fi:
            file_content = fi.read()
        # Split the file content into individual lines
        for line in file_content.splitlines():
            # Further split each line into words
            for word in line.split():
                # Regex to check for an API key value in the word
                if re.search(self.API_KEY_REGEX, word):
                    # Check if a special character is present in the word. If yes, then ignore it.
                    # Avoids redundant display of the line and filepath in the output
                    if not re.search(self.SPECIAL_CHAR_REGEX, word) and ("Line: " + line) not in api_key_list:
                        api_key_list.append("Line: " + line)
                        api_key_list.append("Filepath: " + file_path + "\n")
    api_key_variable = "\n".join(api_key_list)
    if api_key_list:
        PluginUtil.reportInfo(file_path, hardcoded_api_key(api_key_variable), res)
    queue.put(res)
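# A minimal sketch of the word-level scan above, runnable on its own. The two
# regexes are assumptions for illustration (the plugin's API_KEY_REGEX and
# SPECIAL_CHAR_REGEX are class attributes defined elsewhere): one matches long
# alphanumeric tokens that look like keys, the other rejects words containing
# characters unlikely to appear in a real key.
import re

API_KEY_REGEX = r'[A-Za-z0-9_\-]{20,}'   # assumed: long token that may be a key
SPECIAL_CHAR_REGEX = r'[<>{}()%\\]'       # assumed: characters that rule a word out

def scan_for_api_keys(file_content, file_path):
    findings = []
    for line in file_content.splitlines():
        for word in line.split():
            if re.search(API_KEY_REGEX, word) and not re.search(SPECIAL_CHAR_REGEX, word):
                findings.append("Line: " + line)
                findings.append("Filepath: " + file_path + "\n")
                break  # one finding per line is enough
    return findings

print(scan_for_api_keys('String key = "AIzaSyA1234567890abcdefghijklmn";', 'Config.java'))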
def target(self, queue): files = common.java_files global filepath, tree parser = plyj.Parser() tree = '' res = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue try: for import_decl in tree.import_declarations: # Check if Intent is called in the import statement if 'Intent' in import_decl.name.value: with open(filepath, 'r') as r: file_body = r.read() if PluginUtil.contains(self.NEW_TASK, file_body): PluginUtil.reportInfo(filepath, new_task(filepath), res) break if PluginUtil.contains(self.MULTIPLE_TASK_TASK, file_body): PluginUtil.reportInfo(filepath, multiple_task(filepath), res) break except Exception as e: common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue queue.put(res)
def testReportIssue2():
    res = []
    PluginUtil.reportInfo('fileName', 'details', res)
    assert len(res) == 2
    assert res[0].getCategory() == ExploitType.PLUGIN
    assert res[0].getSeverity() == Severity.INFO
    assert res[0].getFile() == 'fileName'
    assert res[0].getDetails() == 'details'
    assert res[1].getLevel() == Severity.INFO
    assert res[1].getData() == 'details'
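# The test above pins down the observable contract of PluginUtil.reportInfo:
# it appends two objects to the result list, an issue carrying category,
# severity, file, and details, followed by a log entry carrying level and data.
# A minimal sketch consistent with those assertions (not QARK's actual
# implementation; the enum and result classes here are stand-ins):
class ExploitType(object): PLUGIN = 'PLUGIN'   # stand-in for the real enum
class Severity(object): INFO = 'INFO'          # stand-in for the real enum

class _Issue(object):
    def __init__(self, category, severity, file_name, details):
        self._category, self._severity, self._file, self._details = category, severity, file_name, details
    def getCategory(self): return self._category
    def getSeverity(self): return self._severity
    def getFile(self): return self._file
    def getDetails(self): return self._details

class _LogEntry(object):
    def __init__(self, level, data):
        self._level, self._data = level, data
    def getLevel(self): return self._level
    def getData(self): return self._data

def report_info(file_name, details, res):
    # append one issue and one matching log entry, mirroring the asserted behavior
    res.append(_Issue(ExploitType.PLUGIN, Severity.INFO, file_name, details))
    res.append(_LogEntry(Severity.INFO, details))

# usage mirroring the test
res = []
report_info('fileName', 'details', res)
assert res[0].getFile() == 'fileName' and res[1].getData() == 'details'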
def recursive_insecure_call_function(self, fields, file, res):
    if type(fields) is m.MethodDeclaration:
        if str(fields.name) == self.CALL_FUNCTION:
            PluginUtil.reportInfo(filepath, insecure_function(filepath), res)
    elif type(fields) is list:
        for fieldname in fields:
            self.recursive_insecure_call_function(fieldname, file, res)
    elif hasattr(fields, '_fields'):
        for fieldname in fields._fields:
            self.recursive_insecure_call_function(getattr(fields, fieldname), file, res)
    return
def recursive_classloader_function(self, fields, f, res):
    if type(fields) is m.MethodDeclaration:
        if str(fields.name) == self.CLASS_LOADER:
            PluginUtil.reportInfo(filepath, class_loader(filepath), res)
        elif self.CLASS_LOADER in str(fields):
            PluginUtil.reportInfo(filepath, class_loader(filepath), res)
    elif type(fields) is list:
        for tree_object_fields in fields:
            self.recursive_classloader_function(tree_object_fields, f, res)
    elif hasattr(fields, '_fields'):
        for values in fields._fields:
            self.recursive_classloader_function(getattr(fields, values), f, res)
    return
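# The two helpers above walk a plyj AST generically: plyj model nodes expose a
# _fields tuple naming their child attributes, lists are recursed element by
# element, and anything else is treated as a leaf. A standalone sketch of that
# traversal pattern, using a tiny stand-in node class rather than plyj itself:
class Node(object):
    _fields = ('name', 'body')
    def __init__(self, name, body=None):
        self.name = name
        self.body = body or []

def walk(node, visit):
    if isinstance(node, list):
        for child in node:
            walk(child, visit)
    elif hasattr(node, '_fields'):
        visit(node)
        for field in node._fields:
            walk(getattr(node, field), visit)

# example: collect every node whose name mentions "ClassLoader"
tree = Node('CompilationUnit', [Node('loadClass'), Node('DexClassLoader')])
hits = []
walk(tree, lambda n: hits.append(n.name) if 'ClassLoader' in str(n.name) else None)
print(hits)  # ['DexClassLoader']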
def target(self, queue):
    files = common.java_files
    global parser, tree, fileName
    parser = plyj.Parser()
    tree = ''
    res = []
    # List of dynamically registered broadcast receivers
    list_BR = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        fileName = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception:
            continue
        try:
            for import_decl in tree.import_declarations:
                if self.DEX_CLASS_LOADER in import_decl.name.value:
                    if self.CLASS_LOADER in str(tree):
                        PluginUtil.reportInfo(fileName, self.DexClassLoaderIssueDetails(fileName), res)
            # Check whether the app registers a broadcast receiver dynamically
            if self.DYNAMIC_BROADCAST_RECEIVER in str(tree):
                list_BR.append(fileName)
        except Exception:
            continue
    # Arrange the dynamically registered broadcast receivers in column format -> Broadcast_Receiver
    Broadcast_Receiver = "\n".join(list_BR)
    if list_BR:
        PluginUtil.reportWarning(fileName, self.BroadcastReceiverIssueDetails(Broadcast_Receiver), res)
    queue.put(res)
def target(self, queue): files = common.java_files global filepath, tree parser = plyj.Parser() tree = '' res = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue try: for import_decl in tree.import_declarations: if 'Service' in import_decl.name.value: with open(filepath, 'r') as r: data = r.read() if PluginUtil.contains(self.CHECK_PERMISSION, data): PluginUtil.reportInfo(filepath, check_permission(filepath), res) break if PluginUtil.contains(self.ENFORCE_PERMISSION, data): PluginUtil.reportInfo(filepath, enforce_permission(filepath), res) break except Exception as e: common.logger.debug( "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue queue.put(res)
def target(self, queue): files = common.java_files global filepath, tree parser = plyj.Parser() tree = '' res = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue try: for import_decl in tree.import_declarations: # Check if Intent is called in the import statement if 'Intent' in import_decl.name.value: with open(filepath, 'r') as r: file_body = r.read() if PluginUtil.contains(self.NEW_TASK, file_body): PluginUtil.reportInfo(filepath, new_task(filepath), res) break if PluginUtil.contains(self.MULTIPLE_TASK_TASK, file_body): PluginUtil.reportInfo(filepath, multiple_task(filepath), res) break except Exception as e: common.logger.debug( "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue queue.put(res)
def target(self, queue): files = common.java_files global parser parser = plyj.Parser() global tree global fileName tree = '' res = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) fileName = str(f) try: tree = parser.parse_file(f) except Exception: continue try: global url url = [] for import_decl in tree.import_declarations: if 'HttpURLConnection' in import_decl.name.value or 'URL' in import_decl.name.value: textfile = str(open(fileName, 'r').read()) search = "http://" http_result = re.findall('\\b' + search + '\\b', textfile) if http_result: url = re.findall(self.http_url_regex, textfile) http_url_list = " \n".join(url) PluginUtil.reportInfo( fileName, self.HardcodedHTTPUrlsIssueDetails( (fileName, http_url_list)), res) break else: continue except Exception: continue queue.put(res)
def target(self, queue): files = common.java_files global parser parser = plyj.Parser() global tree global fileName tree = '' res = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) fileName = str(f) try: tree = parser.parse_file(f) except Exception: continue try: global url url = [] for import_decl in tree.import_declarations: if 'HttpURLConnection' in import_decl.name.value or 'URL' in import_decl.name.value: textfile = str(open(fileName, 'r').read()) search = "http://" http_result = re.findall('\\b'+search+'\\b', textfile) if http_result: url = re.findall(self.http_url_regex, textfile) http_url_list = " \n".join(url) PluginUtil.reportInfo(fileName, self.HardcodedHTTPUrlsIssueDetails((fileName, http_url_list)), res) break else: continue except Exception: continue queue.put(res)
def target(self, queue):
    f = str(common.manifest)
    res = []
    count = 0
    ordered_broadcast = []
    path_variable_list = []
    launch_mode_list = []
    global fileName
    # full path to app manifest
    fileName = qarkMain.find_manifest_in_source()
    receivers = self.UserCreatedReceivers()
    for receiver in receivers:
        if "exported" in str(receiver) and "true" in str(receiver):
            if not any(re.findall(self.PRIORITY_REGEX, str(receiver))):
                ordered_broadcast.append(str(receiver))
    # Arrange exported broadcast receivers without a priority set in column format
    list_orderedBR = " \n".join(ordered_broadcast)
    if ordered_broadcast:
        PluginUtil.reportWarning(fileName, self.OrderedBroadcastIssueDetails(list_orderedBR), res)
    for line in f.splitlines():
        count += 1
        # update progress bar
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(f.splitlines())))
        if any(re.findall(self.PATH_USAGE, line)):
            path_variable_list.append(line)
        if any(re.findall(self.LAUNCH_MODE, line)):
            launch_mode_list.append(line)
        if any(re.findall(self.TASK_REPARENTING, line)):
            PluginUtil.reportInfo(fileName, self.TaskReparentingIssue(fileName), res)
    # Arrange identified path variable and launch mode usage in column format
    path_variable = " \n".join(path_variable_list)
    launch_mode_variable = "\n".join(launch_mode_list)
    if path_variable_list:
        PluginUtil.reportWarning(fileName, self.PathUsageIssue(path_variable), res)
    if launch_mode_list:
        PluginUtil.reportInfo(fileName, self.LaunchModeIssue(launch_mode_variable), res)
    # Check for the Google Safe Browsing API
    if "WebView" in f.splitlines():
        if "EnableSafeBrowsing" not in f.splitlines() and "true" not in f.splitlines():
            PluginUtil.reportInfo(fileName, self.SafebrowsingIssueDetails(fileName), res)
    # send all results back to main thread
    queue.put(res)
def target(self, queue): files = common.java_files parser = plyj.Parser() global filepath, tree tree = '' res = [] issues_list = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue try: global url url = [] for import_decl in tree.import_declarations: # Check import statements with value declared as WebView and WebSettings for the potential use of web views if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value: with open(filepath, 'r') as r: data = r.read() if PluginUtil.contains(self.JAVASCRIPT_ENABLED, data): if PluginUtil.contains(self.MIXED_CONTENT, data): PluginUtil.reportWarning( filepath, mixed_content(filepath), res) if "setAllowFileAccess(false)" or "setAllowContentAccess(false)" not in data: if filepath not in issues_list: issues_list.append(filepath) if PluginUtil.contains(self.LOAD_URL_HTTP, data): PluginUtil.reportWarning(filepath, load_http_urls(filepath), res) break except Exception as e: common.logger.debug( "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue try: for import_decl in tree.import_declarations: if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value: for type_decl in tree.type_declarations: # Check for class declaration in java source code and traverse further down the AST to find method names if type(type_decl) is m.ClassDeclaration: for fields in type_decl.body: if type(fields) is m.MethodDeclaration: if 'shouldOverrideUrlLoading' in fields.name: if 'true' not in str(fields.body): PluginUtil.reportWarning( filepath, url_override(filepath), res) break else: continue if 'shouldInterceptRequest' in fields.name: if 'null' in str(fields.body): PluginUtil.reportWarning( filepath, intercept_request( filepath), res) break else: continue break except Exception as e: common.logger.debug( "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue if issues_list: issue_name = " \n".join(issues_list) PluginUtil.reportInfo(filepath, secure_content(issue_name), res) queue.put(res)
def target(self, queue):
    raw_file = str(common.manifest)
    # Split the raw file content into individual lines
    split_line = raw_file.splitlines()
    count = 0
    # Create a list for each object
    ordered_broadcast, path_variable_list, launch_mode_list, api_key_list, res = ([] for _ in xrange(5))
    global file_name
    # full path to app manifest
    file_name = qarkMain.find_manifest_in_source()
    receivers = self.UserCreatedReceivers()
    for receiver in receivers:
        if "exported" in str(receiver) and "true" in str(receiver):
            if not re.search(self.PRIORITY_REGEX, str(receiver)):
                ordered_broadcast.append(str(receiver))
    # Arrange exported broadcast receivers without a priority set in column format
    list_orderedBR = " \n".join(ordered_broadcast)
    if ordered_broadcast:
        PluginUtil.reportWarning(file_name, list_orderedBR, res)
    for line in split_line:
        count += 1
        # update progress bar
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(split_line)))
        if re.search(self.PATH_USAGE, line):
            path_variable_list.append(line)
        if re.search(self.LAUNCH_MODE, line):
            launch_mode_list.append(line)
        if re.search(self.TASK_REPARENTING, line):
            PluginUtil.reportInfo(file_name, task_reparenting(file_name), res)
        if re.match(self.API_KEY_REGEX, line):
            # Check if a special character is present in the line. If yes, then ignore it.
            if not re.match(self.SPECIAL_CHAR_REGEX, line) and line not in api_key_list:
                api_key_list.append(line)
        # Additional check for hardcoded API keys matching the syntax most commonly used with Google API keys
        if re.search(self.HARDCODED_API_KEY, line) and line not in api_key_list:
            api_key_list.append(line)
    # Arrange identified path variable and launch mode usage in column format
    path_variable = " \n".join(path_variable_list)
    launch_mode_variable = "\n".join(launch_mode_list)
    api_key_variable = "\n".join(api_key_list)
    if path_variable_list:
        PluginUtil.reportWarning(file_name, path_usage(path_variable), res)
    if launch_mode_list:
        PluginUtil.reportInfo(file_name, task_launch_mode(launch_mode_variable), res)
    if api_key_list:
        PluginUtil.reportInfo(file_name, hardcoded_api_key(api_key_variable), res)
    # Check for the Google Safe Browsing API
    if "WebView" in split_line:
        if "EnableSafeBrowsing" not in split_line and "true" not in split_line:
            PluginUtil.reportInfo(file_name, google_safe_browsing(file_name), res)
    # send all results back to main thread
    queue.put(res)
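# A small standalone sketch of the manifest line scan above. The attribute
# patterns are assumptions standing in for the plugin's PRIORITY_REGEX,
# PATH_USAGE and LAUNCH_MODE class attributes; they target the corresponding
# android: attributes in AndroidManifest.xml.
import re

PRIORITY_REGEX = r'android:priority'            # assumed
PATH_USAGE = r'android:path(Prefix|Pattern)?='  # assumed
LAUNCH_MODE = r'android:launchMode'             # assumed

def scan_manifest_lines(manifest_text):
    findings = {'path_usage': [], 'launch_mode': []}
    for line in manifest_text.splitlines():
        if re.search(PATH_USAGE, line):
            findings['path_usage'].append(line.strip())
        if re.search(LAUNCH_MODE, line):
            findings['launch_mode'].append(line.strip())
    return findings

manifest = '''<activity android:launchMode="singleTask">
    <data android:pathPrefix="/share" />
</activity>'''
print(scan_manifest_lines(manifest))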
def target(self, queue): files = common.java_files global filepath, tree parser = plyj.Parser() total_debug_logs, total_verbose_logs, debug_logs, verbose_logs, verbose_logs_list, res, \ debug_logs_list, discovered_debug_logs, discovered_verbose_logs = ([] for _ in xrange(9)) tree = '' count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue # Traverse down the tree to find out verbose or debug logs try: for type_decl in tree.type_declarations: if type(type_decl) is m.ClassDeclaration: for fields in type_decl.body: if type(fields) is m.MethodDeclaration: # Check if the app is send verbose logging message if str(fields.name) == 'v': verbose_logs.append(str(fields.name)) if filepath not in discovered_verbose_logs: discovered_verbose_logs.append( filepath) # Check if the app is send debug logging message elif str(fields.name) == 'd': debug_logs.append(str(fields.name)) if filepath not in discovered_debug_logs: discovered_debug_logs.append(filepath) except Exception as e: common.logger.debug( "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue # Join all the filename and path containing debug and verbose logging debug_logs_path = " \n".join(discovered_debug_logs) verbose_logs_path = " \n".join(discovered_verbose_logs) queue.put(res) # Display the file paths of all discovered logs if discovered_debug_logs: x = len(debug_logs) PluginUtil.reportInfo(filepath, debug_log_issues(debug_logs_path, x), res) if discovered_verbose_logs: y = len(verbose_logs) PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_logs_path, y), res) # Sometimes Log functions may be called from a constructor and hence maybe skipped by tree # if len(debug_logs) == 0 and len(verbose_logs) == 0: for f in files: with open(f, 'r') as fi: filename = fi.read() filepath = str(f) find_debug_logs = re.findall(self.debug_regex, filename) find_verbose_logs = re.findall(self.verbose_regex, filename) if find_debug_logs: total_debug_logs.append(str(find_debug_logs)) if filepath not in debug_logs_list: debug_logs_list.append(filepath) if find_verbose_logs: total_verbose_logs.append(str(find_verbose_logs)) if filepath not in verbose_logs_list: verbose_logs_list.append(filepath) debug_path = " \n".join(debug_logs_list) verbose_path = " \n".join(verbose_logs_list) if total_debug_logs: x = len(total_debug_logs) PluginUtil.reportInfo(filepath, debug_log_issues(debug_path, x), res) if total_verbose_logs: y = len(total_verbose_logs) PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_path, y), res)
def target(self, queue):
    files = common.java_files
    global parser, tree, fileName, verbose, debug, debug_logs_path, verbose_logs_path
    parser = plyj.Parser()
    debug_logs = []
    verbose_logs = []
    discovered_debug_logs = []
    discovered_verbose_logs = []
    res = []
    tree = ''
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        fileName = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception:
            continue
        # Traverse down the tree to find verbose or debug logs
        try:
            for type_decl in tree.type_declarations:
                if type(type_decl) is m.ClassDeclaration:
                    for t in type_decl.body:
                        if type(t) is m.MethodDeclaration:
                            if str(t.name) == 'v':
                                verbose_logs.append(str(t.name))
                                discovered_verbose_logs.append(fileName)
                            elif str(t.name) == 'd':
                                debug_logs.append(str(t.name))
                                discovered_debug_logs.append(fileName)
        except Exception:
            continue
    # Join the names and paths of all files containing debug and verbose logging
    debug_logs_path = " \n".join(discovered_debug_logs)
    verbose_logs_path = " \n".join(discovered_verbose_logs)
    if discovered_debug_logs:
        PluginUtil.reportInfo(fileName, self.DebugLogsIssueDetails(debug_logs_path), res)
    if discovered_verbose_logs:
        PluginUtil.reportInfo(fileName, self.VerboseLogsIssueDetails(verbose_logs_path), res)
    # Provide the count of verbose/debug logs.
    # Written separately so that the issue description is mentioned once and not repeated for each warning.
    if debug_logs or verbose_logs:
        x = str(len(debug_logs))
        y = str(len(verbose_logs))
        PluginUtil.reportInfo(fileName, self.LogIssueDetails((x, y)), res)
    global reg, reg1, filename
    len_reg = []
    len_reg1 = []
    # Sometimes Log functions may be called from a constructor and hence may be skipped by the tree
    if len(debug_logs) == 0 and len(verbose_logs) == 0:
        for f in files:
            with open(f, 'r') as fi:
                filename = fi.read()
            file_name = str(f)
            reg = re.findall(self.debug_regex, filename)
            reg1 = re.findall(self.verbose_regex, filename)
            if reg:
                len_reg.append(str(reg))
                PluginUtil.reportInfo(file_name, self.DebugLogsIssueDetails(file_name), res)
            if reg1:
                len_reg1.append(str(reg1))
                PluginUtil.reportInfo(file_name, self.VerboseLogsIssueDetails(file_name), res)
        if len_reg or len_reg1:
            x = str(len(len_reg))
            y = str(len(len_reg1))
            PluginUtil.reportInfo(file_name, self.LogIssueDetails((x, y)), res)
    # send all results back to the main thread after reporting
    queue.put(res)
def target(self, queue): files = common.java_files global filepath, tree parser = plyj.Parser() total_debug_logs, total_verbose_logs, debug_logs, verbose_logs, verbose_logs_list, res, \ debug_logs_list, discovered_debug_logs, discovered_verbose_logs = ([] for _ in xrange(9)) tree = '' count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue # Traverse down the tree to find out verbose or debug logs try: for type_decl in tree.type_declarations: if type(type_decl) is m.ClassDeclaration: for fields in type_decl.body: if type(fields) is m.MethodDeclaration: # Check if the app is send verbose logging message if str(fields.name) == 'v': verbose_logs.append(str(fields.name)) if filepath not in discovered_verbose_logs: discovered_verbose_logs.append(filepath) # Check if the app is send debug logging message elif str(fields.name) == 'd': debug_logs.append(str(fields.name)) if filepath not in discovered_debug_logs: discovered_debug_logs.append(filepath) except Exception as e: common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue # Join all the filename and path containing debug and verbose logging debug_logs_path = " \n".join(discovered_debug_logs) verbose_logs_path = " \n".join(discovered_verbose_logs) queue.put(res) # Display the file paths of all discovered logs if discovered_debug_logs: x = len(debug_logs) PluginUtil.reportInfo(filepath, debug_log_issues(debug_logs_path, x), res) if discovered_verbose_logs: y = len(verbose_logs) PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_logs_path, y), res) # Sometimes Log functions may be called from a constructor and hence maybe skipped by tree # if len(debug_logs) == 0 and len(verbose_logs) == 0: for f in files: with open(f, 'r') as fi: filename = fi.read() filepath = str(f) find_debug_logs = re.findall(self.debug_regex, filename) find_verbose_logs = re.findall(self.verbose_regex, filename) if find_debug_logs: total_debug_logs.append(str(find_debug_logs)) if filepath not in debug_logs_list: debug_logs_list.append(filepath) if find_verbose_logs: total_verbose_logs.append(str(find_verbose_logs)) if filepath not in verbose_logs_list: verbose_logs_list.append(filepath) debug_path = " \n".join(debug_logs_list) verbose_path = " \n".join(verbose_logs_list) if total_debug_logs: x = len(total_debug_logs) PluginUtil.reportInfo(filepath, debug_log_issues(debug_path, x), res) if total_verbose_logs: y = len(total_verbose_logs) PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_path, y), res)
def target(self, queue): files = common.java_files parser = plyj.Parser() global filepath, tree tree = '' res = [] issues_list = [] count = 0 for f in files: count += 1 pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files))) filepath = str(f) try: tree = parser.parse_file(f) except Exception as e: common.logger.exception( "Unable to parse the file and generate as AST. Error: " + str(e)) continue try: global url url = [] for import_decl in tree.import_declarations: # Check import statements with value declared as WebView and WebSettings for the potential use of web views if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value: with open(filepath, 'r') as r: data = r.read() if PluginUtil.contains(self.JAVASCRIPT_ENABLED, data): if PluginUtil.contains(self.MIXED_CONTENT, data): PluginUtil.reportWarning(filepath, mixed_content(filepath), res) if "setAllowFileAccess(false)" or "setAllowContentAccess(false)" not in data: if filepath not in issues_list: issues_list.append(filepath) if PluginUtil.contains(self.LOAD_URL_HTTP, data): PluginUtil.reportWarning(filepath, load_http_urls(filepath), res) break except Exception as e: common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue try: for import_decl in tree.import_declarations: if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value: for type_decl in tree.type_declarations: # Check for class declaration in java source code and traverse further down the AST to find method names if type(type_decl) is m.ClassDeclaration: for fields in type_decl.body: if type(fields) is m.MethodDeclaration: if 'shouldOverrideUrlLoading' in fields.name: if 'true' not in str(fields.body): PluginUtil.reportWarning(filepath, url_override(filepath), res) break else: continue if 'shouldInterceptRequest' in fields.name: if 'null' in str(fields.body): PluginUtil.reportWarning(filepath, intercept_request(filepath), res) break else: continue break except Exception as e: common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e)) continue if issues_list: issue_name = " \n".join(issues_list) PluginUtil.reportInfo(filepath, secure_content(issue_name), res) queue.put(res)