def target(self, queue):
    """Scan decompiled sources for TelephonyManager calls that read phone
    identifiers (getLine1Number / getDeviceId) and report each offending file.

    Results are appended to a list and handed back to the main thread via
    ``queue``.
    """
    # get all decompiled files that contain usage of TelephonyManager
    files = common.text_scan(common.java_files, self.telephonyManagerRegex)
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.getName(), percent=round(count * 100 / len(files)))
        # get decompiled file body
        fileName = f[1]
        with open(fileName, 'r') as fi:
            fileBody = fi.read()
        # report if file contains an inline call
        if PluginUtil.contains(self.inlineRegex, fileBody):
            PluginUtil.reportInfo(fileName, self.PhoneIdentifierIssueDetails(fileName), res)
            # BUG FIX: this was 'break', which aborted the scan of every
            # remaining file after the first inline hit; 'continue' moves
            # on to the next file instead.
            continue
        # report if any TelephonyManager variable invokes calls to get phone identifiers
        for varName in PluginUtil.returnGroupMatches(self.varNameRegex, 2, fileBody):
            if PluginUtil.contains(r'%s\.(getLine1Number|getDeviceId)\(.*?\)' % varName, fileBody):
                PluginUtil.reportInfo(fileName, self.PhoneIdentifierIssueDetails(fileName), res)
                # one report per file is enough
                break
    queue.put(res)
def target(self, queue):
    """Scan every decompiled java file for words matching the plugin's
    API-key pattern and report the matching lines with their file paths.
    """
    files = common.java_files
    global file_path
    api_key_list = []
    # Lines already recorded, so the same line is never reported twice.
    # BUG FIX: the old dedup test was "line not in api_key_list", but list
    # entries are stored prefixed with "Line: ", so the test never matched
    # and duplicates were appended.
    seen_lines = set()
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        file_path = str(f)
        with open(file_path, 'r') as fi:
            file_content = fi.read()
        # Split the file content into individual lines, then words.
        for line in file_content.splitlines():
            for word in line.split():
                # Regex to check API value in word
                if not re.search(self.API_KEY_REGEX, word):
                    continue
                # Ignore words containing special characters to avoid
                # redundant display of line and filepath in the output.
                if not re.search(self.SPECIAL_CHAR_REGEX, word) and line not in seen_lines:
                    seen_lines.add(line)
                    api_key_list.append("Line: " + line)
                    api_key_list.append("Filepath: " + file_path + "\n")
    # Hoisted out of the loop: only the final joined text is ever reported.
    api_key_variable = "\n".join(api_key_list)
    if api_key_list:
        PluginUtil.reportInfo(file_path, hardcoded_api_key(api_key_variable), res)
    queue.put(res)
def target(self, queue):
    """Report every decompiled file whose text contains the literal 'API_KEY'."""
    findings = []
    matches = common.text_scan(common.java_files, r'API_KEY')
    for position, match in enumerate(matches, 1):
        pub.sendMessage('progress', bar=self.getName(), percent=round(position * 100 / len(matches)))
        common.logger.debug("Text found, " + str(match))
        message = "The string 'API_KEY' appears in the file: %s\n%s" % (match[1], str(match[0]))
        # entry for the HTML report
        report_entry = ReportIssue()
        report_entry.setCategory(ExploitType.PLUGIN)
        report_entry.setDetails(message)
        report_entry.setFile(str(match[1]))
        report_entry.setSeverity(Severity.VULNERABILITY)
        findings.append(report_entry)
        # entry for the terminal output
        terminal_entry = terminalPrint()
        terminal_entry.setLevel(Severity.VULNERABILITY)
        terminal_entry.setData(message)
        findings.append(terminal_entry)
    queue.put(findings)
def main(queue):
    """Run the crypto checks over every decompiled java file.

    Emits a weak-RNG warning for old SDKs, looks for key files, then walks
    each file's AST running the ECB-mode check. Results go back to the main
    thread through ``queue``.
    """
    global parser
    global tree
    results = []
    count = 0
    # Below API 19 the platform RNG has known weaknesses, so warn up front.
    if common.minSdkVersion<19:
        weak_rng_warning(results)
    find_key_files(results)
    for j in common.java_files:
        count = count + 1
        pub.sendMessage('progress', bar='Crypto issues', percent=round(count*100/common.java_files.__len__()))
        try:
            tree=parser.parse_file(j)
            if tree is not None:
                #if re.search(r'\.getInstance\(',str(tree)):
                # print "YES"
                for type_decl in tree.type_declarations:
                    if type(type_decl) is m.ClassDeclaration:
                        # Run the ECB check on every member of the class.
                        for t in type_decl.body:
                            try:
                                recursive_ecb_check(t,j,results)
                                #fixedSeedCheck(t,j)
                            except Exception as e:
                                common.logger.debug("Error running recursive_ecb_check in cryptoFlaws.py: " + str(e))
                                report.write("parsingerror-issues-list", "Error running recursive_ecb_check in cryptoFlaws.py: " + str(e), "strong")
        except Exception as e:
            # Parse failures are recorded in the report rather than aborting the scan.
            common.logger.debug("Unable to create tree for " + str(j))
            report.write("parsingerror-issues-list", "Unable to create tree for " + str(j), "strong")
    queue.put(results)
    return
def target(self, queue):
    """Sample plugin: flag every decompiled file containing the text 'pass'.

    Demonstrates the plugin API: text_scan over the decompiled sources,
    progress-bar updates, and both HTML-report and terminal result objects.
    """
    findings = []
    # Scan all decompiled files for the literal text "pass".
    matches = common.text_scan(common.java_files, r'pass')
    for position, match in enumerate(matches, 1):
        # Drive the progress bar shown in the terminal.
        pub.sendMessage('progress', bar=self.getName(), percent=round(position * 100 / len(matches)))
        # Goes into the log file generated under /logs.
        common.logger.debug("Text found, " + str(match))
        description = "The string 'pass' appears in the file: %s\n%s" % (match[1], str(match[0]))
        # Individual result for the HTML report.
        report_issue = ReportIssue()
        report_issue.setCategory(ExploitType.PLUGIN)
        report_issue.setDetails(description)
        report_issue.setFile(str(match[1]))
        report_issue.setSeverity(Severity.VULNERABILITY)
        findings.append(report_issue)
        # Individual result for the terminal output.
        console_issue = terminalPrint()
        console_issue.setLevel(Severity.VULNERABILITY)
        console_issue.setData(description)
        findings.append(console_issue)
    # Hand the complete list (terminal + HTML entries) back to the main thread.
    queue.put(findings)
def start(queue,height):
    """ Start finding pending intents

    Walks every decompiled java file's AST and runs ``recurse`` over each
    field of each class member. ``height`` is accepted for the caller's
    screen layout but is not used here. Results go back via ``queue``.
    """
    results = []
    global tree
    global current_file
    count = 0
    #TODO - Look for use of fillIn method which can make this a much more exploitable condition
    for j in common.java_files:
        count = count + 1
        pub.sendMessage('progress', count1=None, count2=round(count*100/common.java_files.__len__()), count3=None)
        current_file=j
        tree=parser.parse_file(j)
        #TODO - Need to add scanning of the imports, to see if Intent or PendingIntent is extended, was working on it,
        #but the one issue where it arose was non-trivial, so I gave up for now
        if hasattr(tree,'type_declarations'):
            for type_decl in tree.type_declarations:
                if type(type_decl) is m.ClassDeclaration:
                    for t in type_decl.body:
                        for f in t._fields:
                            #dynamically parse each token where f is the field and t is the token
                            try:
                                recurse(f,t,results)
                            except Exception as e:
                                common.logger.debug("Problem in recurse function of findPending.py: " + str(e))
                                common.parsingerrors.add(str(current_file))
        else:
            # Parse produced no type declarations; record the file in the report.
            common.logger.debug("No type declarations: " + str(j))
            report.write("parsingerror-issues-list", str(current_file), "strong")
    queue.put(results)
    return
def start(queue, height):
    """ Start finding pending intents """
    results = []
    global tree
    global current_file
    processed = 0
    # TODO - Look for use of fillIn method which can make this a much more exploitable condition
    total = len(common.java_files)
    for source_file in common.java_files:
        processed += 1
        pub.sendMessage("progress", count1=None, count2=round(processed * 100 / total), count3=None)
        current_file = source_file
        tree = parser.parse_file(source_file)
        # TODO - Need to add scanning of the imports, to see if Intent or PendingIntent is extended, was working on it,
        # but the one issue where it arose was non-trivial, so I gave up for now
        if not hasattr(tree, "type_declarations"):
            common.logger.debug("No type declarations: " + str(source_file))
            report.write("parsingerror-issues-list", str(current_file), "strong")
            continue
        for declaration in tree.type_declarations:
            if type(declaration) is m.ClassDeclaration:
                for member in declaration.body:
                    # dynamically walk each token, where the member is the
                    # token and each of its _fields is inspected in turn
                    for field_name in member._fields:
                        try:
                            recurse(field_name, member, results)
                        except Exception as e:
                            common.logger.debug("Problem in recurse function of findPending.py: " + str(e))
                            common.parsingerrors.add(str(current_file))
    queue.put(results)
    return
def target(self, queue):
    """Scan decompiled sources for WebView usage that calls
    addJavascriptInterface and report each offending file via ``queue``.
    """
    # get all decompiled files that contain usage of WebView
    files = common.text_scan(common.java_files, self.webViewRegex)
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.getName(), percent=round(count * 100 / len(files)))
        # read the decompiled file body
        fileName = f[1]
        with open(fileName, 'r') as fi:
            fileBody = fi.read()
        # report if file contains any inline calls
        if PluginUtil.contains(self.inlineRegex, fileBody):
            PluginUtil.reportIssue(fileName, self.createIssueDetails(fileName), res)
            # BUG FIX: was 'break', which stopped the scan of all remaining
            # files after the first inline hit; move to the next file instead.
            continue
        # report if any WebView variable invokes addJavascriptInterface
        for varName in PluginUtil.returnGroupMatches(self.varNameRegex, 2, fileBody):
            if PluginUtil.contains(r'%s\.addJavascriptInterface\(.*?\)' % varName, fileBody):
                PluginUtil.reportIssue(fileName, self.createIssueDetails(fileName), res)
                # one report per file is enough
                break
    queue.put(res)
def target(self, queue):
    """Sample plugin: flag every decompiled file containing 'API_KEY'.

    Illustrates the plugin API: text scanning, progress updates, and the
    two result objects (HTML report + terminal print).
    """
    collected = []
    # Scan all decompiled files for the text "API_KEY".
    hits = common.text_scan(common.java_files, r'API_KEY')
    seen = 0
    for hit in hits:
        seen += 1
        # Generates the progress bar in the terminal output.
        pub.sendMessage('progress', bar=self.getName(), percent=round(seen * 100 / len(hits)))
        # Logged to the file under /logs.
        common.logger.debug("Text found, " + str(hit))
        text = "The string 'API_KEY' appears in the file: %s\n%s" % (hit[1], str(hit[0]))
        # Result destined for the HTML report.
        html_issue = ReportIssue()
        html_issue.setCategory(ExploitType.PLUGIN)
        html_issue.setDetails(text)
        html_issue.setFile(str(hit[1]))
        html_issue.setSeverity(Severity.VULNERABILITY)
        collected.append(html_issue)
        # Result destined for the terminal output.
        term_issue = terminalPrint()
        term_issue.setLevel(Severity.VULNERABILITY)
        term_issue.setData(text)
        collected.append(term_issue)
    # Send every result (terminal and HTML) back to the main thread.
    queue.put(collected)
def target(self, queue):
    """Report decompiled files where a WebView exposes
    addJavascriptInterface, either inline or through a WebView variable.
    """
    # all decompiled files that contain usage of WebView
    files = common.text_scan(common.java_files, self.webViewRegex)
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.getName(), percent=round(count * 100 / len(files)))
        # load the decompiled file body
        fileName = f[1]
        with open(fileName, 'r') as fi:
            fileBody = fi.read()
        # inline invocation of addJavascriptInterface
        if PluginUtil.contains(self.inlineRegex, fileBody):
            PluginUtil.reportIssue(fileName, self.createIssueDetails(fileName), res)
            # BUG FIX: 'break' here ended the whole scan after the first
            # matching file; 'continue' proceeds with the remaining files.
            continue
        # invocation through a named WebView variable
        for varName in PluginUtil.returnGroupMatches(self.varNameRegex, 2, fileBody):
            if PluginUtil.contains(r'%s\.addJavascriptInterface\(.*?\)' % varName, fileBody):
                PluginUtil.reportIssue(fileName, self.createIssueDetails(fileName), res)
                # avoid duplicate reports for the same file
                break
    queue.put(res)
def target(self, queue):
    """Walk each decompiled file's AST and run the insecure-call check on
    every class member.
    """
    files = common.java_files
    global parser, tree, filepath
    parser = plyj.Parser()
    tree = ''
    global res
    res = []
    total = len(files)
    for position, source in enumerate(files, 1):
        pub.sendMessage('progress', bar=self.name, percent=round(position * 100 / total))
        filepath = str(source)
        try:
            tree = parser.parse_file(source)
        except Exception as e:
            common.logger.exception("Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for declaration in tree.type_declarations:
                if type(declaration) is m.ClassDeclaration:
                    for member in declaration.body:
                        try:
                            self.recursive_insecure_call_function(member, source, res)
                        except Exception as e:
                            common.logger.exception("Unable to run insecure function plugin " + str(e))
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    queue.put(res)
def target(self, queue):
    """Report decompiled files where TelephonyManager is used to read phone
    identifiers (getLine1Number / getDeviceId).
    """
    # all decompiled files that contain usage of TelephonyManager
    files = common.text_scan(common.java_files, self.telephonyManagerRegex)
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.getName(), percent=round(count * 100 / len(files)))
        # load the decompiled file body
        fileName = f[1]
        with open(fileName, 'r') as fi:
            fileBody = fi.read()
        # inline call to a phone-identifier getter
        if PluginUtil.contains(self.inlineRegex, fileBody):
            PluginUtil.reportInfo(fileName, self.PhoneIdentifierIssueDetails(fileName), res)
            # BUG FIX: 'break' here skipped every remaining file after the
            # first hit; 'continue' keeps scanning the rest.
            continue
        # calls made through a named TelephonyManager variable
        for varName in PluginUtil.returnGroupMatches(self.varNameRegex, 2, fileBody):
            if PluginUtil.contains(r'%s\.(getLine1Number|getDeviceId)\(.*?\)' % varName, fileBody):
                PluginUtil.reportInfo(fileName, self.PhoneIdentifierIssueDetails(fileName), res)
                # avoid duplicate reports for the same file
                break
    queue.put(res)
def cfr(path, dirname):
    """ calls the cfr decompiler from command line

    Streams CFR's merged stdout/stderr and advances both the cfr and
    jdcore progress bars on every "Processing" line.
    """
    jar = common.rootDir + "/lib/cfr_0_115.jar"
    proc = subprocess.Popen(["java", "-jar", jar, path, "--outputdir", dirname + "1"],
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    try:
        output_line = proc.stdout.readline()
        while output_line:
            if "Processing" in output_line:
                common.counter1 += 1
                percent = round(common.counter1 * 100 / common.count)
                pub.sendMessage('decompile', cfr=percent)
                pub.sendMessage('decompile', jdcore=percent)
            output_line = proc.stdout.readline()
    except Exception as e:
        logger.debug(e.message)
def target(self, queue):
    """For files importing an Intent class, report matches of the plugin's
    NEW_TASK / MULTIPLE_TASK_TASK patterns.
    """
    files = common.java_files
    global filepath, tree
    parser = plyj.Parser()
    tree = ''
    res = []
    total = len(files)
    for position, source in enumerate(files, 1):
        pub.sendMessage('progress', bar=self.name, percent=round(position * 100 / total))
        filepath = str(source)
        try:
            tree = parser.parse_file(source)
        except Exception as e:
            common.logger.exception("Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                # Only inspect imports that mention Intent.
                if 'Intent' not in import_decl.name.value:
                    continue
                with open(filepath, 'r') as reader:
                    file_body = reader.read()
                if PluginUtil.contains(self.NEW_TASK, file_body):
                    PluginUtil.reportInfo(filepath, new_task(filepath), res)
                    break
                if PluginUtil.contains(self.MULTIPLE_TASK_TASK, file_body):
                    PluginUtil.reportInfo(filepath, multiple_task(filepath), res)
                    break
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    queue.put(res)
def target(self, queue):
    """Run the class-loader and dynamic-broadcast-receiver checks over every
    decompiled java file.

    The first pass only runs when the file imports DexClassLoader; the second
    pass runs the register-receiver check on every class. Receivers collected
    in the module-level ``receivers_list`` (presumably filled by
    recursive_register_receiver_function -- confirm against that helper) are
    reported once at the end.
    """
    files = common.java_files
    global parser, tree, filepath
    parser = plyj.Parser()
    tree = ''
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        filepath = str(f)
        try:
            # Parse the java file to an AST
            tree = parser.parse_file(f)
        except Exception as e:
            common.logger.exception("Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                # Check if DexClassLoader is called in the import statement; example import dalvik.system.DexClassLoader
                if self.DEX_CLASS_LOADER in import_decl.name.value:
                    for type_decl in tree.type_declarations:
                        # Check class declaration within the java source code
                        if type(type_decl) is m.ClassDeclaration:
                            # Traverse through every field declared in the class
                            for fields in type_decl.body:
                                try:
                                    self.recursive_classloader_function(fields, f, res)
                                except Exception as e:
                                    common.logger.error("Unable to run class loader plugin " + str(e))
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
        try:
            for type_decl in tree.type_declarations:
                # Check class declaration within the java source code
                if type(type_decl) is m.ClassDeclaration:
                    # Traverse through every field declared in the class
                    for fields in type_decl.body:
                        try:
                            self.recursive_register_receiver_function(fields, f, res)
                        except Exception as e:
                            common.logger.error("Unable to run register receiver function plugin " + str(e))
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    # Arrange the Broadcast Receivers created Dynamically in column format and store it in the variable -> Broadcast_Receiver
    br_list = "\n".join(receivers_list)
    if receivers_list:
        # Report the issue in the file and display it on the terminal
        PluginUtil.reportWarning(filepath, broadcast_receiver(br_list), res)
    queue.put(res)
def validate(queue,height): """ Validates common pitfalls for certificate validation logic """ #writer1 = common.Writer((0, height)) results = [] global tree global parser global filename global warningGiven global sslSessions count = 0 for j in common.java_files: sslSessions=[] count = count + 1 pub.sendMessage('progress', count1=round(count*100/common.java_files.__len__()), count2=None, count3=None, count4=None) filename=str(j) try: tree=parser.parse_file(j) except Exception as e: continue # No need to log this since we now warn potential parsing errors on screen and provide details in the report. if tree is None: results.append("Some files may not be parsed correctly. For a list of such files, please review the final report.") else: try: for type_decl in tree.type_declarations: if type(type_decl) is m.ClassDeclaration: for t in type_decl.body: try: recursive_insecure_trust_manager(t,j,results) except Exception as e: common.logger.error("Unable to run recursive_insecure_trust_manager in certValidation.py: " + str(e)) try: recursive_allow_all_hostname_verifier(t,j,results) except Exception as e: common.logger.error("Unable to run recursive_allow_all_hostname_verifier in certValidation.py: " + str(e)) try: recursive_ssl_session(t,j,results) except Exception as e: common.logger.error("Unable to run recursive_ssl_session in certValidation.py: " + str(e)) except Exception as e: if common.source_or_apk==2: common.logger.error("Error in validate function of certValidation.py: " + str(e)) else: common.logger.error("Bad file: " + str(j) + ", this is not uncommon") try: recursive_find_verify(None,j,results) except Exception as e: common.logger.error("Problem in findVerify function of certValidation.py: " + str(e)) warningGiven=False unverified_sessions(results) queue.put(results) return
def apktool(pathToAPK):
    """Decode the APK with apktool and publish its AndroidManifest.xml.

    If the APK is /foo/bar/temp/myapp.apk, apktool writes its output to
    /foo/bar/temp/apktool/ and the manifest is read from there, then sent
    to subscribers of the 'manifest' topic.
    """
    # Hoisted: this output-directory expression was previously recomputed
    # three separate times from pathToAPK.
    output_dir = str(pathToAPK.rsplit(".", 1)[0]).rsplit("/", 1)[0] + "/apktool"
    # Run: java -jar apktool_2.1.0.jar d <apk> --no-src --force -m --output <output_dir>
    apktool = subprocess.call(['java', '-Djava.awt.headless=true', '-jar',
                               common.rootDir + '/lib/apktool_2.1.0.jar', 'd', pathToAPK,
                               '--no-src', '--force', '-m', '--output', output_dir],
                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    manifest_path = output_dir + "/AndroidManifest.xml"
    print(manifest_path)
    with open(manifest_path, "r") as f:
        manifest = f.read()
    pub.sendMessage('manifest', mf=manifest)
    return
def target(self, queue):
    """Inspect the app manifest for risky configuration: exported ordered
    broadcast receivers without priority, path/launch-mode/task-reparenting
    usage, and missing WebView safe-browsing opt-in.
    """
    f = str(common.manifest)
    res = []
    ordered_broadcast = []
    path_variable_list = []
    launch_mode_list = []
    global fileName
    # full path to app manifest
    fileName = qarkMain.find_manifest_in_source()
    receivers = self.UserCreatedReceivers()
    for receiver in receivers:
        receiver_text = str(receiver)
        # BUG FIX: the original condition was
        #     if "exported" and "true" in str(receiver):
        # which Python evaluates as just ("true" in str(receiver));
        # both tokens must actually be present.
        if "exported" in receiver_text and "true" in receiver_text:
            if not any(re.findall(self.PRIORITY_REGEX, receiver_text)):
                ordered_broadcast.append(receiver_text)
    # Arrange exported broadcast receivers without priority set in column format
    list_orderedBR = " \n".join(ordered_broadcast)
    if ordered_broadcast:
        PluginUtil.reportWarning(fileName, self.OrderedBroadcastIssueDetails(list_orderedBR), res)
    # Hoisted: f.splitlines() was recomputed on every loop iteration.
    manifest_lines = f.splitlines()
    count = 0
    for line in manifest_lines:
        count += 1
        # update progress bar
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(manifest_lines)))
        if any(re.findall(self.PATH_USAGE, line)):
            path_variable_list.append(line)
        if any(re.findall(self.LAUNCH_MODE, line)):
            launch_mode_list.append(line)
        if any(re.findall(self.TASK_REPARENTING, line)):
            PluginUtil.reportInfo(fileName, self.TaskReparentingIssue(fileName), res)
    # Arrange identified path variable and launch mode usage in column format
    path_variable = " \n".join(path_variable_list)
    launch_mode_variable = "\n".join(launch_mode_list)
    if path_variable_list:
        PluginUtil.reportWarning(fileName, self.PathUsageIssue(path_variable), res)
    if launch_mode_list:
        PluginUtil.reportInfo(fileName, self.LaunchModeIssue(launch_mode_variable), res)
    # Check for google safebrowsing API.
    # BUG FIX: the original tested membership in f.splitlines(), which
    # compares whole lines (never just "WebView"/"true"), and the bare
    # `"EnableSafeBrowsing" and ...` dropped its left operand. Use
    # substring tests against the manifest text instead.
    if "WebView" in f:
        if "EnableSafeBrowsing" not in f or "true" not in f:
            PluginUtil.reportInfo(fileName, self.SafebrowsingIssueDetails(fileName), res)
    # send all results back to main thread
    queue.put(res)
def text_scan(file_list, rex_n):
    """ Given a list of files, search content of each file by the regular
    expression and return a list of matches

    The first element of the returned list is always an empty list; each
    subsequent element is a [matches, filename] pair.
    """
    hits = [[]]
    total = common.java_files.__len__()
    for position, candidate in enumerate(file_list, 1):
        pub.sendMessage('progress', bar='Webview checks', percent=round(position * 100 / total))
        found = common.read_files(candidate, rex_n)
        if len(found) > 0:
            hits.append([found, candidate])
    return hits
def text_scan(file_list, rex_n):
    """ Given a list of files, search content of each file by the regular
    expression and return a list of matches

    Returns a list whose first element is an empty list (legacy placeholder),
    followed by [matches, filename] pairs for files with at least one hit.
    """
    collected = [[]]
    total = common.java_files.__len__()
    for index, entry in enumerate(file_list, 1):
        # count4 carries this scanner's progress percentage
        pub.sendMessage('progress', count1=None, count2=None, count3=None,
                        count4=round(index * 100 / total))
        matches = common.read_files(entry, rex_n)
        if len(matches) > 0:
            collected.append([matches, entry])
    return collected
def text_scan(file_list, rex_n):
    """ Given a list of files, search content of each file by the regular
    expression and return a list of matches

    The leading empty list is kept for callers that expect it; every other
    element is a [matches, filename] pair.
    """
    scanned = 0
    outcome = [[]]
    total = common.java_files.__len__()
    for path in file_list:
        scanned += 1
        pub.sendMessage('progress', bar='File Permissions', percent=round(scanned * 100 / total))
        matched = common.read_files(path, rex_n)
        if len(matched) > 0:
            outcome.append([matched, path])
    return outcome
def procyon(path, dirname):
    """ calls the procyon decompiler from command line

    Streams the subprocess's merged stdout/stderr and advances the procyon
    progress bar on every "Decompiling" line.
    """
    jar = common.rootDir + "/lib/procyon/procyon-decompiler-0.5.29.jar"
    proc = subprocess.Popen(["java", "-jar", jar, path, "-o ", dirname + "2"],
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    try:
        output_line = proc.stdout.readline()
        while output_line:
            if "Decompiling" in output_line:
                common.counter2 += 1
                pub.sendMessage('decompile', procyon=round(common.counter2 * 100 / common.count))
            output_line = proc.stdout.readline()
    except Exception as e:
        logger.debug(e.message)
def cfr(path, dirname):
    """ calls the cfr decompiler from command line

    Advances both the cfr and jdcore progress bars for every "Processing"
    line that CFR prints.
    """
    command = ["java", "-jar", common.rootDir + "/lib/cfr_0_96.jar",
               path, "--outputdir", dirname + "1"]
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    try:
        current = proc.stdout.readline()
        while current:
            if "Processing" in current:
                common.counter1 += 1
                progress = round(common.counter1 * 100 / common.count)
                pub.sendMessage('decompile', cfr=progress)
                pub.sendMessage('decompile', jdcore=progress)
            current = proc.stdout.readline()
    except Exception as e:
        logger.debug(e.message)
def main(queue):
    """Scan every decompiled java file for broadcasts sent by the app and
    hand the findings back through ``queue``.
    """
    global current_file
    global parser
    global tree
    results = []
    count = 0
    common.logger.debug("Checking for any broadcasts sent from this app......")
    for j in common.java_files:
        count = count + 1
        pub.sendMessage('progress', count1=None, count2=None, count3=None, count4=None,
                        count5=round(count * 100 / common.java_files.__len__()))
        current_file = j
        try:
            tree = parser.parse_file(j)
            # BUG FIX: the original tested "type(tree) is not None", which is
            # always true (type() never returns None), so the failure branch
            # below was unreachable. Test the tree itself.
            if tree is not None:
                if hasattr(tree, 'type_declarations'):
                    for type_decl in tree.type_declarations:
                        if type(type_decl) is m.ClassDeclaration:
                            for t in type_decl.body:
                                try:
                                    recursiveBroadcastFinder(t, results)
                                except Exception as e:
                                    common.parsingerrors.add(str(j))
                                    common.logger.debug(
                                        "Unable to process recursiveBroadcastFinder in findBroadcasts.py: " + str(e))
                        elif type(type_decl) is list:
                            for y in type_decl:
                                recursiveBroadcastFinder(y, results)
                        elif hasattr(type_decl, '_fields'):
                            for d in type_decl._fields:
                                recursiveBroadcastFinder(getattr(type_decl, d), results)
            else:
                common.logger.debug("Unable to create tree for " + str(j))
        except Exception as e:
            common.logger.debug("Tree exception during broadcast processing: " + str(e))
            common.parsingerrors.add(str(j))
    queue.put(results)
    return
def target(self, queue):
    """Flag DexClassLoader usage and collect files that register broadcast
    receivers dynamically, reporting the latter as one combined warning.
    """
    files = common.java_files
    global parser, tree, fileName
    parser = plyj.Parser()
    tree = ''
    res = []
    # Files that register a broadcast receiver dynamically.
    dynamic_receiver_files = []
    total = len(files)
    for position, source in enumerate(files, 1):
        pub.sendMessage('progress', bar=self.name, percent=round(position * 100 / total))
        fileName = str(source)
        try:
            tree = parser.parse_file(source)
        except Exception:
            continue
        try:
            tree_text = str(tree)
            for import_decl in tree.import_declarations:
                if self.DEX_CLASS_LOADER in import_decl.name.value:
                    if self.CLASS_LOADER in tree_text:
                        PluginUtil.reportInfo(fileName, self.DexClassLoaderIssueDetails(fileName), res)
            # Record whether this file registers a broadcast receiver dynamically.
            if self.DYNAMIC_BROADCAST_RECEIVER in tree_text:
                dynamic_receiver_files.append(fileName)
        except Exception:
            continue
    # Present the dynamically created receivers one per line.
    Broadcast_Receiver = "\n".join(dynamic_receiver_files)
    if dynamic_receiver_files:
        PluginUtil.reportWarning(fileName, self.BroadcastReceiverIssueDetails(Broadcast_Receiver), res)
    queue.put(res)
def target(self, queue):
    """Report NEW_TASK / MULTIPLE_TASK_TASK pattern matches in files that
    import an Intent class.
    """
    files = common.java_files
    global filepath, tree
    parser = plyj.Parser()
    tree = ''
    res = []
    count = 0
    for current in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        filepath = str(current)
        try:
            tree = parser.parse_file(current)
        except Exception as e:
            common.logger.exception("Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                # Intent must appear in the import statement.
                if 'Intent' in import_decl.name.value:
                    with open(filepath, 'r') as handle:
                        body = handle.read()
                    if PluginUtil.contains(self.NEW_TASK, body):
                        PluginUtil.reportInfo(filepath, new_task(filepath), res)
                        break
                    if PluginUtil.contains(self.MULTIPLE_TASK_TASK, body):
                        PluginUtil.reportInfo(filepath, multiple_task(filepath), res)
                        break
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    queue.put(res)
def text_scan(file_list, rex_n):
    """ Given a list of files, search content of each file by the regular
    expression and return a list of matches

    Keeps the historical leading empty list; all other entries are
    [matches, filename] pairs.
    """
    gathered = [[]]
    total = common.java_files.__len__()
    for index, target_file in enumerate(file_list, 1):
        # count3 carries this scanner's progress percentage
        pub.sendMessage('progress', count1=None, count2=None,
                        count3=round(index * 100 / total))
        found = common.read_files(target_file, rex_n)
        if len(found) > 0:
            gathered.append([found, target_file])
    return gathered
def target(self, queue):
    """Report CHECK_PERMISSION / ENFORCE_PERMISSION pattern matches in files
    that import a Service class.
    """
    files = common.java_files
    global filepath, tree
    parser = plyj.Parser()
    tree = ''
    res = []
    total = len(files)
    for position, source in enumerate(files, 1):
        pub.sendMessage('progress', bar=self.name, percent=round(position * 100 / total))
        filepath = str(source)
        try:
            tree = parser.parse_file(source)
        except Exception as e:
            common.logger.exception("Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                # Only look at files importing a Service class.
                if 'Service' not in import_decl.name.value:
                    continue
                with open(filepath, 'r') as handle:
                    data = handle.read()
                if PluginUtil.contains(self.CHECK_PERMISSION, data):
                    PluginUtil.reportInfo(filepath, check_permission(filepath), res)
                    break
                if PluginUtil.contains(self.ENFORCE_PERMISSION, data):
                    PluginUtil.reportInfo(filepath, enforce_permission(filepath), res)
                    break
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    queue.put(res)
def target(self, queue):
    """Report hardcoded http:// URLs in files that import
    HttpURLConnection or URL.
    """
    files = common.java_files
    global parser
    parser = plyj.Parser()
    global tree
    global fileName
    tree = ''
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        fileName = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception:
            continue
        try:
            global url
            url = []
            for import_decl in tree.import_declarations:
                if 'HttpURLConnection' in import_decl.name.value or 'URL' in import_decl.name.value:
                    # BUG FIX: the file was opened without ever being closed
                    # (open(...).read()); a context manager releases the
                    # handle promptly.
                    with open(fileName, 'r') as source:
                        textfile = source.read()
                    search = "http://"
                    http_result = re.findall('\\b' + search + '\\b', textfile)
                    if http_result:
                        url = re.findall(self.http_url_regex, textfile)
                        http_url_list = " \n".join(url)
                        PluginUtil.reportInfo(fileName, self.HardcodedHTTPUrlsIssueDetails((fileName, http_url_list)), res)
                    # the file only needs to be scanned once
                    break
                else:
                    continue
        except Exception:
            continue
    queue.put(res)
def main(queue):
    """Run the crypto checks: weak-RNG warning for old SDKs, key-file
    discovery, then the ECB check over every class member of every file.
    """
    global parser
    global tree
    results = []
    if common.minSdkVersion < 19:
        weak_rng_warning(results)
    find_key_files(results)
    total = len(common.java_files)
    for index, source_file in enumerate(common.java_files, 1):
        pub.sendMessage('progress', bar='Crypto issues', percent=round(index * 100 / total))
        try:
            tree = parser.parse_file(source_file)
            if tree is not None:
                for declaration in tree.type_declarations:
                    if type(declaration) is m.ClassDeclaration:
                        for member in declaration.body:
                            try:
                                recursive_ecb_check(member, source_file, results)
                            except Exception as e:
                                failure = "Error running recursive_ecb_check in cryptoFlaws.py: " + str(e)
                                common.logger.debug(failure)
                                report.write("parsingerror-issues-list", failure, "strong")
        except Exception as e:
            common.logger.debug("Unable to create tree for " + str(source_file))
            report.write("parsingerror-issues-list",
                         "Unable to create tree for " + str(source_file), "strong")
    queue.put(results)
    return
def target(self, queue):
    """Run the insecure-call check on every class member of every
    decompiled java file.
    """
    files = common.java_files
    global parser, tree, filepath
    parser = plyj.Parser()
    tree = ''
    global res
    res = []
    count = 0
    for current in files:
        count += 1
        pub.sendMessage('progress', bar=self.name, percent=round(count * 100 / len(files)))
        filepath = str(current)
        try:
            tree = parser.parse_file(current)
        except Exception as e:
            common.logger.exception("Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for class_decl in tree.type_declarations:
                if type(class_decl) is not m.ClassDeclaration:
                    continue
                for body_item in class_decl.body:
                    try:
                        self.recursive_insecure_call_function(body_item, current, res)
                    except Exception as e:
                        common.logger.exception("Unable to run insecure function plugin " + str(e))
        except Exception as e:
            common.logger.debug("Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    queue.put(res)
def target(self, queue):
    """Flag every decompiled file containing the literal text 'pass'."""
    outcome = []
    candidates = common.text_scan(common.java_files, r'pass')
    scanned = 0
    for candidate in candidates:
        scanned += 1
        pub.sendMessage('progress', bar=self.getName(), percent=round(scanned * 100 / len(candidates)))
        common.logger.debug("Text found, " + str(candidate))
        summary = "The string 'pass' appears in the file: %s\n%s" % (candidate[1], str(candidate[0]))
        # HTML report entry
        html_entry = ReportIssue()
        html_entry.setCategory(ExploitType.PLUGIN)
        html_entry.setDetails(summary)
        html_entry.setFile(str(candidate[1]))
        html_entry.setSeverity(Severity.VULNERABILITY)
        outcome.append(html_entry)
        # terminal entry
        console_entry = terminalPrint()
        console_entry.setLevel(Severity.VULNERABILITY)
        console_entry.setData(summary)
        outcome.append(console_entry)
    queue.put(outcome)
def main(queue):
    """Scan every decompiled java file for broadcasts sent by the app."""
    global current_file
    global parser
    global tree
    results = []
    count = 0
    common.logger.debug("Checking for any broadcasts sent from this app......")
    for j in common.java_files:
        count = count + 1
        pub.sendMessage('progress', bar="Broadcast issues",
                        percent=round(count * 100 / common.java_files.__len__()))
        current_file = j
        try:
            tree = parser.parse_file(j)
            # BUG FIX: "type(tree) is not None" is always true (type() never
            # returns None), which made the "Unable to create tree" branch
            # unreachable; test the tree itself.
            if tree is not None:
                if hasattr(tree, 'type_declarations'):
                    for type_decl in tree.type_declarations:
                        if type(type_decl) is m.ClassDeclaration:
                            for t in type_decl.body:
                                try:
                                    recursive_broadcast_finder(t, results)
                                except Exception as e:
                                    common.parsingerrors.add(str(j))
                                    common.logger.debug(
                                        "Unable to process recursive_broadcast_finder in findBroadcasts.py: " + str(e))
                        elif type(type_decl) is list:
                            for y in type_decl:
                                recursive_broadcast_finder(y, results)
                        elif hasattr(type_decl, '_fields'):
                            for d in type_decl._fields:
                                recursive_broadcast_finder(getattr(type_decl, d), results)
            else:
                common.logger.debug("Unable to create tree for " + str(j))
        except Exception as e:
            common.logger.debug("Tree exception during broadcast processing: " + str(e))
            common.parsingerrors.add(str(j))
    queue.put(results)
    return
def target(self, queue):
    """Report every user-created permission found in the app manifest,
    highlighting those with a DANGEROUS protection level.
    """
    permissions = self.getUserCreatedPermissions()
    # full path to app manifest
    manifest_path = qarkMain.find_manifest_in_source()
    results = []
    for position, permission in enumerate(permissions, 1):
        # update progress bar
        pub.sendMessage('progress', bar=self.getName(), percent=round(position * 100 / len(permissions)))
        if self.isDangerousPermission(permission):
            # permission with protection level set to "dangerous"
            details = "User created permission with DANGEROUS protection level: %s" % permission
        else:
            details = "User created permission: %s" % permission
        # HTML report entry
        report_entry = ReportIssue()
        report_entry.setCategory(ExploitType.PLUGIN)
        report_entry.setSeverity(Severity.VULNERABILITY)
        report_entry.setFile(manifest_path)
        report_entry.setDetails(details)
        results.append(report_entry)
        # terminal entry
        console_entry = terminalPrint()
        console_entry.setLevel(Severity.VULNERABILITY)
        console_entry.setData(details)
        results.append(console_entry)
    # send all results back to main thread
    queue.put(results)
def target(self, queue):
    """Emit a vulnerability entry (report + terminal) for each user-created
    permission, highlighting any with a DANGEROUS protection level."""
    permissions = self.getUserCreatedPermissions()
    # absolute path of the application manifest
    manifest_path = qarkMain.find_manifest_in_source()
    results = []
    count = 0
    for permission in permissions:
        count += 1
        pub.sendMessage("progress", bar=self.getName(),
                        percent=round(count * 100 / len(permissions)))
        # pick the message template first, then fill it in
        template = ("User created permission with DANGEROUS protection level: %s"
                    if self.isDangerousPermission(permission)
                    else "User created permission: %s")
        details = template % permission
        # entry for the HTML report
        issue = ReportIssue()
        issue.setCategory(ExploitType.PLUGIN)
        issue.setSeverity(Severity.VULNERABILITY)
        issue.setFile(manifest_path)
        issue.setDetails(details)
        results.append(issue)
        # entry for the terminal
        issue = terminalPrint()
        issue.setLevel(Severity.VULNERABILITY)
        issue.setData(details)
        results.append(issue)
    # hand everything back to the main thread
    queue.put(results)
def procyon(path, dirname):
    """Run the Procyon decompiler over `path`, writing output to `dirname`2.

    Reads Procyon's stdout line by line and bumps the 'decompile' progress
    message each time a "Decompiling" line appears.
    """
    process = subprocess.Popen(
        ["java", "-jar",
         common.rootDir + "/lib/procyon/procyon-decompiler-0.5.30.jar",
         path,
         # BUG FIX: the flag was passed as "-o " (with a trailing space),
         # which the Procyon CLI does not recognise as the output option.
         "-o", dirname + "2"],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    try:
        while True:
            line = process.stdout.readline()
            if not line:
                break
            if "Decompiling" in line:
                common.counter2 = common.counter2 + 1
                pub.sendMessage('decompile',
                                procyon=round(common.counter2 * 100 / common.count))
    except Exception as e:
        # BUG FIX: `e.message` is not available on all exception types
        # (and is gone entirely in Python 3); str(e) is portable.
        logger.debug(str(e))
def target(self, queue):
    """Report hardcoded http:// URLs in files that use HTTP networking APIs.

    Only files importing HttpURLConnection/URL are inspected; matching
    URLs are joined into one finding per file.
    """
    files = common.java_files
    global parser
    parser = plyj.Parser()
    global tree
    global fileName
    tree = ''
    res = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        fileName = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception:
            continue
        try:
            global url
            url = []
            for import_decl in tree.import_declarations:
                if 'HttpURLConnection' in import_decl.name.value or 'URL' in import_decl.name.value:
                    # BUG FIX: the original used a bare open().read(), leaking
                    # the file handle; a with-block closes it deterministically.
                    with open(fileName, 'r') as fh:
                        textfile = fh.read()
                    search = "http://"
                    http_result = re.findall('\\b' + search + '\\b', textfile)
                    if http_result:
                        url = re.findall(self.http_url_regex, textfile)
                        http_url_list = " \n".join(url)
                        PluginUtil.reportInfo(
                            fileName,
                            self.HardcodedHTTPUrlsIssueDetails((fileName, http_url_list)),
                            res)
                        break
                    else:
                        continue
        except Exception:
            continue
    queue.put(res)
def target(self, queue):
    """WebView misconfiguration checks.

    For files importing WebView/WebSettings: flags mixed content, HTTP
    loadUrl calls and missing file/content access restrictions, then
    traverses the AST for unsafe shouldOverrideUrlLoading /
    shouldInterceptRequest overrides.
    """
    files = common.java_files
    parser = plyj.Parser()
    global filepath, tree
    tree = ''
    res = []
    issues_list = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        filepath = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception as e:
            common.logger.exception(
                "Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            global url
            url = []
            for import_decl in tree.import_declarations:
                # Check import statements with value declared as WebView and
                # WebSettings for the potential use of web views
                if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value:
                    with open(filepath, 'r') as r:
                        data = r.read()
                    if PluginUtil.contains(self.JAVASCRIPT_ENABLED, data):
                        if PluginUtil.contains(self.MIXED_CONTENT, data):
                            PluginUtil.reportWarning(filepath, mixed_content(filepath), res)
                        # BUG FIX: the original condition was
                        #   "setAllowFileAccess(false)" or "setAllowContentAccess(false)" not in data
                        # which is always True (the first operand is a truthy
                        # string literal). Flag the file only when NEITHER
                        # restriction is present.
                        if ("setAllowFileAccess(false)" not in data
                                and "setAllowContentAccess(false)" not in data):
                            if filepath not in issues_list:
                                issues_list.append(filepath)
                    if PluginUtil.contains(self.LOAD_URL_HTTP, data):
                        PluginUtil.reportWarning(filepath, load_http_urls(filepath), res)
                    break
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value:
                    for type_decl in tree.type_declarations:
                        # Check for class declaration in java source code and traverse
                        # further down the AST to find method names
                        if type(type_decl) is m.ClassDeclaration:
                            for fields in type_decl.body:
                                if type(fields) is m.MethodDeclaration:
                                    if 'shouldOverrideUrlLoading' in fields.name:
                                        if 'true' not in str(fields.body):
                                            PluginUtil.reportWarning(
                                                filepath, url_override(filepath), res)
                                            break
                                        else:
                                            continue
                                    if 'shouldInterceptRequest' in fields.name:
                                        if 'null' in str(fields.body):
                                            PluginUtil.reportWarning(
                                                filepath, intercept_request(filepath), res)
                                            break
                                        else:
                                            continue
                    break
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    if issues_list:
        issue_name = " \n".join(issues_list)
        PluginUtil.reportInfo(filepath, secure_content(issue_name), res)
    queue.put(res)
def target(self, queue):
    """Flag Java sources that use external storage.

    Scans every decompiled file that imports a File API for use of
    external storage, external media and public directories, then
    reports one aggregated warning per category.
    """
    global filepath, tree
    files = common.java_files
    parser = plyj.Parser()
    tree = ''
    # one accumulator list per finding category, plus the result list `res`
    external_pub_dir, external_media, external_storage, res = (
        [] for _ in xrange(4))
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        filepath = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception as e:
            common.logger.exception(
                "Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                # only inspect files that import a File-related API
                if 'File' in import_decl.name.value:
                    with open(filepath, 'r') as fr:
                        file_body = fr.read()
                    # NOTE(review): each `break` stops at the first matching
                    # category, so a file is recorded under at most one list —
                    # presumably intentional; confirm if multi-category
                    # reporting is wanted.
                    if PluginUtil.contains(self.CHECK_EXTERNAL_STORAGE, file_body):
                        external_storage.append(filepath)
                        break
                    if PluginUtil.contains(self.CHECK_EXTERNAL_MEDIA, file_body):
                        external_media.append(filepath)
                        break
                    if PluginUtil.contains(self.CHECK_PUBLIC_DIR, file_body):
                        external_pub_dir.append(filepath)
                        break
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    # Store the content obtained above in a column format
    storage = "\n".join(external_storage)
    media = "\n".join(external_media)
    pub_dir = "\n".join(external_pub_dir)
    if external_storage:
        PluginUtil.reportWarning(filepath, check_external_storage(storage), res)
    if external_media:
        PluginUtil.reportWarning(filepath, check_media_directory(media), res)
    if external_pub_dir:
        PluginUtil.reportWarning(filepath, check_public_directory(pub_dir), res)
    queue.put(res)
def target(self, queue):
    """Manifest checks: exported receivers without priority, path usage,
    launchMode, taskReparenting and the SafeBrowsing setting."""
    f = str(common.manifest)
    res = []
    count = 0
    ordered_broadcast = []
    path_variable_list = []
    launch_mode_list = []
    global fileName
    # full path to app manifest
    fileName = qarkMain.find_manifest_in_source()
    receivers = self.UserCreatedReceivers()
    for receiver in receivers:
        # BUG FIX: the original condition
        #   "exported" and "true" in str(receiver)
        # only evaluated the "true" membership ("exported" is an always-truthy
        # literal). Require both substrings, as the sibling manifest plugin does.
        if "exported" in str(receiver) and "true" in str(receiver):
            if not any(re.findall(self.PRIORITY_REGEX, str(receiver))):
                ordered_broadcast.append(str(receiver))
    # Arrange exported broadcast receiver without priority set in column format
    list_orderedBR = " \n".join(ordered_broadcast)
    if ordered_broadcast:
        PluginUtil.reportWarning(
            fileName, self.OrderedBroadcastIssueDetails(list_orderedBR), res)
    for line in f.splitlines():
        count += 1
        # update progress bar
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(f.splitlines())))
        if any(re.findall(self.PATH_USAGE, line)):
            path_variable_list.append(line)
        if any(re.findall(self.LAUNCH_MODE, line)):
            launch_mode_list.append(line)
        if any(re.findall(self.TASK_REPARENTING, line)):
            PluginUtil.reportInfo(fileName, self.TaskReparentingIssue(fileName), res)
    # Arrange identified path variable and launch mode usage in column format
    path_variable = " \n".join(path_variable_list)
    launch_mode_variable = "\n".join(launch_mode_list)
    if path_variable_list:
        PluginUtil.reportWarning(fileName, self.PathUsageIssue(path_variable), res)
    if launch_mode_list:
        PluginUtil.reportInfo(fileName, self.LaunchModeIssue(launch_mode_variable), res)
    # Check for google safebrowsing API
    # BUG FIX: the original tested membership in f.splitlines() (a line must
    # be EXACTLY "WebView") and used the same always-truthy `and` pattern as
    # above; test substrings of the raw manifest text instead.
    if "WebView" in f:
        if "EnableSafeBrowsing" not in f or "true" not in f:
            PluginUtil.reportInfo(fileName, self.SafebrowsingIssueDetails(fileName), res)
    # send all results back to main thread
    queue.put(res)
def target(self, queue):
    """WebView misconfiguration checks (duplicate variant).

    Flags mixed content, HTTP loadUrl calls and missing file/content
    access restrictions in files importing WebView/WebSettings, plus
    unsafe shouldOverrideUrlLoading / shouldInterceptRequest overrides.
    """
    files = common.java_files
    parser = plyj.Parser()
    global filepath, tree
    tree = ''
    res = []
    issues_list = []
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        filepath = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception as e:
            common.logger.exception(
                "Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        try:
            global url
            url = []
            for import_decl in tree.import_declarations:
                # Check import statements with value declared as WebView and
                # WebSettings for the potential use of web views
                if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value:
                    with open(filepath, 'r') as r:
                        data = r.read()
                    if PluginUtil.contains(self.JAVASCRIPT_ENABLED, data):
                        if PluginUtil.contains(self.MIXED_CONTENT, data):
                            PluginUtil.reportWarning(filepath, mixed_content(filepath), res)
                        # BUG FIX: `"setAllowFileAccess(false)" or ... not in data`
                        # was always True because the first operand is a truthy
                        # string literal; require BOTH restrictions to be absent
                        # before flagging the file.
                        if ("setAllowFileAccess(false)" not in data
                                and "setAllowContentAccess(false)" not in data):
                            if filepath not in issues_list:
                                issues_list.append(filepath)
                    if PluginUtil.contains(self.LOAD_URL_HTTP, data):
                        PluginUtil.reportWarning(filepath, load_http_urls(filepath), res)
                    break
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
        try:
            for import_decl in tree.import_declarations:
                if 'WebView' in import_decl.name.value or 'WebSettings' in import_decl.name.value:
                    for type_decl in tree.type_declarations:
                        # Check for class declaration in java source code and traverse
                        # further down the AST to find method names
                        if type(type_decl) is m.ClassDeclaration:
                            for fields in type_decl.body:
                                if type(fields) is m.MethodDeclaration:
                                    if 'shouldOverrideUrlLoading' in fields.name:
                                        if 'true' not in str(fields.body):
                                            PluginUtil.reportWarning(
                                                filepath, url_override(filepath), res)
                                            break
                                        else:
                                            continue
                                    if 'shouldInterceptRequest' in fields.name:
                                        if 'null' in str(fields.body):
                                            PluginUtil.reportWarning(
                                                filepath, intercept_request(filepath), res)
                                            break
                                        else:
                                            continue
                    break
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    if issues_list:
        issue_name = " \n".join(issues_list)
        PluginUtil.reportInfo(filepath, secure_content(issue_name), res)
    queue.put(res)
def main(queue):
    """Scan for cryptographic flaws: ECB mode usage, SecureRandom seeding
    misuse, weak RNG on old SDKs, and key material files.

    All findings are appended to `results` and pushed onto `queue`.
    """
    global parser
    global tree
    results = []
    count = 0
    # Android below API 19 ships a SecureRandom with known weaknesses
    if common.minSdkVersion < 19:
        weak_rng_warning(results)
    # NOTE(review): assumed unconditional (not part of the minSdkVersion
    # branch) — confirm against upstream history
    find_key_files(results)
    for j in common.java_files:
        count = count + 1
        pub.sendMessage('progress', bar='Crypto issues',
                        percent=round(count * 100 / common.java_files.__len__()))
        try:
            tree = parser.parse_file(j)
            if tree is not None:
                # if re.search(r'\.getInstance\(',str(tree)):
                #     print "YES"
                for type_decl in tree.type_declarations:
                    if type(type_decl) is m.ClassDeclaration:
                        for t in type_decl.body:
                            try:
                                recursive_ecb_check(t, j, results)
                                # fixedSeedCheck(t,j)
                            except Exception as e:
                                common.logger.debug(
                                    "Error running recursive_ecb_check in cryptoFlaws.py: " + str(e))
                                report.write(
                                    "parsingerror-issues-list",
                                    "Error running recursive_ecb_check in cryptoFlaws.py: " + str(e),
                                    "strong")
                # Using a fixed seed with SecureRandom
                for import_decl in tree.import_declarations:
                    try:
                        if 'SecureRandom' in import_decl.name.value:
                            # substring match over the whole stringified AST —
                            # flags any setSeed/generateSeed call in the file
                            if "setSeed" in str(tree):
                                issue = ReportIssue()
                                issue.setCategory(ExploitType.CRYPTO)
                                issue.setDetails(
                                    "setSeed should not be called with SecureRandom, as it is insecure. Specifying a fixed seed will cause the instance to return a predictable sequence of numbers. This may be useful for testing but it is not appropriate for secure use.")
                                issue.setFile(str(j))
                                issue.setSeverity(Severity.VULNERABILITY)
                                results.append(issue)
                                issue = terminalPrint()
                                issue.setLevel(Severity.VULNERABILITY)
                                issue.setData(
                                    "setSeed should not be called with SecureRandom, as it is insecure. Specifying a fixed seed will cause the instance to return a predictable sequence of numbers. This may be useful for testing but it is not appropriate for secure use.")
                                results.append(issue)
                            if "generateSeed" in str(tree):
                                issue = ReportIssue()
                                issue.setCategory(ExploitType.CRYPTO)
                                issue.setDetails(
                                    "generateSeed should not be called with SecureRandom, as it is insecure. Specifying a fixed seed will cause the instance to return a predictable sequence of numbers. This may be useful for testing but it is not appropriate for secure use.")
                                issue.setFile(str(j))
                                issue.setSeverity(Severity.VULNERABILITY)
                                results.append(issue)
                                issue = terminalPrint()
                                issue.setLevel(Severity.VULNERABILITY)
                                issue.setData(
                                    "generateSeed should not be called with SecureRandom, as it is insecure. Specifying a fixed seed will cause the instance to return a predictable sequence of numbers. This may be useful for testing but it is not appropriate for secure use. ")
                                results.append(issue)
                    except Exception as e:
                        common.logger.debug(
                            "Error checking insecure used of SecureRandom in cryptoFlaws.py: " + str(e))
                        report.write(
                            "parsingerror-issues-list",
                            "Error checking insecure used of SecureRandom in cryptoFlaws.py: " + str(e))
        except Exception as e:
            common.logger.debug("Unable to create tree for " + str(j))
            report.write("parsingerror-issues-list",
                         "Unable to create tree for " + str(j), "strong")
    queue.put(results)
    return
def target(self, queue):
    """Detect debug (Log.d) and verbose (Log.v) logging.

    Pass 1 walks each file's AST looking for methods named 'v'/'d';
    pass 2 regex-scans the raw sources, since AST traversal can miss
    calls made from constructors.
    """
    files = common.java_files
    global filepath, tree
    parser = plyj.Parser()
    total_debug_logs, total_verbose_logs, debug_logs, verbose_logs, verbose_logs_list, res, \
        debug_logs_list, discovered_debug_logs, discovered_verbose_logs = ([] for _ in xrange(9))
    tree = ''
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        filepath = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception as e:
            common.logger.exception(
                "Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        # Traverse down the tree to find out verbose or debug logs
        try:
            for type_decl in tree.type_declarations:
                if type(type_decl) is m.ClassDeclaration:
                    for fields in type_decl.body:
                        if type(fields) is m.MethodDeclaration:
                            # Check if the app is send verbose logging message
                            if str(fields.name) == 'v':
                                verbose_logs.append(str(fields.name))
                                if filepath not in discovered_verbose_logs:
                                    discovered_verbose_logs.append(filepath)
                            # Check if the app is send debug logging message
                            elif str(fields.name) == 'd':
                                debug_logs.append(str(fields.name))
                                if filepath not in discovered_debug_logs:
                                    discovered_debug_logs.append(filepath)
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    # Join all the filename and path containing debug and verbose logging
    debug_logs_path = " \n".join(discovered_debug_logs)
    verbose_logs_path = " \n".join(discovered_verbose_logs)
    # Display the file paths of all discovered logs
    if discovered_debug_logs:
        x = len(debug_logs)
        PluginUtil.reportInfo(filepath, debug_log_issues(debug_logs_path, x), res)
    if discovered_verbose_logs:
        y = len(verbose_logs)
        PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_logs_path, y), res)
    # Sometimes Log functions may be called from a constructor and hence maybe skipped by tree
    for f in files:
        with open(f, 'r') as fi:
            filename = fi.read()
        filepath = str(f)
        find_debug_logs = re.findall(self.debug_regex, filename)
        find_verbose_logs = re.findall(self.verbose_regex, filename)
        if find_debug_logs:
            total_debug_logs.append(str(find_debug_logs))
            if filepath not in debug_logs_list:
                debug_logs_list.append(filepath)
        if find_verbose_logs:
            total_verbose_logs.append(str(find_verbose_logs))
            if filepath not in verbose_logs_list:
                verbose_logs_list.append(filepath)
    debug_path = " \n".join(debug_logs_list)
    verbose_path = " \n".join(verbose_logs_list)
    if total_debug_logs:
        x = len(total_debug_logs)
        PluginUtil.reportInfo(filepath, debug_log_issues(debug_path, x), res)
    if total_verbose_logs:
        y = len(total_verbose_logs)
        PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_path, y), res)
    # BUG FIX: queue.put(res) previously ran in the MIDDLE of the function,
    # before any findings were appended to `res`, so the main thread received
    # an empty result list; it must run after all reporting is done.
    queue.put(res)
def validate(queue, height):
    """ Validates common pitfalls for certificate validation logic """
    # writer1 = common.Writer((0, height))
    results = []
    global tree
    global parser
    global filename
    global warningGiven
    global sslSessions
    count = 0
    for j in common.java_files:
        # per-file state: SSL sessions collected by recursive_ssl_session
        sslSessions = []
        count = count + 1
        pub.sendMessage('progress', bar='X.509 Validation',
                        percent=round(count * 100 / common.java_files.__len__()))
        filename = str(j)
        try:
            tree = parser.parse_file(j)
        except Exception as e:
            continue
        # No need to log this since we now warn potential parsing errors on
        # screen and provide details in the report.
        if tree is None:
            results.append(
                "Some files may not be parsed correctly. For a list of such files, please review the final report."
            )
        else:
            try:
                for type_decl in tree.type_declarations:
                    if type(type_decl) is m.ClassDeclaration:
                        for t in type_decl.body:
                            # Each recursive_* pass checks one class of TLS
                            # mistake; each is wrapped separately so one
                            # failing pass does not abort the others.
                            try:
                                recursive_insecure_trust_manager(t, j, results)
                            except Exception as e:
                                common.logger.error(
                                    "Unable to run recursive_insecure_trust_manager in certValidation.py: " + str(e))
                            try:
                                recursive_insecure_ssl_error_handling(t, j, results)
                            except Exception as e:
                                common.logger.error(
                                    "Unable to run recursive_insecure_ssl_error_handling in certValidation.py " + str(e))
                            try:
                                recursive_allow_all_hostname_verifier(t, j, results)
                            except Exception as e:
                                common.logger.error(
                                    "Unable to run recursive_allow_all_hostname_verifier in certValidation.py: " + str(e))
                            try:
                                recursive_ssl_session(t, j, results)
                            except Exception as e:
                                common.logger.error(
                                    "Unable to run recursive_ssl_session in certValidation.py: " + str(e))
            except Exception as e:
                if common.source_or_apk == 2:
                    common.logger.error(
                        "Error in validate function of certValidation.py: " + str(e))
                else:
                    common.logger.error("Bad file: " + str(j) + ", this is not uncommon")
            try:
                recursive_find_verify(None, j, results)
            except Exception as e:
                common.logger.error(
                    "Problem in findVerify function of certValidation.py: " + str(e))
        # reset the one-warning-per-file flag, then report any SSL sessions
        # that were never verified in this file
        warningGiven = False
        unverified_sessions(results)
    queue.put(results)
    return
def target(self, queue):
    """Report debug/verbose logging usage (AST pass with regex fallback).

    The AST pass records files defining log methods 'v'/'d'; if it finds
    nothing, a regex scan of the raw sources runs instead, since calls
    made from constructors can be missed by the tree walk.
    """
    files = common.java_files
    global parser, tree, fileName, verbose, debug, debug_logs_path, verbose_logs_path
    parser = plyj.Parser()
    debug_logs = []
    verbose_logs = []
    discovered_debug_logs = []
    discovered_verbose_logs = []
    res = []
    tree = ''
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        fileName = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception:
            continue
        # Traverse down the tree to find out verbose or debug logs
        try:
            for type_decl in tree.type_declarations:
                if type(type_decl) is m.ClassDeclaration:
                    for t in type_decl.body:
                        if type(t) is m.MethodDeclaration:
                            if str(t.name) == 'v':
                                verbose_logs.append(str(t.name))
                                discovered_verbose_logs.append(fileName)
                            elif str(t.name) == 'd':
                                debug_logs.append(str(t.name))
                                discovered_debug_logs.append(fileName)
        except Exception:
            continue
    # Join all the filename and path containing debug and verbose logging
    debug_logs_path = " \n".join(discovered_debug_logs)
    verbose_logs_path = " \n".join(discovered_verbose_logs)
    if discovered_debug_logs:
        PluginUtil.reportInfo(fileName, self.DebugLogsIssueDetails(debug_logs_path), res)
    if discovered_verbose_logs:
        PluginUtil.reportInfo(
            fileName, self.VerboseLogsIssueDetails(verbose_logs_path), res)
    # Provide the count of verbose/debug logs.
    # Written separately so that issue description is mentioned once and not
    # repeated for each warning.
    if debug_logs or verbose_logs:
        x = str(len(debug_logs))
        y = str(len(verbose_logs))
        PluginUtil.reportInfo(fileName, self.LogIssueDetails((x, y)), res)
    global reg, reg1, filename
    len_reg = []
    len_reg1 = []
    # Sometimes Log functions may be called from a constructor and hence maybe skipped by tree
    if len(debug_logs) == 0 and len(verbose_logs) == 0:
        for f in files:
            with open(f, 'r') as fi:
                filename = fi.read()
            file_name = str(f)
            reg = re.findall(self.debug_regex, filename)
            reg1 = re.findall(self.verbose_regex, filename)
            if reg:
                len_reg.append(str(reg))
                # BUG FIX: the location argument must be the file PATH; the
                # original passed `filename`, which holds the file CONTENTS.
                PluginUtil.reportInfo(
                    file_name, self.DebugLogsIssueDetails(file_name), res)
            if reg1:
                len_reg1.append(str(reg1))
                PluginUtil.reportInfo(
                    file_name, self.VerboseLogsIssueDetails(file_name), res)
        if len_reg or len_reg1:
            x = str(len(len_reg))
            y = str(len(len_reg1))
            PluginUtil.reportInfo(file_name, self.LogIssueDetails((x, y)), res)
    # BUG FIX: queue.put(res) previously ran before any findings were appended
    # to `res`, so the main thread received an empty list; push results last.
    queue.put(res)
def target(self, queue):
    """Detect debug (Log.d) and verbose (Log.v) logging (duplicate variant).

    AST pass over method declarations named 'v'/'d', followed by a regex
    scan of raw sources to catch calls the tree walk can miss (e.g. from
    constructors).
    """
    files = common.java_files
    global filepath, tree
    parser = plyj.Parser()
    total_debug_logs, total_verbose_logs, debug_logs, verbose_logs, verbose_logs_list, res, \
        debug_logs_list, discovered_debug_logs, discovered_verbose_logs = ([] for _ in xrange(9))
    tree = ''
    count = 0
    for f in files:
        count += 1
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(files)))
        filepath = str(f)
        try:
            tree = parser.parse_file(f)
        except Exception as e:
            common.logger.exception(
                "Unable to parse the file and generate as AST. Error: " + str(e))
            continue
        # Traverse down the tree to find out verbose or debug logs
        try:
            for type_decl in tree.type_declarations:
                if type(type_decl) is m.ClassDeclaration:
                    for fields in type_decl.body:
                        if type(fields) is m.MethodDeclaration:
                            # Check if the app is send verbose logging message
                            if str(fields.name) == 'v':
                                verbose_logs.append(str(fields.name))
                                if filepath not in discovered_verbose_logs:
                                    discovered_verbose_logs.append(filepath)
                            # Check if the app is send debug logging message
                            elif str(fields.name) == 'd':
                                debug_logs.append(str(fields.name))
                                if filepath not in discovered_debug_logs:
                                    discovered_debug_logs.append(filepath)
        except Exception as e:
            common.logger.debug(
                "Plyj parser failed while parsing the file: " + filepath + "\nError" + str(e))
            continue
    # Join all the filename and path containing debug and verbose logging
    debug_logs_path = " \n".join(discovered_debug_logs)
    verbose_logs_path = " \n".join(discovered_verbose_logs)
    # Display the file paths of all discovered logs
    if discovered_debug_logs:
        x = len(debug_logs)
        PluginUtil.reportInfo(filepath, debug_log_issues(debug_logs_path, x), res)
    if discovered_verbose_logs:
        y = len(verbose_logs)
        PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_logs_path, y), res)
    # Sometimes Log functions may be called from a constructor and hence maybe skipped by tree
    for f in files:
        with open(f, 'r') as fi:
            filename = fi.read()
        filepath = str(f)
        find_debug_logs = re.findall(self.debug_regex, filename)
        find_verbose_logs = re.findall(self.verbose_regex, filename)
        if find_debug_logs:
            total_debug_logs.append(str(find_debug_logs))
            if filepath not in debug_logs_list:
                debug_logs_list.append(filepath)
        if find_verbose_logs:
            total_verbose_logs.append(str(find_verbose_logs))
            if filepath not in verbose_logs_list:
                verbose_logs_list.append(filepath)
    debug_path = " \n".join(debug_logs_list)
    verbose_path = " \n".join(verbose_logs_list)
    if total_debug_logs:
        x = len(total_debug_logs)
        PluginUtil.reportInfo(filepath, debug_log_issues(debug_path, x), res)
    if total_verbose_logs:
        y = len(total_verbose_logs)
        PluginUtil.reportInfo(filepath, verbose_log_issues(verbose_path, y), res)
    # BUG FIX: queue.put(res) previously executed before any reportInfo call
    # appended to `res`, so all findings were dropped; it must run last.
    queue.put(res)
def target(self, queue):
    """Manifest checks: exported receivers without priority, path usage,
    launchMode, taskReparenting, hardcoded API keys and SafeBrowsing."""
    raw_file = str(common.manifest)
    # Split the raw file content into each individual line
    split_line = raw_file.splitlines()
    count = 0
    # Create a list for each object
    ordered_broadcast, path_variable_list, launch_mode_list, api_key_list, res = (
        [] for _ in xrange(5))
    global file_name
    # full path to app manifest
    file_name = qarkMain.find_manifest_in_source()
    receivers = self.UserCreatedReceivers()
    for receiver in receivers:
        if "exported" in str(receiver) and "true" in str(receiver):
            if not re.search(self.PRIORITY_REGEX, str(receiver)):
                ordered_broadcast.append(str(receiver))
    # Arrange exported broadcast receiver without priority set in column format
    list_orderedBR = " \n".join(ordered_broadcast)
    if ordered_broadcast:
        PluginUtil.reportWarning(file_name, list_orderedBR, res)
    for line in split_line:
        count += 1
        # update progress bar
        pub.sendMessage('progress', bar=self.name,
                        percent=round(count * 100 / len(split_line)))
        if re.search(self.PATH_USAGE, line):
            path_variable_list.append(line)
        if re.search(self.LAUNCH_MODE, line):
            launch_mode_list.append(line)
        if re.search(self.TASK_REPARENTING, line):
            PluginUtil.reportInfo(file_name, task_reparenting(file_name), res)
        if re.match(self.API_KEY_REGEX, line):
            # Check if special character is present in the line. If "Yes, then ignore.
            if not re.match(self.SPECIAL_CHAR_REGEX, line) and line not in api_key_list:
                api_key_list.append(line)
        # Additional check for hardcoded api keys which matches the syntax
        # most commonly used with google API_KEY
        if re.search(self.HARDCODED_API_KEY, line) and line not in api_key_list:
            api_key_list.append(line)
    # Arrange identified path variable and launch mode usage in column format
    path_variable = " \n".join(path_variable_list)
    launch_mode_variable = "\n".join(launch_mode_list)
    api_key_variable = "\n".join(api_key_list)
    if path_variable_list:
        PluginUtil.reportWarning(file_name, path_usage(path_variable), res)
    if launch_mode_list:
        PluginUtil.reportInfo(file_name, task_launch_mode(launch_mode_variable), res)
    if api_key_list:
        PluginUtil.reportInfo(file_name, hardcoded_api_key(api_key_variable), res)
    # Check for google safe browsing API
    # BUG FIX: `"WebView" in split_line` is list membership, requiring a
    # manifest line to be EXACTLY "WebView" — effectively never true. Test
    # substrings of the raw manifest text instead (same for SafeBrowsing).
    if "WebView" in raw_file:
        if "EnableSafeBrowsing" not in raw_file or "true" not in raw_file:
            PluginUtil.reportInfo(file_name, google_safe_browsing(file_name), res)
    # send all results back to main thread
    queue.put(res)