class Npmauditparser():
    """Parse `npm audit` JSON reports and forward each advisory to
    ElasticSearch, the results DB and Slack."""

    def __init__(self):
        self.es = elastic()
        self.const = Constants()
        self.utils = Utils()
        self.config = Config()

    def node_output(self, repo: str):
        """Read <download_location><repo>/node_results.json and push one
        issue record per advisory. Silently returns when the report file
        is missing or contains no advisories."""
        report = '%s%s/node_results.json' % (
            self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
        if not os.path.exists(report):
            return
        with open(report) as file:
            res = json.loads(file.read())
        # BUG FIX: the original guarded on self.es.get('advisories') -- a call
        # on the elastic client, unrelated to the parsed report. The intent
        # (matching the sibling parsers) is to check the report itself.
        if not res.get('advisories'):
            return
        # npm audit keys the 'advisories' object by advisory id; the original
        # iterated keys and re-indexed -- iterate the values directly.
        for advisory in res['advisories'].values():
            try:
                issue = {
                    'repo': repo,
                    'scanner': 'npm-audit',
                    'bug_type': '',
                    'language': 'nodejs',
                    'class_name': '',
                    'method_name': '',
                    'line_no_start': '',
                    'line_no_end': '',
                    'file_name': '',
                    'vulnerable_code': '',
                    'severity': '',
                    'module_name': '',
                    'advisories_url': '',
                    'vulnerable_versions': '',
                    'patched_versions': '',
                    'dependency_url': '',
                    'CVE': '',
                    'description': '',
                    'source_url': '',
                    'title': ''
                }
                issue["module_name"] = advisory['module_name']
                issue["title"] = advisory['title']
                issue["severity"] = advisory['severity']
                issue["advisories_url"] = advisory['url']
                issue["vulnerable_versions"] = advisory['vulnerable_versions']
                issue["patched_versions"] = advisory['patched_versions']
                # Forward only previously unseen, non-empty issues.
                if self.utils.check_issue_exits(
                        repo, str(issue)) == False and str(issue) != "":
                    self.utils.sent_result_to_db(
                        repo, str(issue), 'node-js', 'npm-audit')
                    self.es.push_data_to_elastic_search(issue)
                    self.utils.sent_to_slack(
                        repo, json.dumps(issue, indent=4))
            except Exception as e:
                # Best-effort per-advisory: one malformed entry must not
                # abort the whole report.
                print(e)
        return
def updateEvent(self, event, data):
    """Recursively merge *data* into *event* and return the updated event.

    Type categories come from Utils.getType ('builtins', 'dict', 'custom',
    ...). Builtin values overwrite builtin targets; dicts are merged key by
    key; custom objects are updated attribute by attribute. Incompatible
    assignments leave the event untouched.
    NOTE(review): assumes Utils.getType(None) == 'builtins' so that newly
    created keys receive the incoming value -- confirm against Utils.
    """
    if Utils.getType(data) == 'builtins':
        if Utils.getType(event) == 'builtins':
            event = data
        else:
            # Invalid assignment: trying to store a builtin into an object;
            # do not touch the event.  (translated from Spanish)
            pass
        return event
    else:
        if Utils.getType(event) == 'builtins':
            # Non-builtin data replaces a builtin target wholesale.
            event = data
            return event
        elif Utils.getType(event) == 'dict':
            for key, value in data.items():
                if key in event:
                    event[key] = self.updateEvent(event[key], value)
                else:
                    # New key: seed with None, then let the recursion
                    # install the incoming value.
                    event[key] = None
                    event[key] = self.updateEvent(event[key], value)
            return event
        elif Utils.getType(event) == 'custom':
            for key, value in data.items():
                if hasattr(event, key):
                    attr = getattr(event, key)
                    attr_value = self.updateEvent(attr, value)
                    setattr(event, key, attr_value)
                else:
                    # Invalid assignment: trying to store a value into an
                    # attribute that does not exist on the object; do not
                    # touch the event.  (translated from Spanish)
                    pass
            return event
        else:
            # Unknown target category: return it unchanged.
            return event
def __init__(self):
    # Project configuration plus cached copies of the two Nexus endpoints.
    self.config = Config()
    self.NEXUS_URL_OLD = self.config.NEXUS_URL_OLD
    self.NEXUS_URL_NEW = self.config.NEXUS_URL_NEW
    self.const = Constants()
    self.utils = Utils()
    # Accumulators of repo names per build system; presumably populated by
    # build-detection code elsewhere in the class -- verify against callers.
    self.maven = []
    self.gradle = []
def find_user(users: List[User], name_part: str) -> "User | None":
    """Slow search, iterate over all users list, except if username == name_part
    But smarter cause it checks pertinence.

    Returns the best-matching non-bot user, or None when nothing matches.
    An exact (100%) match short-circuits immediately.
    """
    name_part = unidecode(name_part.lower())
    # Match priority (translated from French):
    #   * is the account/display name
    #   * name starts with the search string
    #   * percentage of match
    sort_rule = (("is_account_name", False), ("starts_with", True),
                 ("match", True))
    matches = []
    for user in users:
        if user.bot:
            continue
        display_name = unidecode(user.display_name.lower())
        username = unidecode(user.name.lower())
        match = None
        names_comparaison = []
        if name_part in display_name:
            names_comparaison.append(
                UserSearchResult(user, len(name_part) / len(display_name),
                                 display_name.startswith(name_part), False))
        if name_part in username:
            names_comparaison.append(
                UserSearchResult(user, len(name_part) / len(username),
                                 username.startswith(name_part), True))
        if names_comparaison:
            if len(names_comparaison) == 1:
                match = names_comparaison[0]
            else:
                # Keep only the better of the two candidate names.
                match = Utils.multisort(names_comparaison, sort_rule)[0]
        if match:
            if match.match == 1:
                # Perfect match: no need to look further.
                return match.user
            matches.append(match)
    if matches:
        # BUG FIX: the sorted result was previously discarded
        # (Utils.multisort's return value was ignored while the earlier call
        # site uses it) -- rebind before picking the best candidate.
        matches = Utils.multisort(matches, sort_rule)
        return matches[0].user
    return None
class Gosecparser():
    """Parse gosec JSON output and forward findings to ES/DB/Slack."""

    def __init__(self):
        self.es = elastic()
        self.const = Constants()
        self.utils = Utils()
        self.config = Config()

    def golang_output(self, repo: str):
        """Read <download_location><repo>/results.json and push one issue
        record per gosec finding. No-op when the report is missing or
        unparseable."""
        report = '%s%s/results.json' % (
            self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
        if not os.path.exists(report):
            return
        with open(report) as file:
            try:
                res = json.loads(file.read())
            except ValueError:
                logging.debug(
                    'Error could not load the json file for the project: %s'
                    % (repo))
                # BUG FIX: the original fell through after a parse failure
                # and dereferenced the unbound `res` (NameError); bail out.
                return
        for i in res['Issues']:
            issue = {
                'repo': repo,
                'scanner': 'gosec',
                'bug_type': '',
                'language': 'golang',
                'class_name': '',
                'method_name': '',
                'line_no_start': '',
                'line_no_end': '',
                'file_name': '',
                'vulnerable_code': '',
                'severity': '',
                'module_name': '',
                'advisories_url': '',
                'vulnerable_versions': '',
                'patched_versions': '',
                'dependency_url': '',
                'CVE': '',
                'description': '',
                'source_url': '',
                'title': ''
            }
            # NOTE(review): 'issue' and 'line_no' are extra keys not in the
            # shared template -- presumably the ES schema tolerates them;
            # confirm before renaming.
            issue["issue"] = i['details']
            issue["file_name"] = i['file']
            issue["vulnerable_code"] = i['code']
            issue["line_no"] = i['line']
            if self.utils.check_issue_exits(
                    repo, str(issue)) == False and str(issue) != "":
                self.utils.sent_result_to_db(repo, str(issue), 'golang',
                                             'gosec')
                self.es.push_data_to_elastic_search(issue)
                self.utils.sent_to_slack(repo, json.dumps(issue, indent=4))
        return
def __install_local(self, toolname, tool):
    """Push the binary from the workstation to the device"""
    local, command = tool['LOCAL'], tool['COMMAND']
    binary_name = Utils.extract_filename_from_path(command)
    # Nothing to do when the tool is already on the device.
    if self.__is_tool_available(binary_name):
        self.device.printer.debug('[INSTALL] Tool already available: %s' % toolname)
        return
    self.device.printer.verbose('[INSTALL] Manually installing: %s' % toolname)
    destination = Utils.path_join('/usr/bin/', binary_name)
    self.device.push(local, destination)
    self.device.remote_op.chmod_x(destination)
class Gitleaks():
    """Thin wrapper that runs a gitleaks scan over a downloaded repo."""

    def __init__(self):
        self.utils = Utils()
        self.config = Config()

    def gitleaks_scan(self, repo: str):
        # Run from the download root so `-r <repo>` resolves relatively.
        os.chdir('%s' % (self.config.PATRONUS_DOWNLOAD_LOCATION))
        cmd = "gitleaks -r %s --report=%s/gitleaks.json --report-format=json" % (repo, repo)
        self.utils.execute_cmd(cmd, repo)
        return
class Gitleaksparser():
    """Parse gitleaks JSON output and forward findings to ES/DB/Slack."""

    def __init__(self):
        self.es = elastic()
        self.utils = Utils()
        self.config = Config()

    def gitleaks_output(self, repo: str):
        """Read <download_location><repo>/gitleaks.json and push one issue
        record per leak. No-op when the report file is missing."""
        report = '%s%s/gitleaks.json' % (
            self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
        if not os.path.exists(report):
            return
        with open(report) as file:
            res = json.loads(file.read())
        for i in res['Issues']:
            issue = {
                'repo': repo,
                # BUG FIX: scanner was 'gosec' (copied from Gosecparser);
                # this parser handles gitleaks results.
                'scanner': 'gitleaks',
                'bug_type': '',
                # NOTE(review): 'golang' also looks copy-pasted -- gitleaks
                # is language-agnostic; left as-is pending confirmation of
                # the downstream schema.
                'language': 'golang',
                'class_name': '',
                'method_name': '',
                'line_no_start': '',
                'line_no_end': '',
                'file_name': '',
                'vulnerable_code': '',
                'severity': '',
                'module_name': '',
                'advisories_url': '',
                'vulnerable_versions': '',
                'patched_versions': '',
                'dependency_url': '',
                'CVE': '',
                'description': '',
                'source_url': '',
                'title': '',
                'commit': '',
                'tags': '',
                'author': ''
            }
            issue["line_no_start"] = i['line']
            issue["commit"] = i['commit']
            issue["file_name"] = i['file']
            issue["tags"] = i['tags']
            issue["author"] = i['author']
            if self.utils.check_issue_exits(
                    repo, str(issue)) == False and str(issue) != "":
                self.utils.sent_result_to_db(repo, str(issue), 'gitleaks',
                                             'gitleaks')
                self.es.push_data_to_elastic_search(issue)
                self.utils.sent_to_slack(repo, json.dumps(issue, indent=4))
        return
def _grep(self, what, awk=False):
    """Grep for *what* across the diff set (if any) or the primary folder,
    optionally post-filtering through AWK."""
    select = self.AWK if awk else ""

    def do_grep(what, where, select):
        # Build and run the grep pipeline, dropping empty output lines.
        cmd = "{bin} {opts} {what} {where} {select}".format(bin=Constants.PATH_TOOLS_LOCAL['GREP'],
                                                            opts=self.GREP_OPTS,
                                                            what=what,
                                                            where=where,
                                                            select=select)
        out, err = self.local_op.command_blocking(cmd)
        return filter(None, out.split('\n'))

    # No diffs recorded: search the primary folder instead.
    if not self.diffs:
        return do_grep(what, Utils.escape_path(self.options['primary_folder']), select)
    hits = []
    for diff in self.diffs:
        hits.extend(do_grep(what, Utils.escape_path(diff), select))
    return hits
def structure_data(self, config_file):
    """Parse Plist configuration data into dict.

    Flattens every top-level section of the plist into a single merged
    dict. Raises FrameworkException when the file cannot be parsed.
    """
    try:
        config, merged = Utils.plist_read_from_file(config_file), {}
        for k in config.keys():
            merged.update(config[k])
        return merged
    # BUG FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # catch Exception instead (still broad on purpose: any parse failure
    # must surface as an invalid-config error).
    except Exception:
        self.printer.error('Invalid file: %s' % config_file)
        raise FrameworkException('Invalid configuration file!')
class GoLang():
    """Runs gosec against a downloaded Go repository."""

    def __init__(self):
        self.const = Constants()
        self.utils = Utils()
        self.config = Config()

    def gosec(self, repo: str):
        """ Initiates gosec scan """
        os.chdir('%s%s' % (self.config.PATRONUS_DOWNLOAD_LOCATION, repo))
        try:
            self.utils.execute_cmd(
                "gosec -no-fail -fmt=json -out=%s%s/results.json ./..." %
                (self.config.PATRONUS_DOWNLOAD_LOCATION, repo), repo)
        # BUG FIX: narrowed the bare `except:`; the log message also said
        # "dependency-check" (copy/paste) although this runs gosec.
        except Exception:
            logging.debug("Error running gosec on %s" % (repo))
        return
def to_internal_value(self, data):
    """Convert *data* to its internal representation: builtins pass
    through, custom objects go through getValueObject, and mappings are
    converted element-wise."""
    kind = Utils.getType(data)
    if kind == 'builtins':
        return data
    if kind == 'custom':
        return self.getValueObject(data)
    # Mapping-like input: convert each member.
    return {item: self.getValueObject(data[item]) for item in data}
def module_run(self):
    """Detect screenshots cached by iOS when the target app is backgrounded.

    Creates a timestamp marker, launches the app, waits for the user to
    background it, then lists (and optionally pulls and displays) snapshot
    files newer than the marker.
    NOTE(review): uses raw_input() -- this module is Python 2 code.
    """
    # Create a file with the current time of last modification
    self.printer.verbose("Creating timestamp file...")
    ts = self.device.remote_op.create_timestamp_file(
        'timestamp-caching-snapshot')
    # Launch the app
    self.printer.info("Launching the app...")
    self.device.app.open(self.APP_METADATA['bundle_id'])
    # Ask the user to background the app
    self.printer.info(
        "Background the app by hitting the home button, then press enter: "
    )
    raw_input()
    time.sleep(2)
    # Check presence of new screenshots: anything under the Snapshots cache
    # folder modified after the timestamp marker.
    self.printer.info("Checking for new screenshots...")
    folder = os.path.join(self.APP_METADATA['data_directory'],
                          'Library/Caches/Snapshots/')
    cmd = '{bin} {folder} -type f -newer {ts} | sort -u'.format(
        bin=self.device.DEVICE_TOOLS['FIND'], folder=folder, ts=ts)
    out = self.device.remote_op.command_blocking(cmd)
    if not out:
        self.printer.warning("No new screenshots were detected")
        return
    # Print to console
    self.printer.notify("Screenshots found:")
    sc = []
    for el in out:
        fname = el.strip()
        sc.append(fname)
        self.printer.notify('\t{}'.format(fname))
    # Pull files & show image
    if self.options['pull']:
        self.printer.notify(
            'Retrieving screenshots and saving them in: %s' %
            self.path_home_temp)
        for s in sc:
            # Pull file
            temp_name = Utils.extract_filename_from_path(s)
            temp_file = self.local_op.build_temp_path_for_file(
                self, temp_name)
            self.device.remote_op.download(s, temp_file)
            # Show image with the local image viewer
            cmd = '{} "{}"'.format(self.TOOLS_LOCAL['EOG'], temp_file)
            self.local_op.command_blocking(cmd)
def module_run(self): self.printer.info("Looking for Binary Cookies files...") # Compose cmd string dirs = [ self.APP_METADATA['bundle_directory'], self.APP_METADATA['data_directory'] ] dirs_str = ' '.join(dirs) cmd = '{bin} {dirs_str} -type f -name "*binarycookies"'.format( bin=self.device.DEVICE_TOOLS['FIND'], dirs_str=dirs_str) out = self.device.remote_op.command_blocking(cmd) # No files found if not out: self.printer.error("No Binary Cookies files found") return # Save list self.add_issue('Binary Cookies files detected', out, 'INVESTIGATE', None) # Add data protection class self.printer.info("Retrieving data protection classes...") retrieved_files = self.device.app.get_dataprotection(out) # Analysis self.printer.info( "The following Binary Cookies files have been found:") if self.options['analyze']: # Show Menu remote_name = choose_from_list_data_protection(retrieved_files) local_name = self.device.app.convert_path_to_filename( remote_name, self.APP_METADATA) # Save it locally and analyze it self.save_file(remote_name, local_name, analyze=True) else: # Only list files, do not prompt the user choose_from_list_data_protection(retrieved_files, choose=False) # Dump all if self.options['dump_all']: self.printer.notify('Dumping all Binary Cookies files...') for fname in out: remote_name = Utils.escape_path(fname) # Convert the path to a valid filename local_name = self.device.app.convert_path_to_filename( fname, self.APP_METADATA) # Save it locally self.save_file(remote_name, local_name)
class DependencyCheck():
    """Runs OWASP dependency-check in three flavors: standalone CLI,
    maven plugin, and gradle plugin."""

    def __init__(self):
        self.java = Java()
        self.const = Constants()
        self.utils = Utils()
        self.config = Config()

    def dependency_check(self, repo: str):
        """Run the bundled dependency-check CLI over the downloaded repo,
        writing a JSON report next to the sources."""
        parent_dir = dirname(
            dirname(os.path.abspath(os.path.dirname(__file__))))
        os.chdir(parent_dir + "/tools/dependency-check/bin/")
        try:
            self.utils.execute_cmd(
                "./dependency-check.sh --scan %s%s -f JSON -o %s%s" %
                (self.config.PATRONUS_DOWNLOAD_LOCATION, repo,
                 self.config.PATRONUS_DOWNLOAD_LOCATION, repo), repo)
            logging.info("Successfully ran dependency-check on repo %s" %
                         (repo))
        except Exception:
            logging.debug("Error running dependency-check on repo %s" %
                          (repo))
        return

    def dependency_check_maven(self, repo: str):
        """Run dependency-check through the maven plugin."""
        try:
            os.chdir("%s/%s" %
                     (self.config.PATRONUS_DOWNLOAD_LOCATION, repo))
            self.utils.execute_cmd("mvn compile", repo)
            self.utils.execute_cmd("mvn dependency-check:check", repo)
            logging.info("Successfully ran dependency-check on repo %s" %
                         (repo))
        except Exception:
            # BUG FIX: the original did `"... %s" (repo)` (missing `%`),
            # which raised TypeError inside the handler.
            logging.debug("Error running dependency-check on repo %s" %
                          (repo))
        return

    def dependency_check_gradle(self, repo: str):
        """Run dependency-check through the gradle plugin."""
        try:
            os.chdir("%s/%s" %
                     (self.config.PATRONUS_DOWNLOAD_LOCATION, repo))
            self.utils.execute_cmd("./gradlew dependencyCheckAnalyze", repo)
            logging.info("Successfully ran dependency-check on repo %s" %
                         (repo))
        except Exception:
            # BUG FIX: same missing `%` as above, plus a stray trailing
            # `repo` argument.
            logging.debug("Error running dependency-check on repo %s" %
                          (repo))
        return
def module_run(self): self.printer.info("Looking for Cache.db files...") # Compose cmd string dirs = [ self.APP_METADATA['bundle_directory'], self.APP_METADATA['data_directory'] ] dirs_str = ' '.join(dirs) cmd = '{bin} {dirs_str} -type f -name "*Cache.db"'.format( bin=self.device.DEVICE_TOOLS['FIND'], dirs_str=dirs_str) out = self.device.remote_op.command_blocking(cmd) # No files found if not out: self.printer.info("No Cache.db files found") return # Add data protection class self.printer.info("Retrieving data protection classes...") retrieved_files = self.device.app.get_dataprotection(out) # Show Menu self.printer.info("The following Cache.db files have been found:") if self.options['analyze']: option = choose_from_list_data_protection(retrieved_files) # Pull file fname = Utils.extract_filename_from_path(option) temp_file = self.local_op.build_temp_path_for_file(self, fname) self.device.pull(option, temp_file) # Analyze it with SQLite self.printer.info("Spawning SQLite3 console...") cmd_headers = ' -header' if self.options['headers'] else '' cmd_column = ' -column' if self.options['column_mode'] else '' cmd_csv = ' -csv' if self.options['csv_mode'] else '' cmd = '{bin} {header} {column} {csv} {db}'.format( bin=self.TOOLS_LOCAL['SQLITE3'], header=cmd_headers, column=cmd_column, csv=cmd_csv, db=temp_file) self.local_op.command_interactive(cmd) # Delete file self.local_op.delete_temp_file(self, fname) else: # Only list files, do not prompt the user choose_from_list_data_protection(retrieved_files, choose=False)
def show_image(self, sc):
    """Pull each screenshot in *sc* to the output folder and display it,
    trying EOG first (Kali) and falling back to `open` (OS X)."""
    if not self.options['pull']:
        return
    self.printer.notify('Retrieving screenshots and saving them in: %s' % self.options['output'])
    for shot in sc:
        # Pull the file from the device
        local_name = Utils.extract_filename_from_path(shot)
        local_path = os.path.join(self.options['output'], local_name)
        self.device.remote_op.download(shot, local_path)
        # Kali: try eog
        cmd = '{} "{}"'.format(self.TOOLS_LOCAL['EOG'], local_path)
        out, err = self.local_op.command_blocking(cmd)
        if 'not found' in err:
            # OS X fallback
            cmd = '{} "{}"'.format(self.TOOLS_LOCAL['OPEN'], local_path)
            self.local_op.command_blocking(cmd)
def show_image(self, sc):
    """Pull each screenshot path in *sc* to the configured output folder
    and open it with a local image viewer (EOG on Kali, `open` on OS X)."""
    if self.options['pull']:
        self.printer.notify(
            'Retrieving screenshots and saving them in: %s' %
            self.options['output'])
        for s in sc:
            # Pull file from the device into the output directory
            temp_name = Utils.extract_filename_from_path(s)
            temp_file = os.path.join(self.options['output'], temp_name)
            self.device.remote_op.download(s, temp_file)
            # Show image
            # Kali: eog first
            cmd = '{} "{}"'.format(self.TOOLS_LOCAL['EOG'], temp_file)
            out, err = self.local_op.command_blocking(cmd)
            if 'not found' in err:
                # OS X: fall back to `open` when eog is unavailable
                cmd = '{} "{}"'.format(self.TOOLS_LOCAL['OPEN'], temp_file)
                self.local_op.command_blocking(cmd)
def module_run(self): self.printer.info("Looking for Binary Cookies files...") # Compose cmd string dirs = [self.APP_METADATA['bundle_directory'], self.APP_METADATA['data_directory']] dirs_str = ' '.join(dirs) cmd = '{bin} {dirs_str} -type f -name "*binarycookies"'.format(bin=self.device.DEVICE_TOOLS['FIND'], dirs_str=dirs_str) out = self.device.remote_op.command_blocking(cmd) # No files found if not out: self.printer.error("No Binary Cookies files found") return # Save list self.add_issue('Binary Cookies files detected', out, 'INVESTIGATE', None) # Add data protection class self.printer.info("Retrieving data protection classes...") retrieved_files = self.device.app.get_dataprotection(out) # Analysis self.printer.info("The following Binary Cookies files have been found:") if self.options['analyze']: # Show Menu remote_name = choose_from_list_data_protection(retrieved_files) local_name = self.device.app.convert_path_to_filename(remote_name, self.APP_METADATA) # Save it locally and analyze it self.save_file(remote_name, local_name, analyze=True) else: # Only list files, do not prompt the user choose_from_list_data_protection(retrieved_files, choose=False) # Dump all if self.options['dump_all']: self.printer.notify('Dumping all Binary Cookies files...') for fname in out: remote_name = Utils.escape_path(fname) # Convert the path to a valid filename local_name = self.device.app.convert_path_to_filename(fname, self.APP_METADATA) # Save it locally self.save_file(remote_name, local_name)
def _parse_certificate(self, data): self.printer.verbose("Parsing the certificate...") # Read the plist file pl = Utils.plist_read_from_file(data, use_plistlib=True) # Extract the Data field of the certificate and store it locally cert = pl["DeveloperCertificates"][0].data cert_file = self.device.local_op.build_temp_path_for_file("cert", self) self.device.local_op.write_file(cert_file, cert) # Extract strings and look for the distribution profile cmd = "cat {} | strings | grep iPhone".format(cert_file) out = self.device.local_op.command_blocking(cmd)[0] if out: msg = "Distribution Profile found" self.printer.notify(msg) self.print_cmd_output(out, None) self.add_issue('Provisioning Profile', '{}: {}'.format(msg, out), 'INVESTIGATE', None) else: msg = "No Distribution Profile found" self.printer.error(msg) self.add_issue('Provisioning Profile', msg, 'HIGH', None)
def module_run(self): self.printer.info('Searching for Configuration file...') # Check if the EffectiveUserSettings.plist file is present config_file = Constants.DEVICE_PATH_EFFECTIVE_USER_SETTINGS_IOS10 if "10" in self.device._ios_version else Constants.DEVICE_PATH_EFFECTIVE_USER_SETTINGS_IOS9_AND_BELOW if not self.device.remote_op.file_exist(config_file): raise FrameworkException('Could not find: %s' % config_file) # Pull Effective User Settings plist local_name = Utils.extract_filename_from_path(config_file) local_file = self.save_file(config_file, local_name) if not self.options['pull_only']: # Comparing configuration with template self.printer.info('Assessing Configuration...') if not self.options['template']: raise FrameworkException('Template not provided') self.compare(local_file, self.options['template']) self.printer.notify('Configuration Saved to: %s' % local_file)
def _parse_certificate(self, data):
    """Extract the developer certificate from a provisioning-profile plist
    and flag whether it is a distribution profile.

    Records an INVESTIGATE issue when 'iPhone' strings are found in the
    certificate, otherwise a HIGH issue for the missing profile.
    """
    # Read the plist file
    pl = Utils.plist_read_from_file(data, use_plistlib=True)
    # Extract the Data field of the certificate and store it locally
    cert = pl["DeveloperCertificates"][0].data
    cert_file = self.device.local_op.build_temp_path_for_file("cert", self)
    self.device.local_op.write_file(cert_file, cert)
    # Extract strings and look for the distribution profile
    cmd = "cat {} | strings | grep iPhone".format(cert_file)
    out = self.device.local_op.command_blocking(cmd)[0]
    if out:
        msg = "Distribution Profile found"
        self.printer.notify(msg)
        self.print_cmd_output(out, None)
        self.add_issue('Provisioning Profile', '{}: {}'.format(msg, out),
                       'INVESTIGATE', None)
    else:
        msg = "No Distribution Profile found"
        self.printer.error(msg)
        self.add_issue('Provisioning Profile', msg, 'HIGH', None)
def module_run(self): self.printer.info("Looking for Binary Cookies files...") # Compose cmd string dirs = [ self.APP_METADATA['bundle_directory'], self.APP_METADATA['data_directory'] ] dirs_str = ' '.join(dirs) cmd = '{bin} {dirs_str} -type f -name "*binarycookies"'.format( bin=self.device.DEVICE_TOOLS['FIND'], dirs_str=dirs_str) out = self.device.remote_op.command_blocking(cmd) # No files found if not out: self.printer.info("No Binary Cookies files found") return # Add data protection class self.printer.info("Retrieving data protection classes...") retrieved_files = self.device.app.get_dataprotection(out) # Show Menu self.printer.info( "The following Binary Cookies files have been found:") if self.options['analyze']: option = choose_from_list_data_protection(retrieved_files) # Pull file fname = Utils.extract_filename_from_path(option) temp_file = self.local_op.build_temp_path_for_file(self, fname) self.device.pull(option, temp_file) # Analyze it with BinaryCookieReader cmd = 'python {bin} {temp_file}'.format( bin=self.TOOLS_LOCAL['BINARYCOOKIEREADER'], temp_file=temp_file) self.local_op.command_interactive(cmd) # Delete file self.local_op.delete_temp_file(self, fname) else: # Only list files, do not prompt the user choose_from_list_data_protection(retrieved_files, choose=False)
class Dependencycheckparser():
    """Parses OWASP dependency-check JSON reports (gradle, maven and node
    layouts) and forwards HIGH/CRITICAL vulnerabilities to ES, the results
    DB and Slack."""

    def __init__(self):
        self.es = elastic()
        self.const = Constants()
        self.utils = Utils()
        self.config = Config()

    def _parse_report(self, repo: str, report: str, language: str,
                      db_language: str):
        """Shared parser for one dependency-check-report.json file.

        language    -- value stored in the issue record
        db_language -- value passed to sent_result_to_db
        No-op when the report file does not exist.
        """
        if not os.path.exists(report):
            return
        with open(report) as file:
            res = json.loads(file.read())
        for dep in res['dependencies']:
            issue = {'repo': repo, 'scanner': 'dependency-check',
                     'bug_type': '', 'language': language,
                     'class_name': '', 'method_name': '',
                     'line_no_start': '', 'line_no_end': '',
                     'file_name': '', 'vulnerable_code': '',
                     'severity': '', 'module_name': '',
                     'advisories_url': '', 'vulnerable_versions': '',
                     'patched_versions': '', 'dependency_url': '',
                     'CVE': '', 'description': '', 'source_url': '',
                     'title': ''}
            if dep.get('vulnerabilities'):
                for vuln in dep['vulnerabilities']:
                    # Only HIGH/CRITICAL findings are forwarded.
                    if vuln['severity'] == "HIGH" or vuln['severity'] == "CRITICAL":
                        issue["dependency_url"] = dep['packages'][0]['url']
                        issue["CVE"] = vuln['name']
                        issue["description"] = vuln['description']
                        issue["source_url"] = vuln['references'][0]['url']
                        if self.utils.check_issue_exits(
                                repo, str(issue)) == False and str(issue) != "":
                            self.utils.sent_result_to_db(
                                repo, str(issue), db_language,
                                'dependency-check')
                            self.es.push_data_to_elastic_search(issue)
                            self.utils.sent_to_slack(
                                repo, json.dumps(issue, indent=4))

    def dependency_check_results_gradle(self, repo: str):
        """Parse the gradle-plugin report location."""
        self._parse_report(
            repo, '%s%s/build/reports/dependency-check-report.json' %
            (self.config.PATRONUS_DOWNLOAD_LOCATION, repo), 'java', 'java')
        return

    def dependency_check_results_maven(self, repo: str):
        """Parse the maven-plugin report location."""
        self._parse_report(
            repo, '%s%s/target/dependency-check-report.json' %
            (self.config.PATRONUS_DOWNLOAD_LOCATION, repo), 'java', 'java')
        return

    def node_results(self, repo: str):
        """Parse dependency-check output for node projects.

        BUG FIX: findings were tagged 'java' in the DB although the issue
        record said 'node-js'; tag consistently as 'node-js'.
        NOTE(review): the report path matches the maven layout -- confirm
        this is really where node reports land.
        """
        self._parse_report(
            repo, '%s%s/target/dependency-check-report.json' %
            (self.config.PATRONUS_DOWNLOAD_LOCATION, repo), 'node-js',
            'node-js')
        return
class Fsbparser():
    """Parses find-sec-bugs / SpotBugs JSON reports (gradle and maven
    layouts) and forwards SECURITY findings to ES and the results DB."""

    def __init__(self):
        self.es = elastic()
        self.const = Constants()
        self.utils = Utils()
        self.config = Config()

    def _new_issue(self, repo: str):
        # Fresh issue record with the common schema used by every parser.
        return {
            'repo': repo,
            'scanner': 'find-sec-bugs',
            'bug_type': '',
            'language': 'java',
            'class_name': '',
            'method_name': '',
            'line_no_start': '',
            'line_no_end': '',
            'file_name': '',
            'vulnerable_code': '',
            'severity': '',
            'module_name': '',
            'advisories_url': '',
            'vulnerable_versions': '',
            'patched_versions': '',
            'dependency_url': '',
            'CVE': '',
            'description': '',
            'source_url': '',
            'title': ''
        }

    def _handle_bug_instance(self, repo: str, i, notify_slack: bool):
        """Convert one BugInstance entry into an issue record and forward
        it when it is a new SECURITY finding."""
        try:
            # Reports may contain non-dict entries; skip them quietly
            # (the maven path already guarded on this).
            if type(i) is not dict or i['@category'] != "SECURITY":
                return
            issue = self._new_issue(repo)
            # CONSISTENCY FIX: maven_output wrote to issue["issue"] while
            # gradle_output used 'bug_type'; both now use 'bug_type'.
            issue['bug_type'] = i['@type']
            # BUG FIX: maven path assigned to an undefined name `result`.
            issue['class_name'] = i['Class']['@classname']
            if "Method" in i:
                issue["method_name"] = i['Method']['@name']
            src = i['SourceLine']
            if type(src) == list:
                # BUG FIX: maven path did `issue["line_no_end"] + ...`
                # (a no-op `+` instead of `=`).
                issue["line_no_start"] = src[0]['@start']
                issue["line_no_end"] = src[0]['@start']
            if type(src) == dict:
                # BUG FIX: maven path set line_no_end twice and never set
                # line_no_start.
                issue["line_no_start"] = src['@start']
                issue["line_no_end"] = src['@start']
            if self.utils.check_issue_exits(
                    repo, str(issue)) == False and str(issue) != "":
                self.utils.sent_result_to_db(repo, str(issue), 'java',
                                             'find-sec-bugs')
                self.es.push_data_to_elastic_search(issue)
                if notify_slack:
                    self.utils.sent_to_slack(repo,
                                             json.dumps(issue, indent=4))
        except Exception as e:
            # Best-effort per finding, matching the original behavior.
            print(e)

    def _parse_report(self, repo: str, path: str, notify_slack: bool):
        """Parse one report file if it exists."""
        if not os.path.exists(path):
            return
        with open(path) as file:
            res = json.loads(file.read())
        # BUG FIX: maven_output looked up res['aBugCollection'] (typo), so
        # maven reports were never parsed at all.
        if "BugInstance" in res['BugCollection']:
            for i in res['BugCollection']['BugInstance']:
                self._handle_bug_instance(repo, i, notify_slack)

    def gradle_output(self, repo: str):
        """Parse gradle findbugs reports (Slack notification stays
        disabled, as in the original)."""
        base = self.config.PATRONUS_DOWNLOAD_LOCATION
        self._parse_report(repo,
                           '%s%s/build/reports/findbugs/main.json' % (base, repo),
                           False)
        self._parse_report(repo, '%s%s/main.json' % (base, repo), False)
        return

    def maven_output(self, repo: str):
        """Parse maven spotbugs reports (with Slack notification)."""
        base = self.config.PATRONUS_DOWNLOAD_LOCATION
        self._parse_report(repo,
                           '%s%s/target/spotbugsXml.json' % (base, repo),
                           True)
        self._parse_report(repo, '%s%s/spotbugsXml.json' % (base, repo),
                           True)
        return
def compare(self, f_current, f_desired):
    """Compare two config files.

    Flattens both plists via structure_data, then for every desired
    setting present on the device prints whether it is weak and what the
    recommended value is, finishing with a misconfiguration count.
    NOTE(review): Python 2 code (`print ''`, `dict.keys()[0]` indexing).
    """
    current = self.structure_data(f_current)
    desired = self.structure_data(f_desired)
    # Number of settings that differ from the desired baseline.
    alert_tracker = 0
    # Print output header
    print ''
    self.printer.info(40 * '-')
    self.printer.notify('Device Configuration Assessment')
    self.printer.info(40 * '-')
    # Compare attributes
    for k, v in desired.items():
        if k in current.keys():
            # Get setting status: single-valued settings map to a boolean
            # enabled/disabled state.
            status = '[NOT CONFIGURED]' if len(v) == 0 else ''
            if len(v) == 1 and len(current[k].keys()) != 0:
                if str(current[k][current[k].keys()[0]]) == 'True':
                    status = '[ENABLED]'
                else:
                    status = '[DISABLED]'
            # Check for Config and Desired config setting mismatch
            if v != current[k]:
                alert_tracker += 1
                attribute = k + ': '
                if status != '':
                    attribute += status
                self.printer.warning('[WEAK] %s' % attribute)
                # If attribute consists of multiple dict values, process
                # and output each sub-setting with its recommendation
                if Utils.is_plist(current[k]) and len(current[k]) > 1:
                    for k1, v1 in current[k].items():
                        attribute = '\t%s: %s' % ((str(k1).replace(
                            'range', '')).ljust(9), str(v1))
                        try:
                            recommendation = ''
                            if current[k][k1] != desired[k][k1]:
                                recommendation = ' (Recommend: %s)' % str(
                                    desired[k][k1])
                        except KeyError:
                            # Sub-key absent from the baseline: skip it.
                            continue
                        # Print config status and recommended value
                        self.printer.notify('%s%s' %
                                            (attribute, recommendation))
                # Else print config status and recommended value
                else:
                    val = desired[k]
                    recommended = 'DISABLING'
                    if str(val[val.keys()[0]]) == 'True':
                        recommended = 'ENABLING'
                    self.printer.notify('\tRecommend: %s' % recommended)
    # Print output footer
    self.printer.info(40 * '-')
    self.printer.notify('%d/%d Misconfigurations' %
                        (alert_tracker, len(desired)))
    self.printer.info(40 * '-')
    print ''
class Java():
    """Drive static analysis of Java repositories.

    Detects whether a checked-out repo builds with maven or gradle, injects
    the find-sec-bugs / dependency-check plugins into the build file, runs
    the build, and converts the resulting XML reports to JSON.
    """

    def __init__(self):
        self.config = Config()
        self.NEXUS_URL_OLD = self.config.NEXUS_URL_OLD
        self.NEXUS_URL_NEW = self.config.NEXUS_URL_NEW
        self.const = Constants()
        self.utils = Utils()
        self.maven = []   # repos detected as maven builds
        self.gradle = []  # repos detected as gradle builds

    def check_build(self, repo: str):
        """Return "maven" or "gradle" for the repo's build system, or None.

        Side effect: records the repo in self.maven / self.gradle.
        """
        base = "%s%s" % (self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
        if Path("%s/pom.xml" % base).is_file():
            self.maven.append(repo)
            return "maven"
        if Path("%s/build.gradle" % base).is_file():
            # BUG FIX: the original appended gradle repos to self.maven.
            self.gradle.append(repo)
            return "gradle"
        return None

    def build_maven(self, repo: str):
        """Compile the maven project and run the spotbugs goal."""
        os.chdir("%s%s" % (self.config.PATRONUS_DOWNLOAD_LOCATION, repo))
        try:
            self.utils.execute_cmd("mvn compile", repo)
            self.utils.execute_cmd("mvn spotbugs:spotbugs", repo)
            logging.info("successfully build maven project %s " % (repo))
        except Exception:
            logging.debug('Error building maven project %s' % (repo))
        return

    def build_gradle(self, repo: str):
        """Run the gradle check task (tests excluded)."""
        os.chdir("%s%s" % (self.config.PATRONUS_DOWNLOAD_LOCATION, repo))
        try:
            self.utils.execute_cmd("./gradlew clean check -x test", repo)
            logging.info("successfully build gradle project %s " % (repo))
        except Exception:
            logging.debug("Error building gradle project %s" % (repo))
        return

    def register_all_namespaces(self, filename):
        """Register every namespace prefix used in `filename` so ElementTree
        preserves the prefixes when the tree is serialized again.

        https://stackoverflow.com/questions/54439309/how-to-preserve-namespaces-when-parsing-xml-via-elementtree-in-python
        """
        namespaces = dict(
            [node for _, node in ET.iterparse(filename, events=['start-ns'])])
        for ns in namespaces:
            ET.register_namespace(ns, namespaces[ns])
        return

    def insert_build_tag(self, repo):
        # NOTE(review): intentionally empty in the original; kept for
        # interface compatibility.
        return

    def add_build_tag(self, repo: str):
        """Append the constant <build> section to the repo's pom.xml."""
        try:
            filename = "%s%s/pom.xml" % (
                self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
            tree = ET.parse(filename)
            root = tree.getroot()
            # Preserve namespace prefixes on rewrite (deduplicated: the
            # original inlined a copy of register_all_namespaces here).
            self.register_all_namespaces(filename)
            root.append(ET.fromstring(self.const.POM_BUILD_TAG))
            tree.write(filename, xml_declaration=True)
        except Exception:
            logging.debug("Failed modified pom.xml for maven project %s" %
                          (repo))
        return

    def modify_pom_for_findsecbugs(self, repo: str):
        """Inject the find-sec-bugs and dependency-check plugins into pom.xml."""
        try:
            filename = "%s%s/pom.xml" % (
                self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
            tree = ET.parse(filename)
            root = tree.getroot()
            self.register_all_namespaces(filename)
            build = root.find('{http://maven.apache.org/POM/4.0.0}build')
            if build is None:
                # BUG FIX: add_build_tag rewrites the file on disk, so the
                # in-memory tree must be re-parsed before looking up <build>;
                # the original searched the stale root and always failed here.
                self.add_build_tag(repo)
                tree = ET.parse(filename)
                root = tree.getroot()
                build = root.find('{http://maven.apache.org/POM/4.0.0}build')
            plugins = build.find('{http://maven.apache.org/POM/4.0.0}plugins')
            plugins.append(ET.fromstring(self.const.FINDSECBUGS_XML))
            plugins.append(ET.fromstring(self.const.DEPENDENCY_CHECK_XML))
            tree.write(filename, xml_declaration=True)
            # NOTE(review): update_nexus_url is defined elsewhere on this
            # class — not visible in this chunk.
            self.update_nexus_url(repo)
            logging.info("successfully modified pom.xml for maven project %s" %
                         (repo))
        except Exception:
            logging.debug("Failed modified pom.xml for maven project %s" %
                          (repo))
        return

    def parse_xml(self, repo: str):
        """Derive the artifact file name (artifactid_version.packaging)
        from the repo's pom.xml."""
        with open("%s%s/pom.xml" %
                  (self.config.PATRONUS_DOWNLOAD_LOCATION, repo)) as xml_file:
            # lxml lower-cases tag names, hence soup.artifactid / soup.version.
            soup = BeautifulSoup(xml_file, "lxml")
        artifactid = soup.artifactid.string
        version = soup.version.string
        if soup.packaging is not None:
            packaging = soup.packaging.string
        else:
            # Maven's default packaging when the pom omits the element.
            packaging = "jar"
        return "%s_%s.%s" % (artifactid, version, packaging)

    def _xml_report_to_json(self, xml_path: str, json_path: str):
        """Convert one XML report file to pretty-printed JSON on disk."""
        with open(xml_path) as src:
            json_string = json.dumps(xmltodict.parse(src.read()), indent=4)
        with open(json_path, 'w') as dst:
            dst.write(json_string)

    def convert_xml_to_json(self, repo: str):
        """Convert whichever find-sec-bugs XML reports exist for `repo` to JSON.

        BUG FIXES: the original compared the build type with `is` (identity)
        instead of `==`, and the fallback spotbugsXml.xml branch wrote the
        stale/undefined `jsonString` variable instead of its own conversion.
        """
        base = "%s%s" % (self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
        build = self.check_build(repo)
        if build == "maven":
            reports = ["%s/target/spotbugsXml.xml" % base,
                       "%s/spotbugsXml.xml" % base]
        elif build == "gradle":
            reports = ["%s/build/reports/findbugs/main.xml" % base,
                       "%s/main.xml" % base]
        else:
            return
        for xml_path in reports:
            if os.path.exists(xml_path):
                self._xml_report_to_json(
                    xml_path, xml_path[:-len(".xml")] + ".json")
        return

    def modify_gradle_for_findsecbugs(self, repo: str):
        """Inject find-sec-bugs / dependency-check configuration into
        build.gradle: plugin lines, dependency lines, trailing config blocks."""
        gradle_file = "%s%s/build.gradle" % (
            self.config.PATRONUS_DOWNLOAD_LOCATION, repo)
        lookup_lineno = []
        lookup_2_lineno = []
        try:
            # Locate the plugin anchor lines (1-based line numbers).
            with open(gradle_file) as myFile:
                for num, line in enumerate(myFile, 1):
                    if self.const.FINDSECBUGS_PATTERN_1 in line:
                        lookup_lineno.append(num)
            with open(gradle_file, "r") as f:
                contents = f.readlines()
            # apply plugin: insert both plugin lines right after the anchor.
            contents.insert(lookup_lineno[0],
                            self.const.FINDSECBUGS_PATTERN_VALUE_1)
            contents.insert(lookup_lineno[0],
                            self.const.DEPENDENCY_CHECK_PATTERN_VALUE_2)
            with open(gradle_file, "w") as f:
                f.write("".join(contents))
            # Locate the dependencies-block anchors in the rewritten file.
            with open(gradle_file) as myFile:
                for num, line in enumerate(myFile, 1):
                    if self.const.FINDSECBUGS_PATTERN_2 in line:
                        lookup_2_lineno.append(num)
            with open(gradle_file, "r") as f:
                contents = f.readlines()
            # dependencies {
            contents.insert(lookup_2_lineno[-1] + 2,
                            self.const.FINDSECBUGS_PATTERN_VALUE_2)
            contents.insert(lookup_2_lineno[0] + 1,
                            self.const.DEPENDENCY_CHECK_PATTERN_VALUE_1)
            with open(gradle_file, "w") as f:
                f.write("".join(contents))
            # Trailing plugin-configuration blocks are simply appended.
            with open(gradle_file, "a") as f:
                f.write(self.const.FINDSECBUGS_PATTERN_VALUE_3)
                f.write(self.const.DEPENDENCY_CHECK_PATTERN_VALUE_3)
            self.update_nexus_url(repo)
            logging.info(
                "successfully modified build.gradle for gradle project %s" %
                (repo))
        except Exception:
            logging.debug(
                "Error modifying build.gradle for gradle project %s" % (repo))
        return

    def project_build(self, repo: str):
        """Full pipeline for one repo: patch the build file, build, retry the
        scan if the report is missing, and convert reports to JSON."""
        build = self.check_build(repo)
        if build == "maven":
            try:
                self.modify_pom_for_findsecbugs(repo)
                self.build_maven(repo)
                self.retry_for_failed_attempts(repo, "maven")
                self.convert_xml_to_json(repo)
                # BUG FIX: the original log messages said "gradle" here.
                logging.info("Successfully build maven project %s" % (repo))
            except Exception:
                logging.debug("Failed building maven project %s" % (repo))
        elif build == "gradle":
            try:
                self.modify_gradle_for_findsecbugs(repo)
                self.build_gradle(repo)
                self.retry_for_failed_attempts(repo, "gradle")
                self.convert_xml_to_json(repo)
                logging.info("Successfully build gradle project %s" % (repo))
            except Exception:
                logging.debug("Failed building gradle project %s" % (repo))
        return

    def retry_for_failed_attempts(self, repo: str, build: str):
        """Fall back to the standalone find-sec-bugs CLI when the build
        produced no XML report.

        BUG FIXES: build-type comparison used `is`; the gradle branch passed
        "/gradle" to fsb(), so its gradle path never executed.
        """
        if build == "maven":
            if not os.path.exists(
                    "%s%s/target/spotbugsXml.xml" %
                    (self.config.PATRONUS_DOWNLOAD_LOCATION, repo)):
                self.fsb(repo, "maven")
        if build == "gradle":
            if not os.path.exists(
                    "%s%s/build/reports/findbugs/main.xml" %
                    (self.config.PATRONUS_DOWNLOAD_LOCATION, repo)):
                self.fsb(repo, "gradle")
        return

    def fsb(self, repo: str, build: str):
        """Invoke the bundled findsecbugs.sh against the repo sources and
        convert the resulting XML report to JSON."""
        parent_dir = dirname(
            dirname(os.path.abspath(os.path.dirname(__file__))))
        os.chdir(parent_dir + "/tools/findsecbugs")
        try:
            if build == "maven":
                self.utils.execute_cmd(
                    "./findsecbugs.sh -xml -output %s%s/spotbugsXml.xml %s%s" %
                    (self.config.PATRONUS_DOWNLOAD_LOCATION, repo,
                     self.config.PATRONUS_DOWNLOAD_LOCATION, repo), repo)
                self.convert_xml_to_json(repo)
            if build == "gradle":
                self.utils.execute_cmd(
                    "./findsecbugs.sh -xml -output %s%s/main.xml %s%s" %
                    (self.config.PATRONUS_DOWNLOAD_LOCATION, repo,
                     self.config.PATRONUS_DOWNLOAD_LOCATION, repo), repo)
                self.convert_xml_to_json(repo)
        except Exception as e:
            logging.debug("Error running find-sec-bugs on %s. Error: %s" %
                          (repo, e))
        return
def __init__(self):
    """Set up shared project helpers."""
    # Project configuration and generic utility helpers (defined elsewhere).
    self.config = Config()
    self.utils = Utils()
def listen():
    """Record audio from the microphone and log where the clip was saved."""
    # Clear out any previous recordings before capturing a new one.
    Utils().clean_input_audio_dir()
    recorded_name = recorder.record_to_file()
    settings.logger.info("result written to %s.wav" % recorded_name)
def __init__(self):
    """Set up the Java build driver and shared project helpers."""
    # All of these are project classes defined elsewhere in the package.
    self.java = Java()
    self.const = Constants()
    self.utils = Utils()
    self.config = Config()
def _get_sorted_syntaxes(cls) -> List[CommandSyntax]:
    """Return the command syntaxes in sorted order, computing and caching
    the sorted copy on first use."""
    if cls._sorted_syntaxes is not None:
        return cls._sorted_syntaxes
    ordered = cls.get_syntaxes().copy()
    # Sort by always_validate_input_format (descending) then param_count (ascending).
    Utils.multisort(ordered, (("always_validate_input_format", False),
                              ("param_count", True)))
    cls._sorted_syntaxes = ordered
    return ordered
def compare(self, f_current, f_desired):
    """Compare a current device configuration file against a desired baseline.

    Prints a per-setting assessment (enabled/disabled/not configured, with a
    recommendation for each weak setting) followed by a misconfiguration count.

    BUG FIX: the original body was Python 2 (`print ''` statements and
    `dict.keys()[0]` indexing) and cannot run under Python 3, which the rest
    of this file targets. Ported with identical output.
    """
    current = self.structure_data(f_current)
    desired = self.structure_data(f_desired)
    alert_tracker = 0
    # Print output header
    print('')
    self.printer.info(40 * '-')
    self.printer.notify('Device Configuration Assessment')
    self.printer.info(40 * '-')
    # Compare attributes
    for k, v in desired.items():
        if k in current:
            # Get setting status
            status = '[NOT CONFIGURED]' if len(v) == 0 else ''
            if len(v) == 1 and len(current[k]) != 0:
                # Single-valued setting: its lone value is a 'True'/'False' flag.
                if str(current[k][next(iter(current[k]))]) == 'True':
                    status = '[ENABLED]'
                else:
                    status = '[DISABLED]'
            # Check for Config and Desired config setting mismatch
            if v != current[k]:
                alert_tracker += 1
                attribute = k + ': '
                if status != '':
                    attribute += status
                self.printer.warning('[WEAK] %s' % attribute)
                # If attribute consists of multiple dict values, process and output
                if Utils.is_plist(current[k]) and len(current[k]) > 1:
                    for k1, v1 in current[k].items():
                        attribute = '\t%s: %s' % (
                            (str(k1).replace('range', '')).ljust(9), str(v1))
                        try:
                            recommendation = ''
                            if current[k][k1] != desired[k][k1]:
                                recommendation = ' (Recommend: %s)' % str(
                                    desired[k][k1])
                        except KeyError:
                            # Setting absent from the baseline: nothing to recommend.
                            continue
                        # Print config status and recommended value
                        self.printer.notify('%s%s' % (attribute, recommendation))
                # Else print config status and recommended value
                else:
                    val = desired[k]
                    recommended = 'DISABLING'
                    if str(val[next(iter(val))]) == 'True':
                        recommended = 'ENABLING'
                    self.printer.notify('\tRecommend: %s' % recommended)
    # Print output footer
    self.printer.info(40 * '-')
    self.printer.notify('%d/%d Misconfigurations' % (alert_tracker, len(desired)))
    self.printer.info(40 * '-')
    print('')
# --- Module-level wiring: instantiate scanners, parsers and helpers. ---
# NOTE(review): all of these are project classes defined elsewhere.
mrc = MyRemoteCallbacks()
config = Config()
java = Java()
dc = DependencyCheck()
go = GoLang()
node = NodeJs()
# Repositories discovered up front by the remote-callbacks helper.
repos = mrc.scan_repos()
command = Command()
const = Constants()
gp = Gosecparser()
fsbp = Fsbparser()
dcp = Dependencycheckparser()
np = Npmauditparser()
utils = Utils()
# Per-language repo buckets, presumably filled by the scan loop elsewhere
# in this file — confirm against the rest of the module.
java_repos = []
go_repos = []
node_repos = []
repos_to_scan = []


def scan_complete():
    """Print a colourised completion banner and log that scanning finished."""
    print(Fore.GREEN + "[+]---------- Scan completed -------------" +
          Style.RESET_ALL)
    logging.info('Completed Scanning')
    return
def __init__(self):
    """Set up the Elasticsearch client and shared project helpers."""
    # elastic() provides push_data_to_elastic_search / get, used by parsers.
    self.es = elastic()
    self.const = Constants()
    self.utils = Utils()
    self.config = Config()
def __init__(self):
    """Set up shared constants and utility helpers."""
    self.const = Constants()
    self.utils = Utils()