def github_urls():
    """Map each repo's id to its GitHub URL under the nasifimtiazohi account.

    The repository name on GitHub is '<repo>-<release>' per the distro
    release mapping. Returns a dict {repoId: url}.
    """
    release_by_repo = distro.getRepoReleaseMapping()
    return {
        common.getRepoId(name): 'https://github.com/nasifimtiazohi/{}-{}'.format(name, release)
        for name, release in release_by_repo.items()
    }
def addDepndencies():
    """Scan watched repositories and record Maven dependencies.

    NOTE(review): only repoId 19 is processed — presumably a one-off run for
    a single repo; confirm before relying on this for the full set.
    (Function name typo is preserved: callers reference it as-is.)
    """
    for repo_path in common.getWatchedRepos():
        repo_name = repo_path.split("/")[-1]
        repo_id = common.getRepoId(repo_name)
        if repo_id != 19:
            continue
        addMavenDependencies(repo_id, repo_path)
def scanAndProcess(path):
    """Run `snyk test` inside *path*, split per-module results by package
    manager, and persist npm/maven alerts.

    Returns the scan wall-clock time in minutes (via common.getTimeDeltaInMinutes).
    """
    os.chdir(path)
    repo = path.split('/')[-1]
    repoId = common.getRepoId(repo)
    print('scanning ', path)
    start = datetime.now()
    # Exit status intentionally ignored: snyk exits non-zero when vulns are
    # found; the JSON written to snyk.json is the real output. (The original
    # stored the status in `report` and immediately overwrote it — dead store.)
    os.system('snyk test --all-projects --dev --json > snyk.json')
    end = datetime.now()
    scantime = common.getTimeDeltaInMinutes(end - start)
    # Use a context manager so the file handle is always closed (the original
    # leaked it via open(...).read()).
    with open('snyk.json', 'r') as f:
        report = json.loads(f.read())
    mavenModules = []
    npmModules = []
    # Multi-project scans yield a list of module dicts; a single-project scan
    # yields one dict — normalize to a list.
    if isinstance(report, list):
        print('multi-project', len(report))
    elif isinstance(report, dict):
        report = [report]
        print('single project', len(report))
    for module in report:
        if module['packageManager'] == 'npm':
            npmModules.append(module)
        elif module['packageManager'] == 'maven':
            mavenModules.append(module)
        else:
            print('outside npm maven found. see report for ', path)
    npm_d = processNpmModules(repoId, npmModules)
    addNpmAlerts(npm_d)
    maven_d = processMavenModules(repoId, mavenModules)
    addMavenAlerts(maven_d)
    return scantime
def scanAndProcess(path):
    """Run the Red Hat Victims maven plugin in *path*, parse the generated
    dependency report, and persist de-duplicated vulnerability alerts.

    Returns the scan wall-clock time in minutes.

    NOTE(review): a function with this same name (the snyk scanner) appears
    earlier in this file; if both live in one module, this definition shadows
    it — confirm they belong to separate scripts.
    """
    repo = path.split('/')[-1]
    repoId = common.getRepoId(repo)
    os.chdir(path)
    start = datetime.now()
    os.system('mvn com.redhat.victims.maven:security-versions:check')
    end = datetime.now()
    scanTime = common.getTimeDeltaInMinutes(end - start)
    os.chdir(path + '/target')
    # The plugin writes exactly one index.html under */dependencies/*;
    # split("\n")[:-1] drops the trailing empty element from `find` output.
    files = (os.popen("find . -type f -path */dependencies/* -name index.html").read()).split("\n")[:-1]
    assert len(files) == 1
    # Context manager so the report file is always closed (original leaked
    # the handle via open(file).read()).
    with open(files[0]) as report_file:
        soup = BeautifulSoup(report_file.read(), 'lxml')
    # First <table> in the report holds the vulnerability rows.
    d = getVulns(repoId, soup.find_all('table')[0])
    vulns = dedupe_vulns(repoId, d)
    addAlerts(vulns)
    return scanTime
def processVulnMethods(repoId, data):
    """For each vulnerability, record the single vulnerable call's method
    identity and its call-chain count into srcclrCallChains."""
    insert_q = 'insert into srcclrCallChains values(%s,%s,%s,%s,%s)'
    for vuln in data:
        # The scan format is expected to report exactly one call per vuln.
        assert len(vuln['calls']) == 1
        call = vuln['calls'][0]
        method_info = call['method']
        sql.execute(insert_q, (repoId,
                               method_info['className'],
                               method_info['descriptor'],
                               method_info['methodName'],
                               len(call['callChains'])))


# NOTE(review): `lines`, `failures`, and `path` are not defined in this view —
# presumably assigned earlier in the file; confirm before running standalone.
for line in lines:
    if line in failures:
        continue
    # Directory names look like '<repo>-<release>'; strip the release suffix.
    repoName = '-'.join(line.split('-')[:-1])
    repoId = common.getRepoId(repoName)
    filename = path + '/' + line + '/scan.json'
    with open(filename, 'r') as scan_file:
        print(repoName)
        records = json.loads(scan_file.read())['records']
        assert len(records) == 1
        data = records[0]
        allLibraries = data['libraries']
        assert allMavenLibraries(allLibraries)
        # if 'vulnerabilities' in data.keys():
        #     for vuln in data['vulnerabilities']:
        #         process_vulnerabilities(repoId, vuln, allLibraries)
        processVulnMethods(repoId, data['vulnMethods'])
# NOTE(review): this block appears to be the tail of a function whose `def`
# line is outside this view — `dependencyId`, `vulnId`, `toolId`, `severity`
# and `npmHM` come from that enclosing scope. Reconstructed as seen.
count = npmHM[(dependencyId, vulnId, toolId)]['count']
# NOTE(review): `count` is computed but never used below — the last insert
# column is hardcoded to 1; confirm whether `count` was intended there.
q = 'insert into npmAlert values(%s,%s,%s,%s,%s,%s,%s,%s,%s)'
try:
    sql.execute(q, (None, None, dependencyId, vulnId, None, toolId, None, severity, 1))
except sql.pymysql.IntegrityError as error:
    if error.args[0] == sql.PYMYSQL_DUPLICATE_ERROR:
        # TODO update scandate
        # Fixed copy-paste bug: this insert targets npmAlert, but the old
        # message claimed a maven alert.
        print('npm alert exists already in db')
    else:
        raise Exception(str(error))


if __name__ == '__main__':
    repoRelaseMapping = distro.getRepoReleaseMapping()
    print(len(repoRelaseMapping))
    for repo in repoRelaseMapping.keys():
        repoId = common.getRepoId(repo)
        # Only repo 1 is processed — presumably a one-off/debug run; confirm.
        if repoId != 1:
            continue
        githubReponame = repo + '-' + repoRelaseMapping[repo]
        print(githubReponame)
        alerts = getDependencyAlerts('nasifimtiazohi', githubReponame)
        print(alerts)
        # print("{} has {} alerts".format(githubReponame,len(alerts)))
        # processAlerts(repoId, alerts)
        # to help api rate limit
        time.sleep(3)