def upload():
    """Upload a zipped source archive, scan it and return a result URL.

    Expects a multipart POST with a ``file`` field. The archive is accepted
    only when its extension and MIME type are allowlisted in settings.
    Returns JSON: ``{"status": "success", "url": ...}`` on success, or
    ``{"status": "error", ...}`` when validation fails.
    """
    if 'file' in request.files:
        filen = request.files['file']
        _, extension = os.path.splitext(filen.filename.lower())
        # Accept only allowlisted extensions AND MIME types — checking
        # both defends against mislabeled uploads.
        if (filen and filen.filename
                and extension in settings.UPLD_ALLOWED_EXTENSIONS
                and filen.mimetype in settings.UPLD_MIME):
            filename = secure_filename(filen.filename)
            # FIX: the original checked settings.UPLOAD_FOLDER for
            # existence but saved into app.config['UPLOAD_FOLDER'];
            # create and use the same folder so the save cannot target
            # a missing directory.
            upload_folder = app.config['UPLOAD_FOLDER']
            os.makedirs(upload_folder, exist_ok=True)
            # Save the archive; its SHA-256 is the scan identifier.
            zip_file = os.path.join(upload_folder, filename)
            filen.save(zip_file)
            get_zip_hash = utils.gen_sha256_file(zip_file)
            # Skip re-scanning archives we have already analyzed.
            res = Results.query.filter(Results.scan_hash == get_zip_hash)
            if not res.count():
                # Per-scan extraction directory, keyed by the zip hash.
                app_dir = os.path.join(upload_folder, get_zip_hash + "/")
                os.makedirs(app_dir, exist_ok=True)
                utils.unzip(zip_file, app_dir)
                # Run the static analysis over the extracted tree.
                scan_results = scan_dirs([app_dir])
                print("[INFO] Static Analysis Completed!")
                _, sha2_hashes, hash_of_sha2 = utils.gen_hashes([app_dir])
                tms = datetime.datetime.fromtimestamp(
                    time.time()).strftime('%Y-%m-%d %H:%M:%S')
                # Persist the scan results.
                # NOTE(review): analyze() passes an extra time_execute
                # argument to Results — confirm the model defaults it.
                print("[INFO] Saving Scan Results!")
                res_db = Results(
                    filename,
                    get_zip_hash,
                    [app_dir],
                    sha2_hashes,
                    hash_of_sha2,
                    scan_results['sec_issues'],
                    scan_results['good_finding'],
                    scan_results['missing_sec_header'],
                    scan_results['files'],
                    scan_results['total_count'],
                    scan_results['vuln_count'],
                    [],
                    [],
                    tms,
                )
                db_session.add(res_db)
                db_session.commit()
            # Fresh and previously-scanned uploads both resolve to the
            # stored result page.
            return jsonify({
                "status": "success",
                "url": "result/" + get_zip_hash
            })
    return jsonify({"status": "error", "desc": "Upload Failed!"})
def invalid():
    """Mark the issue as invalid"""
    scan_hash = request.form["scan_hash"]
    invalid_hash = request.form["invalid_hash"]
    # Both identifiers must look like SHA-256 digests before we touch
    # the database.
    hashes_ok = (utils.sha2_match_regex(scan_hash)
                 and utils.sha2_match_regex(invalid_hash))
    if hashes_ok:
        res = Results.query.filter(Results.scan_hash == scan_hash)
        if res.count():
            flagged = utils.python_list(res[0].invalid)
            # Only record each finding once.
            if invalid_hash not in flagged:
                flagged.append(invalid_hash)
                res.update({"invalid": flagged})
                db_session.commit()
            return jsonify({"status": "ok"})
    return jsonify({"status": "failed"})
def revert():
    """Revert not an issue to issue"""
    scan_hash = request.form["scan_hash"]
    finding_hash = request.form["finding_hash"]
    # Reject anything that is not a well-formed SHA-256 digest.
    valid_input = (utils.sha2_match_regex(scan_hash)
                   and utils.sha2_match_regex(finding_hash))
    if not valid_input:
        return jsonify({"status": "failed"})
    res = Results.query.filter(Results.scan_hash == scan_hash)
    if not res.count():
        return jsonify({"status": "failed"})
    resolved_list = utils.python_list(res[0].resolved)
    # Drop the finding from the resolved set, if present, so it shows
    # up as an open issue again.
    if finding_hash in resolved_list:
        resolved_list.remove(finding_hash)
        res.update({"resolved": resolved_list})
        db_session.commit()
    return jsonify({"status": "ok"})
def delete_scan():
    """Delete a scan: extracted sources, uploaded zip and the DB row.

    Reads ``scan_hash`` from the POSTed form. Returns JSON
    ``{"status": "ok"}`` on success, ``{"status": "failed"}`` for an
    invalid or unknown hash.
    """
    context = {"status": "failed"}
    scan_hash = request.form["scan_hash"]
    # Only act on well-formed SHA-256 hashes.
    if utils.sha2_match_regex(scan_hash):
        res = Results.query.filter(Results.scan_hash == scan_hash).first()
        if res:
            # Remove every extracted analysis directory. Guard against
            # paths already removed (e.g. manual cleanup) so a missing
            # directory cannot abort the deletion with an exception.
            locs = utils.python_list(res.locations)
            for loc in locs:
                if os.path.isdir(loc):
                    shutil.rmtree(loc)
            # Remove the original uploaded archive, if still present.
            ziploc = os.path.join(app.config['UPLOAD_FOLDER'], res.scan_file)
            if os.path.isfile(ziploc):
                os.remove(ziploc)
            db_session.delete(res)
            db_session.commit()
            context = {"status": "ok"}
    return jsonify(**context)
def analyze():
    """Upload and analyze source code. Output JSON.

    Accepts a multipart POST with a ``file`` field containing a zip
    archive. New archives are extracted and scanned (results are cached
    in the database, keyed by the zip's SHA-256); a GuardRails-style
    JSON report of the findings is returned either way.
    """
    if 'file' in request.files:
        filen = request.files['file']
        _, extension = os.path.splitext(filen.filename.lower())
        # Accept only allowlisted extensions AND MIME types.
        if (filen and filen.filename
                and extension in settings.UPLD_ALLOWED_EXTENSIONS
                and filen.mimetype in settings.UPLD_MIME):
            filename = secure_filename(filen.filename)
            # Make upload dir
            if not os.path.exists(settings.UPLOAD_FOLDER):
                os.makedirs(settings.UPLOAD_FOLDER)
            # Save file
            zip_file = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            filen.save(zip_file)
            # The zip's SHA-256 identifies the scan.
            get_zip_hash = utils.gen_sha256_file(zip_file)
            res = Results.query.filter(Results.scan_hash == get_zip_hash)
            time_execute = 0
            # Check if file is scanned before; only scan new archives.
            if not res.count():
                path_dir = os.path.join(app.config['UPLOAD_FOLDER'],
                                        get_zip_hash + "/")
                # Make app analysis dir
                if not os.path.exists(path_dir):
                    os.makedirs(path_dir)
                utils.unzip(zip_file, path_dir)
                # Time the static analysis run (seconds, 3 decimals).
                start_time = time.time()
                scan_results = scan_dirs([path_dir])
                time_execute = round(time.time() - start_time, 3)
                _, sha2_hashes, hash_of_sha2 = utils.gen_hashes([path_dir])
                tms = datetime.datetime.fromtimestamp(
                    time.time()).strftime('%Y-%m-%d %H:%M:%S')
                print("[INFO] Saving Scan Results!")
                res_db = Results(
                    filename,
                    get_zip_hash,
                    [path_dir],
                    sha2_hashes,
                    hash_of_sha2,
                    scan_results['sec_issues'],
                    scan_results['good_finding'],
                    scan_results['missing_sec_header'],
                    scan_results['files'],
                    scan_results['total_count'],
                    scan_results['vuln_count'],
                    [],
                    [],
                    tms,
                    time_execute,
                )
                db_session.add(res_db)
                db_session.commit()
            # Re-query so cached and fresh scans are reported identically.
            res = Results.query.filter(Results.scan_hash == get_zip_hash)
            output = []
            iss_count = 0
            _type = "sast"
            time_execute = res.first().time_execute
            # Flatten the grouped findings into one flat issue list; the
            # group names are not part of the report, so iterate values.
            for issues in (res.first().sec_issues).values():
                for issue in issues:
                    iss_count += 1
                    _location = {
                        'path': issue["path"],
                        'positions': {
                            'begin': {'line': issue['line']},
                        },
                    }
                    output.append({
                        'type': _type,
                        "ruleId": issue['title'],
                        "location": _location,
                        "metadata": {"description": issue['description']},
                    })
            response = {
                "engine": {
                    "name": "guardrails/engine-javascript-nodejsscan",
                    "version": "1.0.0"
                },
                "process": {
                    "name": "nodejsscan",
                    "version": "3.4"
                },
                "language": "javascript",
                "status": "success",
                # Stored execution time is seconds; report milliseconds.
                "executionTime": time_execute * 1000,
                "issues": iss_count,
                "output": output
            }
            # Plain dicts preserve insertion order (Python 3.7+), so the
            # former OrderedDict wrapper was redundant and is dropped.
            return json.dumps(response)
        else:
            return "Only accept zip file\n"
    else:
        return "Missing param file\n"
    # Unreachable: both branches above return. Kept as a safety net.
    return "Internal errors\n"