def scan_detekt(filename, stream_result_path, pid_file, config_path):
    """Run detekt on a single Kotlin file and convert its XML report into the
    common ``filename->line->checker->message`` error file.

    The child's pid is registered in ``pid_file`` so it can be killed on
    shutdown; its combined stdout/stderr is drained and discarded.
    """
    result_base = result.get_result_file_path(stream_result_path, filename)
    err_path = result_base + ".error"
    xml_path = result_base + ".xml"
    tool_dir = os.path.join(sys.path[0] + '/../', 'tools_dep/detekt')
    cmd = ("java -jar " + tool_dir + "/detekt-cli-1.0.0-RC14-all.jar "
           + "-i " + filename + " -c " + config_path + " -r xml:" + xml_path)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(proc.pid), pid_file)
        # Drain output so the child never blocks on a full pipe.
        for _ in proc.stdout:
            pass
    finally:
        proc.terminate()
        proc.wait()
    if not os.path.isfile(xml_path):
        return
    try:
        with open(err_path, 'w', encoding='utf-8') as logfile:
            for elem in ET.ElementTree(file=xml_path).iter():
                if elem.tag != "error":
                    continue
                checker = elem.attrib['source'].replace("detekt.", "")
                logfile.write(filename + '->' + elem.attrib['line'] + '->' +
                              checker + '->' + elem.attrib['message'] + '\n')
    except Exception as e:
        print('=>detekt_ext.py->scan_detekt->ERROR: ' + str(e) + "->" + xml_path)
def scan_occheck(filename, stream_result_path, pid_file, config_path):
    """Run occheck on one Objective-C file and rewrite its JSON report as the
    common ``filename->line->checker->message`` error file."""
    base = result.get_result_file_path(stream_result_path, filename)
    err_path = base + ".error"
    json_path = base + ".json"
    tool_dir = os.path.join(sys.path[0] + '/../', 'tools_dep/occheck')
    cmd = ("python " + tool_dir + "/occheck.py " + filename +
           " --config " + config_path + " -f json -o " + json_path)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(proc.pid), pid_file)
        # Drain output so the child never blocks on a full pipe.
        for _ in proc.stdout:
            continue
    finally:
        proc.terminate()
        proc.wait()
    if not os.path.isfile(json_path):
        return
    try:
        with open(json_path, 'r', encoding='utf-8') as report:
            issues = json.load(report)
        with open(err_path, 'w', encoding='utf-8') as logfile:
            for issue in issues:
                logfile.write(filename + '->' + str(issue['Line']) + '->' +
                              issue['CheckName'] + '->' + issue['Message'] + '\n')
    except Exception as e:
        print('=>occheck_ext.py->scan_occheck->ERROR: ' + str(e) + "->" + json_path)
def main_scan(stream_info):
    """Launch the per-tool external scan script (``<TOOL_TYPE>_ext.py``).

    The full ``stream_info`` dict is serialized to JSON, base64-encoded and
    passed to the child script as its single command-line argument.  The
    child's pid is registered in PID_FILE and its combined stdout/stderr is
    echoed with timestamps.

    Raises whatever exception occurred during setup/scan, preserving the
    original type and traceback.
    """
    proc = None  # BUG FIX: ensure the finally clause is safe when Popen was never reached
    try:
        # Let the tool-specific interpreter/bin dirs win on PATH lookups.
        if "SUB_PATH" in stream_info:
            os.environ["PATH"] = stream_info[
                'SUB_PATH'] + os.pathsep + os.environ["PATH"]
        tool_bin = os.path.join(sys.path[0], 'tools')
        content = util.base64toencode(
            util.str_to_bytes(json.dumps(stream_info)))
        ex_py_script = ''
        if 'TOOL_TYPE' in stream_info:
            ex_py_script = stream_info['TOOL_TYPE'] + '_ext.py'
        command = "python " + tool_bin + '/' + ex_py_script + " " + content
        print(util.get_datetime() + " start scan " + stream_info['TOOL_TYPE'] + "...")
        proc = subprocess.Popen(command,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                shell=True,
                                start_new_session=True)
        pid_config.add_pid(str(proc.pid), stream_info["PID_FILE"])
        for line in proc.stdout:
            print(util.get_datetime() + " " + str(line))
        print(util.get_datetime() + " end scan " + stream_info['TOOL_TYPE'])
    except Exception:
        # BUG FIX: re-raise as-is instead of wrapping in a bare Exception,
        # which discarded the original type and traceback.
        raise
    finally:
        if proc is not None:
            proc.terminate()
            proc.wait()
def scan_checkstyle(filename, stream_result_path, skip_filters, pid_file, config_path):
    """Run checkstyle on one Java file and convert its XML report into the
    common ``filename->line->checker->message`` error file.

    ``skip_filters`` is a ';'-separated checker blacklist; it is parsed but
    the actual filtering is commented out below (matching prior behavior).
    """
    filename_result = result.get_result_file_path(stream_result_path, filename)
    checkstyle_err_file = filename_result + ".error"
    checkstyle_xml = filename_result + ".xml"
    current_path = sys.path[0] + '/../'
    checkstyle_bin = os.path.join(current_path, 'tools_dep/checkstyle')
    # The same jar is used on every platform (the old Windows/else branches
    # were byte-identical, so the os_type check was dead weight).
    classpath = checkstyle_bin + '/checkstyle-8.11-all.jar'
    command = ("java -classpath " + classpath +
               " com.puppycrawl.tools.checkstyle.Main -c " + config_path +
               " -f xml " + filename + " >" + checkstyle_xml)
    checkstyle_p_2 = subprocess.Popen(command,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT,
                                      shell=True,
                                      start_new_session=True)
    try:
        pid_config.add_pid(str(checkstyle_p_2.pid), pid_file)
        for line in checkstyle_p_2.stdout:
            print(line)
    finally:
        checkstyle_p_2.terminate()
        checkstyle_p_2.wait()
    # Strip non-XML noise (JVM warnings, audit banners) that leaked into the
    # redirected report: keep only lines starting with '<'.
    # BUG FIX: "rU+" is not a valid mode since Python 3.11 ('U' removed), and
    # removing items from a list while iterating it skipped every other
    # matching line; both replaced with "r+" and a filtering comprehension.
    with open(checkstyle_xml, "r+", encoding='utf-8') as xml_file:
        kept = [line for line in xml_file.readlines() if re.search('^<', line)]
        xml_file.seek(0)
        xml_file.truncate()
        xml_file.write(''.join(kept).strip())
    disable_option_array = skip_filters.split(";")
    if os.path.isfile(checkstyle_xml):
        try:
            with open(checkstyle_err_file, 'w', encoding='utf-8') as logfile:
                tree = ET.ElementTree(file=checkstyle_xml)
                for elem in tree.iter():
                    if "error" == elem.tag:
                        #if elem.attrib['source'] in disable_option_array:
                        #    continue
                        logfile.write(filename + '->' + elem.attrib['line'] +
                                      '->' + elem.attrib['source'] + '->' +
                                      elem.attrib['message'] + '\n')
        except Exception as e:
            print('=>checkstyle_ext.py->scan_checkstyle->ERROR:' + str(e) + checkstyle_xml)
def scan_sensitive(filename, stream_info):
    """Run the sensitive-information scanner on one file and convert its
    ``scan_result:`` JSON payload into the common
    ``filename->line->checker->message`` error file.

    Checkers listed in stream_info['SKIP_CHECKERS'] (';'-separated) are
    filtered out of the report.
    """
    stream_result_path = stream_info['STREAM_RESULT_PATH']
    pid_file = stream_info['PID_FILE']
    stream_name = stream_info['STREAM_NAME']
    proj_owner = stream_info['PROJ_OWNER']
    proj_owner = proj_owner.replace(';', ',')
    skip_checkers = []
    if 'SKIP_CHECKERS' in stream_info and stream_info['SKIP_CHECKERS'] != "":
        skip_checkers = stream_info['SKIP_CHECKERS'].split(";")
    filename_result = result.get_result_file_path(stream_result_path, filename)
    sensitive_err_file = filename_result + ".error"
    sensitive_json = filename_result + ".json"
    tool_path = get_tool_path()
    command = tool_path + " " + stream_name + " " + \
        proj_owner + " " + filename + " -f " + sensitive_json
    sensitive_p_2 = subprocess.Popen(command,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT,
                                     shell=True,
                                     start_new_session=True)
    try:
        pid_config.add_pid(str(sensitive_p_2.pid), pid_file)
        # Drain output so the child never blocks on a full pipe.
        for line in sensitive_p_2.stdout:
            continue
    finally:
        sensitive_p_2.terminate()
        sensitive_p_2.wait()
    if not os.path.isfile(sensitive_json):
        return
    # Read the raw report once so the error handler can dump it too.
    with open(sensitive_json, 'r', encoding='utf-8') as sensitivejsonfile:
        raw_report = sensitivejsonfile.read()
    try:
        reg = re.search("(?<=scan_result:).*", raw_report)
        parsed_json = reg.group(0) if reg else "{ }"
        parsed_json = json.loads(parsed_json)
        with open(sensitive_err_file, 'w', encoding='utf-8') as logfile:
            if "check_report" in parsed_json:
                for file_json in parsed_json["check_report"]:
                    if not str(file_json['rule_name']) in skip_checkers:
                        logfile.write(filename + '->' + str(file_json['line_no']) + '->' +
                                      str(file_json['rule_name']) + '->' +
                                      str(file_json['explanation']) + '\n')
    except Exception as e:
        # BUG FIX: the old handler called .read() on the *path string*
        # (sensitive_json), raising AttributeError and masking the real
        # error; log the already-read report text instead.
        print('=>sensitive_ext.py->scan_sensitive->ERROR: ' + str(e) + "->" +
              sensitive_json + "->" + raw_report)
def scan_eslint(filename, eslintrc, stream_result_path, skip_filters, pid_file, config_path):
    """Run eslint on one file and convert its JSON report into the common
    ``path->line->rule->message->nodeType->column`` error file.

    Aborts early (leaving no error file) if eslint itself crashes with a
    RangeError (e.g. stack overflow on a huge/minified file).
    ``eslintrc`` is accepted for signature compatibility but unused here.
    """
    filename_result = result.get_result_file_path(stream_result_path, filename)
    eslint_err_file = filename_result + ".error"
    eslint_json = filename_result + ".json"
    command = "eslint --no-eslintrc -f json -c " + config_path + " " + filename + " >" + eslint_json
    eslint_p_2 = subprocess.Popen(command,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True,
                                  start_new_session=True)
    try:
        pid_config.add_pid(str(eslint_p_2.pid), pid_file)
        for raw_line in eslint_p_2.stdout:
            # BUG FIX: stdout yields bytes; the old `'RangeError' in line`
            # tested a str against bytes and raised TypeError.  Decode first.
            line = raw_line.decode('utf-8', errors='replace')
            if 'RangeError' in line:
                return  # eslint crashed on this file; skip it (finally still cleans up)
            print(line)
    finally:
        eslint_p_2.terminate()
        eslint_p_2.wait()
    # Parsed but currently unused: filtering below is commented out.
    disable_option_array = skip_filters.split(";")
    if os.path.isfile(eslint_json):
        with open(eslint_json, 'r', encoding='utf-8') as eslintjsonfile:
            try:
                parsed_json = json.load(eslintjsonfile)
                with open(eslint_err_file, 'w', encoding='utf-8') as logfile:
                    for file_json in parsed_json:
                        path = file_json["filePath"]
                        error_list = file_json["messages"]
                        for line_json in error_list:
                            #if not line_json["ruleId"] in disable_option_array:
                            logfile.write(
                                str(path) + "->" + str(line_json["line"]) + "->" +
                                str(line_json["ruleId"]) + "->" +
                                str(line_json["message"]) + "->" +
                                str(line_json["nodeType"]) + "->" +
                                str(line_json["column"]).replace("\n", "\\n") + "\n")
            except ValueError:
                print("parse json error, maybe empty :" + eslint_json)
def scan_ccn(filename, stream_result_path, pid_file, ccn_number):
    """Measure cyclomatic complexity of one file with lizard; functions above
    the ``ccn_number`` threshold are written (via shell redirect) straight
    into the .error file."""
    err_path = result.get_result_file_path(stream_result_path, filename) + ".error"
    if not os.path.isfile(filename):
        return
    lizard_dir = os.path.join(sys.path[0] + '/../', 'tools_dep/lizard')
    cmd = ("python " + lizard_dir + "/lizard.py " + filename +
           " -w -C " + ccn_number + " -L 100000 >" + err_path)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(proc.pid), pid_file)
        for out_line in proc.stdout:
            print(out_line)
    finally:
        proc.terminate()
        proc.wait()
def scan_stylecop(filename, stream_result_path, skip_filters, pid_file, config_path):
    """Run StyleCopCLI (under mono) on one C# file and convert the XML report
    into the common ``filename->line->checker->message`` error file.

    Two always-noisy rules are suppressed in addition to ``skip_filters``
    (';'-separated).  NOTE: changes the process working directory to the
    StyleCop tool dir and does not restore it.
    """
    base = result.get_result_file_path(stream_result_path, filename)
    err_path = base + ".error"
    xml_path = base + ".xml"
    tool_dir = os.path.join(sys.path[0] + '/../', 'tools_dep/stylecop')
    # StyleCopCLI must be launched from its own directory.
    os.chdir(tool_dir)
    cmd = ('mono StyleCopCLI.exe -set ' + config_path + ' -cs "' + filename +
           '" -out ' + xml_path)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(proc.pid), pid_file)
        for out_line in proc.stdout:
            print(out_line)
    finally:
        proc.terminate()
        proc.wait()
    skipped = skip_filters.split(";")
    if not os.path.isfile(xml_path):
        return
    try:
        with open(err_path, 'w', encoding='utf-8') as logfile:
            for elem in ET.ElementTree(file=xml_path).iter():
                if elem.tag != "Violation":
                    continue
                rule = elem.attrib['Rule']
                if rule in skipped or rule in (
                        'CurlyBracketsForMultiLineStatementsMustNotShareLine',
                        'SyntaxException'):
                    continue
                logfile.write(filename + '->' + elem.attrib['LineNumber'] + '->' +
                              rule + '->' + elem.text + '\n')
    except Exception as e:
        print('=>stylecop_ext.py->scan_stylecop->ERROR:' + str(e) + xml_path)
def scan_phpcs(filename, stream_info):
    """Run PHP_CodeSniffer on one file and convert its XML report into the
    common ``filename->line->checker->message`` error file.

    Both <error> and <warning> elements are reported; checkers listed in
    stream_info['SKIP_CHECKERS'] (';'-separated) are dropped.
    """
    stream_result_path = stream_info['STREAM_RESULT_PATH']
    pid_file = stream_info['PID_FILE']
    phpcs_standard = stream_info['PHPCS_STANDARD']
    skip_checkers = []
    if 'SKIP_CHECKERS' in stream_info and stream_info['SKIP_CHECKERS'] != "":
        skip_checkers = stream_info['SKIP_CHECKERS'].split(";")
    base = result.get_result_file_path(stream_result_path, filename)
    err_path = base + ".error"
    xml_path = base + ".xml"
    tool_dir = os.path.join(sys.path[0] + '/../', 'tools_dep/phpcs')
    cmd = ("php " + tool_dir + "/phpcs.phar " + "--standard=" + phpcs_standard +
           " " + filename + " --report=xml > " + xml_path)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(proc.pid), pid_file)
        for raw in proc.stdout:
            print(str(raw.decode('utf-8')))
    finally:
        proc.terminate()
        proc.wait()
    if not os.path.isfile(xml_path):
        return
    try:
        with open(err_path, 'w', encoding='utf-8') as logfile:
            for elem in ET.ElementTree(file=xml_path).iter():
                if elem.tag in ("error", "warning") and \
                        elem.attrib['source'] not in skip_checkers:
                    logfile.write(filename + '->' + elem.attrib['line'] + '->' +
                                  elem.attrib['source'] + '->' + elem.text + '\n')
    except Exception as e:
        print('=>phpcs_ext.py->scan_phpcs->ERROR: ' + str(e) + "->" + xml_path)
def scan_pylint(filename, pylint_path, stream_result_path, disable_option, py_path, pid_file, config_path):
    """Run pylint (a checkout living in ``pylint_path``) on one file and
    convert its JSON report into the common
    ``path->line->symbol->message`` error file.

    ``disable_option`` and ``py_path`` are accepted for signature
    compatibility but are not used here.  NOTE: changes the process working
    directory to ``pylint_path`` and does not restore it.
    """
    filename_result = result.get_result_file_path(stream_result_path, filename)
    pylint_err_file = filename_result + ".error"
    pylint_json = filename_result + ".json"
    command = ("python lint.py --output-format=json --reports=n --rcfile " +
               config_path + " " + filename + " >" + pylint_json)
    pylint_p_2 = None  # BUG FIX: may never be assigned if chdir/Popen fails
    try:
        os.chdir(pylint_path)
        pylint_p_2 = subprocess.Popen(command,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT,
                                      shell=True,
                                      start_new_session=True)
        pid_config.add_pid(str(pylint_p_2.pid), pid_file)
        for line in pylint_p_2.stdout:
            print(line)
    finally:
        # BUG FIX: the old code referenced pylint_p_2 unconditionally here,
        # raising UnboundLocalError (and masking the real error) whenever the
        # chdir or Popen above failed.
        if pylint_p_2 is not None:
            pylint_p_2.terminate()
            pylint_p_2.wait()
    if os.path.isfile(pylint_json):
        with open(pylint_json, 'r', encoding='utf-8') as pylintjsonfile:
            try:
                parsed_json = json.load(pylintjsonfile)
                with open(pylint_err_file, 'w', encoding='utf-8') as logfile:
                    for line_json in parsed_json:
                        logfile.write(
                            str(line_json["path"]) + "->" +
                            str(line_json["line"]) + "->" +
                            str(line_json["symbol"]) + "->" +
                            str(line_json["message"]).replace("\n", "\\n") + "\n")
            except ValueError:
                # Empty or invalid JSON simply means pylint found nothing.
                pass
def check():
    """Build the project, run SpotBugs over the compiled classes, and split
    the resulting XML report into one ``.error`` file per source file.

    Configuration is read from the module-level ``stream_info`` dict.
    Exits the process (``exit(1)``) when a mandatory option is missing.
    """
    current_path = sys.path[0]
    project_file_list = ""
    stream_result_path = ""
    pool_processes = ""
    skip_filters = ""
    pid_file = ""
    config_path = ""
    if 'PROJECT_FILE_LIST' in stream_info:
        project_file_list = stream_info['PROJECT_FILE_LIST']
    if 'STREAM_RESULT_PATH' in stream_info:
        stream_result_path = stream_info['STREAM_RESULT_PATH']
    if 'POOL_PROCESSES' in stream_info:
        pool_processes = stream_info['POOL_PROCESSES']
    if 'SKIP_CHECKERS' in stream_info:
        skip_filters = stream_info['SKIP_CHECKERS']
    if 'PID_FILE' in stream_info:
        pid_file = stream_info['PID_FILE']
    if 'CONFIG_PATH' in stream_info:
        config_path = stream_info['CONFIG_PATH']
    if 'STREAM_CODE_PATH' in stream_info:
        stream_code_path = stream_info['STREAM_CODE_PATH']
    # All of these are mandatory; bail out early if any is missing.
    if project_file_list == '' or stream_result_path == '' or pool_processes == '' or pid_file == '':
        print('below option is empty!')
        print('project_file_list: ' + project_file_list)
        print('stream_result_path: ' + stream_result_path)
        print('pool_processes: ' + pool_processes)
        print('pid_file: ' + pid_file)
        exit(1)
    os.chdir(stream_code_path)
    # Run the project's build command (SpotBugs needs compiled classes).
    if 'PROJECT_BUILD_COMMAND' in stream_info:
        os.system(stream_info['PROJECT_BUILD_COMMAND'])
    # Configure where the class-file list will be written.
    stream_info['PROJECT_CLASS_FILE_LIST'] = os.path.join(
        stream_info["STREAM_DATA_PATH"], 'project_class_file_list.txt')
    # Generate the list of compiled .class files.
    file.general_class_list_file(stream_info)
    # Nothing was compiled -> nothing to analyze.
    if not os.path.isfile(stream_info['PROJECT_CLASS_FILE_LIST']):
        return
    current_path = sys.path[0] + '/../'
    spotbugs_lib = os.path.join(current_path, 'tools_dep/spotbugs/lib')
    spotbugs_ouput_xml = os.path.join(stream_info["STREAM_DATA_PATH"],
                                      'spotbugs_ouput.xml')
    command = 'java -jar ' + spotbugs_lib + '/spotbugs.jar -textui -include ' + config_path + ' -xdocs -output ' + spotbugs_ouput_xml + ' -analyzeFromFile ' + stream_info[
        'PROJECT_CLASS_FILE_LIST']
    spotbugs_p = subprocess.Popen(command,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True,
                                  start_new_session=True)
    try:
        pid_config.add_pid(str(spotbugs_p.pid), pid_file)
        # Drain stdout so the child cannot block on a full pipe.
        for line in spotbugs_p.stdout:
            pass
    finally:
        spotbugs_p.terminate()
        spotbugs_p.wait()
    os.chdir(current_path)
    if os.path.isfile(spotbugs_ouput_xml):
        try:
            sub_root = ET.ElementTree(file=spotbugs_ouput_xml).getroot()
            for elem in sub_root.findall("file"):
                try:
                    # Map the Java class name back to its source-file path.
                    reg_path = elem.attrib['classname'].replace('.', '/') + '.java'
                    file_path = file.find_project_file_list_path(
                        reg_path, project_file_list)
                    if '' == file_path:
                        continue
                    filename_result = result.get_result_file_path(
                        stream_result_path, file_path)
                    spotbugs_err_file = filename_result + ".error"
                    with open(spotbugs_err_file, 'w', encoding='utf-8') as logfile:
                        for sub_elem in elem.iter():
                            try:
                                logfile.write(file_path + '->' +
                                              sub_elem.attrib['line'] + '->' +
                                              sub_elem.attrib['type'] + '->' +
                                              sub_elem.attrib['message'] + '\n')
                            except:
                                # Elements without line/type/message (e.g. the
                                # <file> node itself) are silently skipped.
                                pass
                except:
                    # Best effort per class; one bad entry must not abort
                    # the whole report.
                    pass
        except Exception as e:
            print('=>spotbugs_ext.py->check->ERROR: ' + str(e) + "->" + spotbugs_ouput_xml)
def check():
    """Run PMD CPD (copy-paste / duplicate-code detection) once per target
    language, replace each ``<codefragment>`` in the output with an md5
    ``<fingerprint>``, then merge the per-language XML files and parse them
    into the project file list and dupc JSON.

    Configuration is read from the module-level ``stream_info`` dict.
    Exits the process (``exit(1)``) when a mandatory option is missing.
    """
    stream_code_path = ""
    project_dupc_xml = ""
    pool_processes = ""
    pid_file = ""
    skip_paths_arg = ""
    suffix_list = []
    if 'STREAM_CODE_PATH' in stream_info:
        stream_code_path = stream_info['STREAM_CODE_PATH']
    if 'PROJECT_DUPC_XML' in stream_info:
        project_dupc_xml = stream_info['PROJECT_DUPC_XML']
    if 'POOL_PROCESSES' in stream_info:
        pool_processes = stream_info['POOL_PROCESSES']
    if 'PID_FILE' in stream_info:
        pid_file = stream_info['PID_FILE']
    if 'TARGET_SUBFIXS' in stream_info:
        suffix_list = stream_info['TARGET_SUBFIXS'].split(';')
    # When only some sub-paths should be scanned, turn everything outside
    # them into additional skip paths.
    if 'SUB_CODE_PATH_LIST' in stream_info and stream_info['SUB_CODE_PATH_LIST'] != '':
        sub_code_path_list = stream_info['SUB_CODE_PATH_LIST'].split(',')
        sub_path_list = [''.join(stream_code_path + '/' + path).replace('//', '/')
                         for path in sub_code_path_list]
        find_path = stream_code_path
        stream_info['SKIP_PATHS'] += util.add_skip_path('', stream_code_path, find_path, sub_path_list)
    if 'SKIP_PATHS' in stream_info:
        skip_path_list = stream_info['SKIP_PATHS'].split(';')
        skip_paths_arg = get_skip_paths_arg(stream_code_path, skip_path_list)
    # All of these are mandatory; bail out early if any is missing.
    if stream_code_path == '' or project_dupc_xml == '' or pool_processes == '' or pid_file == '':
        print('below option is empty!')
        print('stream_code_path: ' + stream_code_path)
        print('project_dupc_xml: ' + project_dupc_xml)
        print('pool_processes: ' + pool_processes)
        print('pid_file: ' + pid_file)
        exit(1)
    current_path = sys.path[0] + '/../'
    dupc_tool_path = os.path.join(current_path, 'tools_dep/dupc/bin/run.sh')
    os.chmod(dupc_tool_path, 0o755)
    # Map file suffixes to CPD language names.
    suffix_list = map_suffix_list(suffix_list)
    language_xml_list = []
    print(suffix_list)
    for suffix in suffix_list:
        command = dupc_tool_path + " cpd " + "--minimum-tokens 100 --format xml --encoding utf-8 " + \
            " --files " + stream_code_path + " --language " + suffix + " --skip-lexical-errors" + skip_paths_arg + " 2>/dev/null"
        dupc_p = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True, start_new_session=True)
        try:
            pid_config.add_pid(str(dupc_p.pid), pid_file)
            xml_suffix = "_" + suffix + ".xml"
            language_dupc_xml = project_dupc_xml.replace(".xml", xml_suffix)
            with open(language_dupc_xml, "a+", encoding='utf-8') as file:
                is_codefrag = False
                codefrag = ""
                indent = ""
                for line in dupc_p.stdout:
                    line_str = str(line.decode("utf-8"))
                    # Do not write <codefragment> bodies into the XML; replace
                    # each fragment with an md5 <fingerprint> instead.
                    if "<codefragment>" in line_str and "</codefragment>" in line_str:
                        # The fragment opens and closes on one line.
                        is_codefrag = False
                        codefrag = ""
                        indent = re.search(".*(?=<codefragment)", line_str).group(0)
                        finger_print = indent + "<fingerprint>" + get_md5(line_str) + "</fingerprint>\n"
                        file.write(finger_print)
                    elif "<codefragment>" in line_str:
                        # Fragment starts here; accumulate until it closes.
                        is_codefrag = True
                        codefrag += line_str
                        indent = re.search(".*(?=<codefragment)", line_str).group(0)
                    elif "</codefragment>" in line_str:
                        # Fragment complete: emit its fingerprint.
                        is_codefrag = False
                        codefrag += line_str
                        finger_print = indent + "<fingerprint>" + get_md5(codefrag) + "</fingerprint>\n"
                        codefrag = ""
                        indent = ""
                        file.write(finger_print)
                    else:
                        if is_codefrag:
                            codefrag += line_str
                        else:
                            # Ordinary XML line: pass it through unchanged.
                            file.write(line_str)
            language_xml_list.append(language_dupc_xml)
        finally:
            dupc_p.terminate()
            dupc_p.wait()
    os.chdir(current_path)
    merge_language_xml_files(project_dupc_xml, language_xml_list)
    parse_project_dupc_xml_file_list(project_dupc_xml, stream_info['PROJECT_FILE_LIST'])
    parse_project_dupc_xml_to_json(project_dupc_xml, stream_info['PROJECT_FILE_DUPC_JSON'])
def main_input(message):
    """Main entry point of a tool scan.

    Loads and merges all properties, registers pids, (in OFFLINE mode)
    reports queueing/download progress and downloads the source code, then
    dispatches the tool-specific scan, uploads results and cleans up.
    Step numbers reported via ``codecc_upload_task_log``: 1 = queue,
    2 = download, 3 = scan, 4 = submit (flag 3 = started, flag 1 = done).

    NOTE(review): this body was reconstructed from whitespace-collapsed
    source; the exact extent of the ``if 'OFFLINE'`` block (assumed here to
    cover steps 1-2 only) should be confirmed against the original layout.
    """
    global params_sub
    start_date = util.get_datetime()  # scan start timestamp (for the final summary line)
    config.load_properties()
    config.get_stream_name_and_tool(message)
    tool_type = config.properties_info['TOOL_TYPE']
    stream_name = config.properties_info['STREAM_NAME']
    # Load multi-tool properties.
    config.load_mutil_tool_properties(tool_type)
    # Map CodeCC API data.
    config.map_properties_info(offline_properties_info)
    # Map command-line properties info.
    config.map_offline_properties_info(offline_properties_info)
    # Verify all properties.
    # config.verify_info()
    # Update the properties.
    config.properties_update()
    config.generate_config(config.properties_info)
    if "SUB_PATH" in config.properties_info:
        os.environ["PATH"] = config.properties_info[
            'SUB_PATH'] + os.pathsep + os.environ["PATH"]
    # Start by clearing the pid file, then register our own pid.
    pid_config.clean_pid(config.properties_info["PID_FILE"])
    pid_config.add_pid(str(os.getpid()), config.properties_info["PID_FILE"])
    if 'OFFLINE' in config.properties_info:
        # Step 1 start: queueing.
        params_sub = {
            'stepNum': str(1),
            'startTime': str(int(time.time())),
            'endTime': '0',
            'msg': '',
            'flag': 3
        }
        codecc_web.codecc_upload_task_log(config.params_root, params_sub)
        # Step 1 end: queueing done.
        params_sub = {
            'stepNum': str(1),
            'startTime': '0',
            'endTime': str(int(time.time())),
            'msg': '',
            'flag': 1
        }
        codecc_web.codecc_upload_task_log(config.params_root, params_sub)
        # Step 2 start: download.
        params_sub = {
            'stepNum': str(2),
            'startTime': str(int(time.time())),
            'endTime': '0',
            'msg': '',
            'flag': 3
        }
        codecc_web.codecc_upload_task_log(config.params_root, params_sub)
        # Download source code.
        latest_info = main.download_code(config.properties_info)
        # Step 2 end: download done (msg carries the latest revision info).
        params_sub = {
            'stepNum': str(2),
            'startTime': '0',
            'endTime': str(int(time.time())),
            'msg': str(latest_info),
            'flag': 1
        }
        codecc_web.codecc_upload_task_log(config.params_root, params_sub)
    # For git projects, pick up the local remote url and branch.
    config.properties_git_info_update()
    # Step 3 start: scan.
    scan_finish_message = ""
    params_sub = {
        'stepNum': str(3),
        'startTime': str(int(time.time())),
        'endTime': '0',
        'msg': '',
        'flag': 3
    }
    codecc_web.codecc_upload_task_log(config.params_root, params_sub)
    # Per-tool PATH tweaks and file-list filtering before the actual scan.
    if tool_type == "cpplint":
        # cpplint needs Python 2.7 first on PATH.
        if "SUB_PATH" in config.properties_info and 'PY27_PATH' in config.properties_info:
            config.properties_info['SUB_PATH'] = config.properties_info[
                "PY27_PATH"] + os.pathsep + config.properties_info["SUB_PATH"]
        file.skip(config.properties_info)
    elif tool_type == "pylint" or tool_type == "eslint" or tool_type == "checkstyle" or \
            tool_type == "stylecop" or tool_type == "ccn" or tool_type == "detekt" or \
            tool_type == "sensitive" or tool_type == "phpcs" or tool_type == "occheck" or \
            tool_type == "spotbugs":
        # File-based tools need Python 3.5 first on PATH plus a filtered file list.
        if "SUB_PATH" in config.properties_info and 'PY35_PATH' in config.properties_info:
            config.properties_info['SUB_PATH'] = config.properties_info[
                "PY35_PATH"] + os.pathsep + config.properties_info["SUB_PATH"]
        file.skip(config.properties_info)
    elif tool_type == "dupc" or tool_type == "goml":
        # Whole-project tools: PATH tweak only, no per-file list.
        if "SUB_PATH" in config.properties_info and 'PY35_PATH' in config.properties_info:
            config.properties_info['SUB_PATH'] = config.properties_info[
                "PY35_PATH"] + os.pathsep + config.properties_info["SUB_PATH"]
    main.main_scan(config.properties_info)
    # Generate scm blame and info for the scanned files.
    scm.generate_blame_and_info(config.properties_info)
    # For goml, surface the go-build log as the scan-step message.
    if tool_type == "goml" and os.path.exists(
            config.properties_info['STREAM_DATA_PATH'] + '/go_build.log'):
        with open(config.properties_info['STREAM_DATA_PATH'] + '/go_build.log',
                  "r", encoding='utf-8') as go_build_file:
            scan_finish_message = go_build_file.read()
    # Step 3 end: scan done.
    params_sub = {
        'stepNum': str(3),
        'startTime': '0',
        'endTime': str(int(time.time())),
        'msg': scan_finish_message,
        'flag': 1
    }
    codecc_web.codecc_upload_task_log(config.params_root, params_sub)
    # Step 4 start: submit.
    params_sub = {
        'stepNum': str(4),
        'startTime': str(int(time.time())),
        'endTime': '0',
        'msg': '',
        'flag': 3
    }
    codecc_web.codecc_upload_task_log(config.params_root, params_sub)
    # JSON the scan data and submit it.
    if os.path.exists(config.properties_info["STREAM_DATA_PATH"]):
        if tool_type == "dupc":
            main.dupc_generate_data_json(config.properties_info)
        else:
            main.generate_data_json(config.properties_info)
        # Submit the average cyclomatic complexity as well.
        if tool_type == "ccn":
            codecc_web.codecc_upload_avg_ccn(
                stream_name, config.properties_info['TASK_ID'],
                config.properties_info['PROJECT_AVG_FILE_CC_LIST'])
    # Step 4 end: submit done.
    params_sub = {
        'stepNum': str(4),
        'startTime': '0',
        'endTime': str(int(time.time())),
        'msg': '',
        'flag': 1
    }
    codecc_web.codecc_upload_task_log(config.params_root, params_sub)
    print(config)
    if not 'IGNORE_DELETE_LOG' in config.properties_info:
        # Delete temporary files (ssh key and the scan data directory).
        private_key = "/tmp/." + stream_name + '_' + tool_type + "_private_key"
        file.delete_file_folder(private_key)
        if os.path.exists(config.properties_info["STREAM_DATA_PATH"]):
            file.delete_file_folder(config.properties_info["STREAM_DATA_PATH"])
    # Finally clear the pid file.
    pid_config.clean_pid(config.properties_info["PID_FILE"])
    finish_date = util.get_datetime()
    print(tool_type + ' scan finish: ' + start_date + ' to ' + finish_date)
def check():
    """Set up GOPATH, build the Go project, run gometalinter over it and
    collect the JSON warnings into PROJECT_GOML_JSON, then post-process the
    results into per-file lists.

    Configuration is read from the module-level ``stream_info`` dict; a
    failed ``go build`` flips the global ``go_build_status`` to 'false' and
    disables the linters in ``go_build_faild_skip_linter``.
    Exits the process (``exit(1)``) when a mandatory option is missing.
    """
    global go_build_status
    stream_code_path = ""
    project_goml_json = ""
    skip_paths = ""                    # accumulated --skip=... arguments (filtered paths)
    default_disable_linter = ""        # linters disabled by default
    build_failed_disable_linter = ""   # linters disabled when the build failed
    stream_result_path = ""
    current_path = sys.path[0]
    scan_path = ""
    go_path = ""
    bug_data_list = []
    go_build_message = ""
    checkers_options = ''
    scan_finish_message = ''
    if 'STREAM_CODE_PATH' in stream_info:
        stream_code_path = stream_info['STREAM_CODE_PATH']
    if 'PROJECT_GOML_JSON' in stream_info:
        project_goml_json = stream_info['PROJECT_GOML_JSON']
    for linter in default_skip_linter:
        default_disable_linter += " --disable=" + linter + " "
    if 'STREAM_RESULT_PATH' in stream_info:
        stream_result_path = stream_info['STREAM_RESULT_PATH']
    if "GOROOT" in stream_info:
        os.environ["GOROOT"] = stream_info['GOROOT']
    if "GO15VENDOREXPERIMENT" in stream_info:
        os.environ["GO15VENDOREXPERIMENT"] = stream_info[
            'GO15VENDOREXPERIMENT']
    # When only some sub-paths should be scanned, turn everything outside
    # them into additional skip paths.
    if 'SUB_CODE_PATH_LIST' in stream_info and stream_info[
            'SUB_CODE_PATH_LIST'] != '':
        sub_code_path_list = stream_info['SUB_CODE_PATH_LIST'].split(',')
        sub_path_list = [
            ''.join(stream_code_path + '/' + path).replace('//', '/')
            for path in sub_code_path_list
        ]
        find_path = stream_code_path
        stream_info['SKIP_PATHS'] += util.add_skip_path(
            '', stream_code_path, find_path, sub_path_list)
    if "SKIP_PATHS" in stream_info:
        skip_path_list = stream_info['SKIP_PATHS'].split(';')
        for skip_path in skip_path_list:
            # Strip the regex decorations down to a plain directory name.
            skip_path = skip_path.replace(".*/", '').replace("/.*", '').replace(
                ".*", '').replace("*", '')
            if skip_path.replace(' ', '') == "":
                continue
            # gometalinter paths are relative to src/.
            if re.search("^src/", skip_path):
                skip_path = skip_path[4:]
            skip_paths += " --skip=\"" + skip_path + "\" "
    # CHECKER_OPTIONS is a JSON dict of JSON-encoded {flag: value} dicts;
    # flatten it into extra --flag=value arguments.
    if 'CHECKER_OPTIONS' in stream_info and stream_info[
            'CHECKER_OPTIONS'] != '':
        checker_options = json.loads(stream_info['CHECKER_OPTIONS'])
        for checker_option in checker_options.values():
            checker_option = json.loads(checker_option)
            keys = checker_option.keys()
            for key in keys:
                checkers_options += ' --' + key + '=' + checker_option[key]
    # All of these are mandatory; bail out early if any is missing.
    if stream_code_path == '' or project_goml_json == '' or stream_result_path == '':
        print('below option is empty!')
        print('stream_code_path: ' + stream_code_path)
        print('project_goml_json: ' + project_goml_json)
        print('stream_result_path: ' + stream_result_path)
        exit(1)
    go_path = stream_code_path
    workspace = stream_code_path
    if "REL_PATH" in stream_info and stream_info['REL_PATH'] != '':
        go_path = ''.join(go_path.replace(stream_info['REL_PATH'], ''))
        workspace = go_path
    # Append any extra, existing GO_PATH entries (';'-separated, relative).
    if "GO_PATH" in stream_info and stream_info['GO_PATH'] != '':
        rel_go_path_list = stream_info['GO_PATH'].split(';')
        for rel_go_path in rel_go_path_list:
            if os.path.exists(workspace + '/' + rel_go_path):
                go_path += os.pathsep + workspace + '/' + rel_go_path
    os.environ["GOPATH"] = go_path
    os.chdir(stream_code_path)
    print('GOPATH: ' + go_path)
    # Build first, with C locale so error messages are parseable.
    command = "go build ./..."
    go_build_p = subprocess.Popen(command,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True,
                                  start_new_session=True,
                                  env=dict(os.environ, LANG="C", LC_ALL="C"))
    try:
        pid_config.add_pid(str(go_build_p.pid), stream_info["PID_FILE"])
        for line in go_build_p.stdout:
            line = str(line.decode('utf-8'))
            # Collect only the "cannot find package ... (from $GOPATH)" lines.
            if "" != line and 'GOPATH' in line:
                go_build_message += line.replace(workspace, '$WORKSPACE').replace(
                    '(from $GOPATH)', '')
    finally:
        go_build_p.terminate()
        go_build_p.wait()
    # Missing packages were reported: warn the user and mark the build failed.
    if "WORKSPACE" in go_build_message:
        scan_finish_message += "Please check your GOPATH para in CodeCC. If you don't upload all golang dependent libraries to svn/git, please ignore this warning. \nCannot find below package: \n" + go_build_message
        print(scan_finish_message)
        if 'STREAM_DATA_PATH' in stream_info and os.path.exists(
                stream_info['STREAM_DATA_PATH']):
            with open(stream_info['STREAM_DATA_PATH'] + '/go_build.log',
                      "w", encoding='utf-8') as go_build_file:
                go_build_file.write(scan_finish_message)
        go_build_status = 'false'
    # A failed build disables the linters that need compiled packages.
    if 'false' == go_build_status:
        for linter in go_build_faild_skip_linter:
            build_failed_disable_linter += " --disable=" + linter + " "
    print("go gometalinter ./...")
    command = "gometalinter ./... --sort=path --deadline=60m --json --enable-all " + checkers_options + default_disable_linter + " " + build_failed_disable_linter + " " + skip_paths + " --exclude=vendor -j 2"
    goml_p = subprocess.Popen(command,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              shell=True,
                              start_new_session=True)
    try:
        pid_config.add_pid(str(goml_p.pid), stream_info["PID_FILE"])
        for line in goml_p.stdout:
            line = str(line.decode('utf-8'))
            # Skip lines that are not warning records.
            if not 'severity' in line:
                continue
            # Each record ends with '},' inside the JSON array; strip the
            # trailing comma so the line parses as a standalone object.
            result = json.loads(line.replace('},', '}'))
            if 'vet' == result['linter']:
                result['message'] = 'vet/vet->' + result['message']
            if 'gas' == result['linter']:
                result['message'] = result['message'].replace(',xxx', '')
            # Drop warnings caused purely by the failed build.
            if build_error_check(result, stream_info):
                continue
            bug_data_list.append(result)
        with open(project_goml_json, "a+", encoding='utf-8') as file:
            if len(bug_data_list) > 0:
                file.write(json.dumps(bug_data_list))
    finally:
        goml_p.terminate()
        goml_p.wait()
    os.chdir(current_path)
    parse_project_goml_json_file_list(stream_info)
    parse_project_goml_json_file_error(stream_info)