Example #1
def scan_occheck(filename, stream_result_path, pid_file, config_path):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    occheck_err_file = filename_result + ".error"
    occheck_json = filename_result + ".json"
    #print("occheck scan: " + filename)
    current_path = sys.path[0] + '/../'
    occheck_bin = os.path.join(current_path, 'tools_dep/occheck')
    command = "python " + occheck_bin + "/occheck.py " + filename + " --config " + config_path + " -f json -o " + occheck_json
    occheck_p_2 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, \
        shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(occheck_p_2.pid), pid_file)
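        # Drain the child's stdout so the pipe buffer cannot fill up and block occheck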
        for line in occheck_p_2.stdout:
            continue
    finally:
        occheck_p_2.terminate()
        occheck_p_2.wait()

    if os.path.isfile(occheck_json):
        try:
            with open(occheck_json, 'r', encoding='utf-8') as occheckjsonfile:
                parsed_json = json.load(occheckjsonfile)
                with open(occheck_err_file, 'w', encoding='utf-8') as logfile:
                    for checker_json in parsed_json:
                        logfile.write(filename + '->' + str(checker_json['Line']) + '->' + \
                            checker_json['CheckName'] + '->' + checker_json['Message'] + '\n')
        except Exception as e:
            print('=>occheck_ext.py->scan_occheck->ERROR: ' + str(e) + "->" +
                  occheck_json)
Example #2
def scan_detekt(filename, stream_result_path, pid_file, config_path):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    detekt_err_file = filename_result + ".error"
    detekt_xml = filename_result + ".xml"
    #print("detekt scan: " + filename)
    current_path = sys.path[0] + '/../'
    detekt_bin = os.path.join(current_path, 'tools_dep/detekt')
    command = "java -jar " + detekt_bin + "/detekt-cli-1.0.0-RC14-all.jar " + "-i " + \
        filename + " -c " + config_path + " -r xml:" + detekt_xml
    detekt_p_2 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, \
        shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(detekt_p_2.pid), pid_file)
        for line in detekt_p_2.stdout:
            pass
    finally:
        detekt_p_2.terminate()
        detekt_p_2.wait()

    if os.path.isfile(detekt_xml):
        try:
            with open(detekt_err_file, 'w', encoding='utf-8') as logfile:
                tree = ET.ElementTree(file=detekt_xml)
                for elem in tree.iter():
                    if elem.tag == "error":
                        checker = elem.attrib['source'].replace("detekt.", "")
                        logfile.write(filename + '->' + elem.attrib['line'] + '->' + \
                            checker + '->' + elem.attrib['message'] + '\n')
        except Exception as e:
            print('=>detekt_ext.py->scan_detekt->ERROR: ' + str(e) + "->" +
                  detekt_xml)
Example #3
def scan_cpplint(cpplint_file, filename, stream_result_path, skip_filters,
                 open_checkers, checkers_options):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    cpplint_error_file = filename_result + ".error"
    filters = '--filter=' + skip_filters + ',' + open_checkers
    cpplint_path = sys.path[0] + '/../tools_dep/cpplint/' + cpplint_file + ".py"
    command = "python " + cpplint_path + " --output=codecc " + checkers_options + "  " + filters + " " + filename + " >" + cpplint_error_file + ' 2>&1'
    os.system(command)
Example #4
def scan_checkstyle(filename, stream_result_path, skip_filters, pid_file,
                    config_path):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    checkstyle_err_file = filename_result + ".error"
    checkstyle_xml = filename_result + ".xml"
    #print("checkstyle scan: "+filename)
    current_path = sys.path[0] + '/../'
    checkstyle_bin = os.path.join(current_path, 'tools_dep/checkstyle')
    classpath = checkstyle_bin + '/checkstyle-8.11-all.jar'
    command = "java  -classpath  " + classpath + " com.puppycrawl.tools.checkstyle.Main -c " + config_path + " -f xml " + filename + " >" + checkstyle_xml
    checkstyle_p_2 = subprocess.Popen(command,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT,
                                      shell=True,
                                      start_new_session=True)
    try:
        pid_config.add_pid(str(checkstyle_p_2.pid), pid_file)
        for line in checkstyle_p_2.stdout:
            print(line)
    finally:
        checkstyle_p_2.terminate()
        checkstyle_p_2.wait()

    with open(checkstyle_xml, "rU+", encoding='utf-8') as file:
        lines = file.readlines()
        for line in lines:
            if not re.search('^<', line):
                lines.remove(line)
        lines_str = ''.join(lines).strip()
        file.seek(0)
        file.truncate()
        file.write(lines_str)

    #print("parse xml: "+checkstyle_xml)
    disable_option_array = skip_filters.split(";")
    if os.path.isfile(checkstyle_xml):
        try:
            with open(checkstyle_err_file, 'w', encoding='utf-8') as logfile:
                tree = ET.ElementTree(file=checkstyle_xml)
                for elem in tree.iter():
                    if "error" == elem.tag:
                        #if elem.attrib['source'] in disable_option_array:
                        #   continue
                        logfile.write(filename + '->' + elem.attrib['line'] +
                                      '->' + elem.attrib['source'] + '->' +
                                      elem.attrib['message'] + '\n')
        except Exception as e:
            print('=>checkstyle_ext.py->scan_checkstyle->ERROR:' + str(e) +
                  checkstyle_xml)
Example #5
def scan_sensitive(filename, stream_info):
    stream_result_path = stream_info['STREAM_RESULT_PATH']
    pid_file = stream_info['PID_FILE']
    stream_name = stream_info['STREAM_NAME']
    proj_owner = stream_info['PROJ_OWNER']
    proj_owner = proj_owner.replace(';', ',')
    skip_checkers = []
    if 'SKIP_CHECKERS' in stream_info and stream_info['SKIP_CHECKERS'] != "":
        skip_checkers = stream_info['SKIP_CHECKERS'].split(";")
    filename_result = result.get_result_file_path(stream_result_path, filename)
    sensitive_err_file = filename_result + ".error"
    sensitive_json = filename_result + ".json"
    #print("sensitive scan: " + filename)
    tool_path = get_tool_path()
    command = tool_path + " " + stream_name + " " + \
        proj_owner + " " + filename + " -f " + sensitive_json
    sensitive_p_2 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, \
        shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(sensitive_p_2.pid), pid_file)
        for line in sensitive_p_2.stdout:
            continue
            #line = str(line.decode('utf-8'))
            #print(line)
    finally:
        sensitive_p_2.terminate()
        sensitive_p_2.wait()

    if os.path.isfile(sensitive_json):
        with open(sensitive_json, 'r', encoding='utf-8') as sensitivejsonfile:
            try:
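                # The tool writes "scan_result:<json>" into the output file; extract the
                # JSON payload, defaulting to an empty object if the marker is missing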
                reg = re.search("(?<=scan_result:).*",
                                sensitivejsonfile.read())
                parsed_json = reg.group(0) if reg else "{ }"
                parsed_json = json.loads(parsed_json)
                with open(sensitive_err_file, 'w',
                          encoding='utf-8') as logfile:
                    if "check_report" in parsed_json:
                        for file_json in parsed_json["check_report"]:
                            if str(file_json['rule_name']) not in skip_checkers:
                                logfile.write(filename + '->' + str(file_json['line_no']) + '->' +
                                              str(file_json['rule_name']) + '->' +
                                              str(file_json['explanation']) + '\n')
            except Exception as e:
                print('=>sensitive_ext.py->scan_sensitive->ERROR: ' + str(e) +
                      "->" + sensitive_json)
Example #6
def scan_eslint(filename, eslintrc, stream_result_path, skip_filters, pid_file,
                config_path):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    eslint_err_file = filename_result + ".error"
    eslint_json = filename_result + ".json"
    #print("eslint scan: "+filename)
    current_path = sys.path[0] + '/../'
    eslint_bin = os.path.join(current_path, 'tools_dep/eslintrc')
    command = "eslint --no-eslintrc -f json -c " + config_path + " " + filename + " >" + eslint_json
    #print(command)
    eslint_p_2 = subprocess.Popen(command,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True,
                                  start_new_session=True)
    try:
        pid_config.add_pid(str(eslint_p_2.pid), pid_file)
        for line in eslint_p_2.stdout:
            line = line.decode('utf-8', errors='ignore')
            # Bail out on this file if eslint reports a RangeError
            if 'RangeError' in line:
                return
            print(line)
    finally:
        eslint_p_2.terminate()
        eslint_p_2.wait()

    #print("parse json: "+eslint_json)
    disable_option_array = skip_filters.split(";")
    if os.path.isfile(eslint_json):
        with open(eslint_json, 'r', encoding='utf-8') as eslintjsonfile:
            try:
                parsed_json = json.load(eslintjsonfile)
                with open(eslint_err_file, 'w', encoding='utf-8') as logfile:
                    for file_json in parsed_json:
                        path = file_json["filePath"]
                        error_list = file_json["messages"]
                        for line_json in error_list:
                            #if not line_json["ruleId"] in disable_option_array:
                            logfile.write(
                                str(path) + "->" + str(line_json["line"]) +
                                "->" + str(line_json["ruleId"]) + "->" +
                                str(line_json["message"]) + "->" +
                                str(line_json["nodeType"]) + "->" +
                                str(line_json["column"]).replace("\n", "\\n") +
                                "\n")
            except ValueError:
                print("parse json error, maybe empty :" + eslint_json)
Example #7
def scan_ccn(filename, stream_result_path, pid_file, ccn_number):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    ccn_err_file = filename_result + ".error"
    if os.path.isfile(filename):
        current_path = sys.path[0] + '/../'
        ccn_bin = os.path.join(current_path, 'tools_dep/lizard')
        command = "python " + ccn_bin + "/lizard.py " + filename + " -w -C " + ccn_number + " -L 100000 >" + ccn_err_file
        ccn_p_2 = subprocess.Popen(command,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   shell=True,
                                   start_new_session=True)
        try:
            pid_config.add_pid(str(ccn_p_2.pid), pid_file)
            for line in ccn_p_2.stdout:
                print(line)
        finally:
            ccn_p_2.terminate()
            ccn_p_2.wait()
Example #8
def scan_stylecop(filename, stream_result_path, skip_filters, pid_file,
                  config_path):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    stylecop_err_file = filename_result + ".error"
    stylecop_xml = filename_result + ".xml"
    #print("stylecop scan: "+filename)
    current_path = sys.path[0] + '/../'
    stylecop_bin = os.path.join(current_path, 'tools_dep/stylecop')
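    # StyleCopCLI.exe is launched via mono from its install directory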
    os.chdir(stylecop_bin)
    command = 'mono StyleCopCLI.exe -set ' + config_path + ' -cs \"' + filename + '\" -out ' + stylecop_xml
    stylecop_p_2 = subprocess.Popen(command,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    shell=True,
                                    start_new_session=True)
    try:
        pid_config.add_pid(str(stylecop_p_2.pid), pid_file)
        for line in stylecop_p_2.stdout:
            print(line)
    finally:
        stylecop_p_2.terminate()
        stylecop_p_2.wait()

    #print("parse xml: "+stylecop_xml)
    disable_option_array = skip_filters.split(";")
    if os.path.isfile(stylecop_xml):
        try:
            with open(stylecop_err_file, 'w', encoding='utf-8') as logfile:
                tree = ET.ElementTree(file=stylecop_xml)
                for elem in tree.iter():
                    if "Violation" == elem.tag:
                        rule = elem.attrib['Rule']
                        if rule in disable_option_array or \
                                rule == 'CurlyBracketsForMultiLineStatementsMustNotShareLine' or \
                                rule == 'SyntaxException':
                            continue
                        logfile.write(filename + '->' +
                                      elem.attrib['LineNumber'] + '->' +
                                      elem.attrib['Rule'] + '->' + elem.text +
                                      '\n')
        except Exception as e:
            print('=>stylecop_ext.py->scan_stylecop->ERROR:' + str(e) +
                  stylecop_xml)
Example #9
def scan_phpcs(filename, stream_info):
    stream_result_path = stream_info['STREAM_RESULT_PATH']
    pid_file = stream_info['PID_FILE']
    phpcs_standard = stream_info['PHPCS_STANDARD']
    skip_checkers = []
    if 'SKIP_CHECKERS' in stream_info and stream_info['SKIP_CHECKERS'] != "":
        skip_checkers = stream_info['SKIP_CHECKERS'].split(";")
    filename_result = result.get_result_file_path(stream_result_path, filename)
    phpcs_err_file = filename_result + ".error"
    phpcs_xml = filename_result + ".xml"
    #print("phpcs scan: " + filename)
    current_path = sys.path[0] + '/../'
    phpcs_bin = os.path.join(current_path, 'tools_dep/phpcs')
    command = "php " + phpcs_bin + "/phpcs.phar " + "--standard=" + phpcs_standard + " " + \
        filename + " --report=xml > " + phpcs_xml
    phpcs_p_2 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, \
        shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(phpcs_p_2.pid), pid_file)
        for line in phpcs_p_2.stdout:
            line = str(line.decode('utf-8'))
            print(line)
    finally:
        phpcs_p_2.terminate()
        phpcs_p_2.wait()

    if os.path.isfile(phpcs_xml):
        try:
            with open(phpcs_err_file, 'w', encoding='utf-8') as logfile:
                tree = ET.ElementTree(file=phpcs_xml)
                for elem in tree.iter():
                    if (elem.tag == "error" or elem.tag == "warning") and \
                        not elem.attrib['source'] in skip_checkers:
                        logfile.write(filename + '->' + elem.attrib['line'] + '->' + \
                            elem.attrib['source'] + '->' + elem.text + '\n')
        except Exception as e:
            print('=>phpcs_ext.py->scan_phpcs->ERROR: ' + str(e) + "->" +
                  phpcs_xml)
Example #10
def scan_pylint(filename, pylint_path, stream_result_path, disable_option,
                py_path, pid_file, config_path):
    filename_result = result.get_result_file_path(stream_result_path, filename)
    pylint_err_file = filename_result + ".error"
    pylint_json = filename_result + ".json"

    command = "python lint.py --output-format=json --reports=n --rcfile " + config_path + " " + filename + " >" + pylint_json
    os.chdir(pylint_path)
    #print("pylint scan: "+filename)
    pylint_p_2 = subprocess.Popen(command,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True,
                                  start_new_session=True)
    try:
        pid_config.add_pid(str(pylint_p_2.pid), pid_file)
        for line in pylint_p_2.stdout:
            print(line)
    finally:
        pylint_p_2.terminate()
        pylint_p_2.wait()

    #print("parse json: "+pylint_json)
    if os.path.isfile(pylint_json):
        with open(pylint_json, 'r', encoding='utf-8') as pylintjsonfile:
            try:
                parsed_json = json.load(pylintjsonfile)
                with open(pylint_err_file, 'w', encoding='utf-8') as logfile:
                    for line_json in parsed_json:
                        logfile.write(
                            str(line_json["path"]) + "->" +
                            str(line_json["line"]) + "->" +
                            str(line_json["symbol"]) + "->" +
                            str(line_json["message"]).replace("\n", "\\n") +
                            "\n")
            except ValueError:
                #print("parse json error, maybe empty :"+pylint_json)
                pass
Example #11
def parse_project_goml_json_file_error(stream_info):
    scan_path = stream_info['STREAM_CODE_PATH']
    project_goml_json = stream_info['PROJECT_GOML_JSON']
    if os.path.isfile(project_goml_json) and os.path.getsize(
            project_goml_json):
        with open(project_goml_json, "r", encoding='utf-8') as jsonfile:
            all_result_json = json.load(jsonfile)
            for line in all_result_json:
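                # Map each finding back to its source file and append it to that file's .error log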
                file_path = scan_path + '/' + line["path"]
                filename_result = result.get_result_file_path(
                    stream_info['STREAM_RESULT_PATH'], file_path)
                file_error_path = filename_result + '.error'
                error_line = str(file_path) + '->' + str(
                    line["line"]) + '->' + str(line["linter"]) + '->' + str(
                        line["message"]) + ''
                try:
                    with open(file_error_path, "a+",
                              encoding='utf-8') as file_list:
                        file_list.write(error_line + "\n")
                except Exception as e:
                    print(
                        "=>main.py->parse_project_goml_json_file_error->ERROR:"
                        + str(e) + file_error_path)
Example #12
def check():
    current_path = sys.path[0]
    project_file_list = ""
    stream_result_path = ""
    pool_processes = ""
    skip_filters = ""
    pid_file = ""
    config_path = ""
    if 'PROJECT_FILE_LIST' in stream_info:
        project_file_list = stream_info['PROJECT_FILE_LIST']
    if 'STREAM_RESULT_PATH' in stream_info:
        stream_result_path = stream_info['STREAM_RESULT_PATH']
    if 'POOL_PROCESSES' in stream_info:
        pool_processes = stream_info['POOL_PROCESSES']
    if 'SKIP_CHECKERS' in stream_info:
        skip_filters = stream_info['SKIP_CHECKERS']
    if 'PID_FILE' in stream_info:
        pid_file = stream_info['PID_FILE']
    if 'CONFIG_PATH' in stream_info:
        config_path = stream_info['CONFIG_PATH']
    if 'STREAM_CODE_PATH' in stream_info:
        stream_code_path = stream_info['STREAM_CODE_PATH']
    if project_file_list == '' or stream_result_path == '' or pool_processes == '' or pid_file == '' or stream_code_path == '':
        print('one of the required options below is empty!')
        print('project_file_list: ' + project_file_list)
        print('stream_result_path: ' + stream_result_path)
        print('pool_processes: ' + pool_processes)
        print('pid_file: ' + pid_file)
        print('stream_code_path: ' + stream_code_path)
        exit(1)

    os.chdir(stream_code_path)
    # Run the build command
    if 'PROJECT_BUILD_COMMAND' in stream_info:
        os.system(stream_info['PROJECT_BUILD_COMMAND'])

    # Configure PROJECT_CLASS_FILE_LIST
    stream_info['PROJECT_CLASS_FILE_LIST'] = os.path.join(
        stream_info["STREAM_DATA_PATH"], 'project_class_file_list.txt')

    # Generate the class file list
    file.general_class_list_file(stream_info)

    # Return if no project_class_file_list was produced
    if not os.path.isfile(stream_info['PROJECT_CLASS_FILE_LIST']):
        return

    current_path = sys.path[0] + '/../'
    spotbugs_lib = os.path.join(current_path, 'tools_dep/spotbugs/lib')
    spotbugs_ouput_xml = os.path.join(stream_info["STREAM_DATA_PATH"],
                                      'spotbugs_ouput.xml')

    command = 'java -jar ' + spotbugs_lib + '/spotbugs.jar -textui  -include ' + config_path + ' -xdocs  -output ' + spotbugs_ouput_xml + ' -analyzeFromFile ' + stream_info[
        'PROJECT_CLASS_FILE_LIST']

    spotbugs_p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, \
        shell=True, start_new_session=True)
    try:
        pid_config.add_pid(str(spotbugs_p.pid), pid_file)
        for line in spotbugs_p.stdout:
            pass
    finally:
        spotbugs_p.terminate()
        spotbugs_p.wait()
        os.chdir(current_path)

    if os.path.isfile(spotbugs_ouput_xml):
        try:
            sub_root = ET.ElementTree(file=spotbugs_ouput_xml).getroot()
            for elem in sub_root.findall("file"):
                try:
                    reg_path = elem.attrib['classname'].replace('.',
                                                                '/') + '.java'
                    file_path = file.find_project_file_list_path(
                        reg_path, project_file_list)
                    if '' == file_path:
                        continue
                    filename_result = result.get_result_file_path(
                        stream_result_path, file_path)
                    spotbugs_err_file = filename_result + ".error"
                    with open(spotbugs_err_file, 'w',
                              encoding='utf-8') as logfile:
                        for sub_elem in elem.iter():
                            try:
                                logfile.write(file_path + '->' +
                                              sub_elem.attrib['line'] + '->' +
                                              sub_elem.attrib['type'] + '->' +
                                              sub_elem.attrib['message'] +
                                              '\n')
                            except:
                                pass
                except:
                    pass
        except Exception as e:
            print('=>spotbugs_ext.py->check->ERROR: ' + str(e) + "->" +
                  spotbugs_ouput_xml)
Example #13
File: scm.py Project: BleRui/leozheng
def blame_run(file_path, stream_info, stream_code_path):
    try:
        filename_result = result.get_result_file_path(stream_info["STREAM_RESULT_PATH"], file_path)
        file_path_url = filename_result + ".scm_url.xml"
        file_path_info = filename_result + ".scm_info.xml"
        file_path_blame = filename_result + ".scm_blame.xml"
        txt_file_path_blame = filename_result + ".scm_blame.txt"
        #print(util.get_datetime()+" get "+stream_info["SCM_TYPE"]+" "+stream_info["CERT_TYPE"]+" scm info "+file_path)
        #print(util.get_datetime()+" get "+stream_info["SCM_TYPE"]+" "+stream_info["CERT_TYPE"]+" scm blame "+file_path)
        scm_info_command = ""
        scm_blame_command = ""
        scm_url_command = ""
        
        if stream_info["SCM_TYPE"] == "svn" and stream_info["CERT_TYPE"] == "http":
            scm_url_command = "svn info --non-interactive  --no-auth-cache --trust-server-cert --username "+stream_info["ACCOUNT"]+" --password "+stream_info["PASSWORD"]+" --xml \""+file_path+"\" >\""+file_path_url+"\""
            scm_info_command = "svn log -r 1:HEAD --limit 1 --xml --non-interactive  --no-auth-cache --trust-server-cert --username "+stream_info["ACCOUNT"]+" --password "+stream_info["PASSWORD"]+" \""+file_path+"\" >\""+file_path_info+"\""
            scm_blame_command = "svn blame --non-interactive  --no-auth-cache --trust-server-cert --username "+stream_info["ACCOUNT"]+" --password "+stream_info["PASSWORD"]+" --xml \""+file_path+"\" >\""+file_path_blame+"\""
        elif stream_info["SCM_TYPE"] == "svn" and stream_info["CERT_TYPE"] == "ssh" and not 'OFFLINE' in stream_info:
            ssh_access_command = ssh.scm_ssh_access(stream_info)
            scm_url_command = " svn info  --xml \""+file_path+"\" >\""+file_path_url+"\""
            scm_info_command = ssh_access_command+" svn log -r 1:HEAD --limit 1 --xml \""+file_path+"\" | grep -v ^$ | grep -v \'Agent pid\' >\""+file_path_info+"\""
            scm_blame_command = ssh_access_command+" svn blame  --xml \""+file_path+"\" | grep -v ^$ | grep -v \'Agent pid\' | grep -v \'spawn ssh-add\' | grep -v \'Enter passphrase for\' | grep -v \'Identity added:\' >\""+file_path_blame+"\""
        elif stream_info["SCM_TYPE"] == "svn" and stream_info["CERT_TYPE"] == "ssh" and 'OFFLINE' in stream_info:
            scm_url_command = " svn info  --xml \""+file_path+"\" >\""+file_path_url+"\""
            scm_info_command = " svn log -r 1:HEAD --limit 1 --xml \""+file_path+"\" >\""+file_path_info+"\""
            scm_blame_command = " svn blame  --xml \""+file_path+"\" >\""+file_path_blame+"\""
        elif stream_info["SCM_TYPE"] == "git":
            module_list = get_modules(stream_code_path)
            # for-else: chdir into the matching submodule, otherwise fall back to the repo root
            for info in module_list:
                sub_path = (stream_code_path + '/' + info['path']).replace('//', '/')
                if sub_path in file_path:
                    #print('match :'+str(file_path))
                    os.chdir(sub_path)
                    break
            else:
                os.chdir(stream_code_path)
            scm_url_command = "git log --pretty=format:%h \""+file_path+"\" >\""+file_path_url+"\""
            scm_info_command = "git log --pretty=format:\"%ad\" --date=iso --reverse \""+file_path+"\" >\""+file_path_info+"\""
            scm_blame_command = "git blame \""+file_path+"\" -t >\""+file_path_blame+"\""
        elif stream_info["SCM_TYPE"] == "http_download":
            scm_url_command = 'echo >\"'+file_path_url+'\"'
            scm_info_command = 'echo >\"'+file_path_info+'\"'
            scm_blame_command = 'echo >\"'+file_path_blame+'\"'
            os.system('echo >\"'+txt_file_path_blame+'\"')
        os.system(scm_info_command + ' 2>/dev/null')
        os.system(scm_blame_command + ' 2>/dev/null')
        os.system(scm_url_command + ' 2>/dev/null')
        check_rerun = False
        if stream_info["SCM_TYPE"] != 'http_download':
            if os.path.isfile(file_path_blame):
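                # A blame XML with fewer than 3 lines usually means the blame command failed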
                try:
                    with open(file_path_blame, "rb") as file:
                        allens = len(file.readlines())
                        if allens < 3:
                            check_rerun = True
                except Exception as e:
                    print("=>scm.py->blame_run->ERROR:"+str(e)+file_path_blame)
            #if check_rerun:
            #    #print(util.get_datetime()+" scm blame failed again try: "+file_path_blame)
            #    os.system(scm_blame_command)
            translate_blame_xml(file_path_blame, txt_file_path_blame, stream_info)
    except Exception as e:
        raise Exception(e)
Example #14
def generate_file_data_json(stream_info, file_path):
    try:
        tool_type = stream_info['TOOL_TYPE']
        filename_result = result.get_result_file_path(
            stream_info["STREAM_RESULT_PATH"], file_path)
        error_file = filename_result + ".error"
        xml_file_path_url = filename_result + ".scm_url.xml"
        xml_file_path_info = filename_result + ".scm_info.xml"
        txt_file_path_blame = filename_result + ".scm_blame.txt"
        if not os.path.isfile(error_file) or not os.path.isfile(
                xml_file_path_info) or not os.path.isfile(
                    txt_file_path_blame) or not os.path.isfile(
                        xml_file_path_url):
            return

        error_lines = []
        blame_lines = []
        file_change_time = ""
        file_scm_info = {}
        filedata = {}
        with open(error_file, "r", encoding='utf-8') as error_file_lines:
            error_lines = error_file_lines.readlines()
        with open(txt_file_path_blame, "r", encoding='utf-8') as blame_line:
            blame_lines = blame_line.readlines()
        file_change_time = scm.parse_info_xml(xml_file_path_info, stream_info)
        file_scm_info = scm.parse_log_xml(xml_file_path_url, file_path,
                                          stream_info)
        if len(error_lines) <= 0:
            return

        if tool_type == 'ccn':
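            # Append the last line of the .error output to PROJECT_AVG_FILE_CC_LIST under an exclusive lock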
            with open(stream_info['PROJECT_AVG_FILE_CC_LIST'],
                      "a+",
                      encoding='utf-8') as cc_file:
                portalocker.lock(cc_file, portalocker.LOCK_EX)
                cc_file.write(str(error_lines[-1]))

        filedata["filename"] = file_path
        filedata["file_change_time"] = file_change_time
        if 'TOOL_TYPE' in stream_info:
            filedata["tool_name"] = stream_info['TOOL_TYPE']
        if 'TASK_ID' in stream_info:
            filedata["task_id"] = stream_info['TASK_ID']
        if 'STREAM_NAME' in stream_info:
            filedata["stream_name"] = stream_info['STREAM_NAME']
        if 'url' in file_scm_info:
            filedata["url"] = file_scm_info['url']
        if 'repo_id' in file_scm_info:
            filedata["repo_id"] = file_scm_info['repo_id']
        if 'revision' in file_scm_info:
            filedata["revision"] = file_scm_info['revision']
        if 'branch' in file_scm_info:
            filedata["branch"] = file_scm_info['branch']
        if 'rel_path' in file_scm_info:
            filedata["rel_path"] = file_scm_info['rel_path']
        if 'sub_module' in file_scm_info:
            filedata["sub_module"] = file_scm_info['sub_module']
        defects = []

        if tool_type == "ccn":
            defects = defects_data_ccn(stream_info, error_lines, blame_lines)
        else:
            defects = defects_data(stream_info, error_lines, blame_lines)

        if len(defects) > 0:
            # Compress the JSON string
            zip_bytes = zlib.compress(
                bytes(json.dumps(defects), encoding='utf-8'))
            # Base64-encode
            zip_str = base64.b64encode(zip_bytes).decode('utf-8')
            filedata["defectsCompress"] = zip_str
            filedata_data = json.dumps(filedata)
            filedata_data = filedata_data.replace(": ", ":")
            #print(filedata_data)
            codecc_web.codecc_upload_file_json(filedata_data)
        #else:
        #    print(util.get_datetime()+" can not found defect file "+file_path)
    except Exception as e:
        raise Exception(e)
Example #15
def dupc_generate_data_json(stream_info):
    try:
        all_result_json = {}
        with open(stream_info['PROJECT_FILE_DUPC_JSON'], "r",
                  encoding='utf-8') as jsonfile:
            all_result_json = json.load(jsonfile)

        for idx, file_info in enumerate(all_result_json['files_info']):
            filedata = {}
            filename_result = result.get_result_file_path(
                stream_info["STREAM_RESULT_PATH"], file_info['file_path'])
            txt_file_path_blame = filename_result + ".scm_blame.txt"
            xml_file_path_info = filename_result + ".scm_info.xml"
            xml_file_path_url = filename_result + ".scm_url.xml"
            file_info["file_change_time"] = scm.parse_info_xml(
                xml_file_path_info, stream_info)
            filedata = scm.parse_log_xml(xml_file_path_url,
                                         file_info['file_path'], stream_info)
            blameline = ""
            file_author_set = set([])
            if os.path.isfile(txt_file_path_blame):
                with open(txt_file_path_blame, "r",
                          encoding='utf-8') as blame_line:
                    blameline = blame_line.readlines()
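            # For each duplicated block, find the author who touched the most lines
            # and that author's latest change time from the blame data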
            for block_idx, block in enumerate(file_info['block_list']):
                author_name_set = set([])
                author_name_list = []
                author_list = {}
                lines_list = range(0, len(blameline))

                if int(block['start_lines']) < len(blameline) < int(block['end_lines']):
                    lines_list = range(int(block['start_lines']), len(blameline))
                elif len(blameline) > int(block['end_lines']):
                    lines_list = range(int(block['start_lines']), int(block['end_lines']))

                for i in lines_list:
                    line_blame_data = blameline[i].split('->')
                    author_name_list.append(line_blame_data[1])
                    author_name_set.add(line_blame_data[1])
                    if line_blame_data[1] in author_list:
                        change_time = author_list[
                            line_blame_data[1]].strip().split('.')[0]
                        if util.compare(
                                change_time,
                                line_blame_data[2].strip().split('.')[0]):
                            author_list[line_blame_data[1]] = line_blame_data[
                                2].strip()
                    else:
                        author_list[
                            line_blame_data[1]] = line_blame_data[2].strip()
                author_info = ""
                for author_name in author_name_set:
                    if author_info != "":
                        info_array = author_info.split('->')
                        if int(info_array[1]) < author_name_list.count(
                                author_name):
                            author_info = author_name + "->" + str(
                                author_name_list.count(author_name)
                            ) + "->" + author_list[author_name]
                    else:
                        author_info = author_name + "->" + str(
                            author_name_list.count(
                                author_name)) + "->" + author_list[author_name]
                if author_info != "":
                    file_author_set.add(author_info.split('->')[0])
                    block['author'] = author_info.split('->')[0]
                    temp_datetime = str(author_info.split('->')[2])
                    st = time.strptime(temp_datetime, '%Y-%m-%dT%H:%M:%S.%f')
                    final_datetime = time.mktime(st)
                    block['latest_datetime'] = final_datetime
                file_info['block_list'][block_idx] = block
            file_info['author_list'] = ';'.join(file_author_set)
            filedata['tool_name'] = 'dupc'
            filedata["stream_name"] = stream_info['STREAM_NAME']
            filedata["task_id"] = stream_info['TASK_ID']
            # Compress the JSON string
            zip_bytes = zlib.compress(
                bytes(json.dumps(file_info), encoding='utf-8'))
            # Base64-encode
            zip_str = base64.b64encode(zip_bytes).decode('utf-8')
            filedata["defectsCompress"] = zip_str
            filedata_data = json.dumps(filedata)
            filedata_data = filedata_data.replace(": ", ":")
            #print(util.get_datetime()+" start upload file "+file_info['file_path'])
            #print(filedata_data)
            codecc_web.codecc_upload_file_json(filedata_data)

        project_summary = {}
        project_summary['stream_name'] = stream_info['STREAM_NAME']
        project_summary['task_id'] = stream_info['TASK_ID']
        project_summary['scan_summary'] = all_result_json['scan_summary']
        summary_data = json.dumps(project_summary)
        summary_data = summary_data.replace(": ", ":")
        #print(util.get_datetime()+" start submit summary_data")
        codecc_web.upload_project_dupc_summary(summary_data)
    except Exception as e:
        raise Exception(e)