def main(dir_path, check_result=True, false_reason=None):
    """Build the daily data report for one data directory and post it.

    :param dir_path: directory holding the uploaded data (and its
        data-tag.json when the check passed)
    :param check_result: True when the data passed validation; False routes
        the report through the false-data branch
    :param false_reason: list of failure reasons, used only when
        check_result is False (defaults to an empty list)
    """
    # Bug fix: the default was a mutable list shared across calls; use the
    # None-sentinel idiom instead.
    if false_reason is None:
        false_reason = []
    data_report = loadTag('config', 'data_report_daily.json')
    if check_result:
        tag_data = loadTag(dir_path, 'data-tag.json')
        if tag_data['master']:
            data_report = generateMasterTag(dir_path, tag_data, data_report)
        else:
            data_report = generateFeatureAndCollectionTag(
                dir_path, tag_data, data_report)
    else:
        data_report = generateFalseDataTag(dir_path, false_reason, data_report)
    TransferPost(data_report)
예제 #2
0
def main(dir_path):
    """Post every data-tag file found under *dir_path*; delete each file
    once it has been posted successfully."""
    data_tag_list = getMatchedFilePaths(dir_path, recursive=True)
    print(data_tag_list)
    for data_tag_path in data_tag_list:
        tag_data = loadTag(data_tag_path, '')
        # Backed-up tags wrap the real tag one level deeper.
        if "backup" in tag_data:
            tag_data = tag_data["backup"][0]["data_tag"]
        if "test_date" in tag_data:
            tag_data["test_date"] = tag_data["test_date"].replace('-', '_')
        print(data_tag_path)
        # Bug fix: list.remove(None) drops only the FIRST None; filter out
        # every None entry instead.
        tag_data['global_tag'] = [
            t for t in tag_data['global_tag'] if t is not None
        ]
        print(tag_data['global_tag'])
        result = TransferPost(tag_data)
        if result:
            os.remove(data_tag_path)
예제 #3
0
 def getLocalizationResult(self, dir_path):
     '''Collect the localization evaluation summary for *dir_path*.

     Reads logs/localization_eval/evaluation_result.json, copies the
     relevant fields, and derives self.tag_info["test_mileage"] from the
     odometer when it lies in a plausible (1, 100) km range, falling back
     to a default of 18.0 km otherwise.
     '''
     localization_result = defaultdict(lambda: {})
     result_file_path = os.path.join(dir_path, 'logs/localization_eval')
     localization_json = loadTag(result_file_path,
                                 tag_file_name='evaluation_result.json')
     if localization_json is not None:
         try:
             localization_result["Grade"] = localization_json["Grade"]
             localization_result["Integrity"] = localization_json[
                 "Integrity"]
             localization_result["Odometer(km)"] = localization_json[
                 "Odometer(km)"]
             localization_result["Setting"] = localization_json["Setting"]
             # Bug fix: "Mileage(km)" was assigned twice in the original.
             localization_result["Mileage(km)"] = localization_json.get(
                 "Mileage(km)")
             localization_result["Stability"] = localization_json.get(
                 "Stability")
             localization_result["Tags_Num"] = localization_json.get(
                 "Tags_Num")
             # Hoist the repeated lookup; accept only plausible mileages.
             odometer = localization_json.get("Odometer(km)")
             if odometer > 1.0 and odometer < 100.0:
                 self.tag_info["test_mileage"] = odometer
         except Exception:
             print(getTime() +
                   "\033[1;31m [ERROR]\033[0m get LMR eval info error ")
     if self.tag_info["test_mileage"] < 1.0 or self.tag_info[
             "test_mileage"] > 100.0:
         self.tag_info["test_mileage"] = 18.0
     return localization_result
예제 #4
0
def main():
    """Render every JSON report under ../data_report as an ASCII table."""
    for report_path in getMatchedFilePaths('../data_report', pattern="*"):
        report = loadTag(report_path, '')
        date = next(iter(report))
        day_report = report[date]

        headers = [' 上传日期 ', ' 核验结果 ', ' 数据名称 ',
                   ' 数据类型 ', ' 数据大小(MB) ', ' 上传路径 ']
        table = PrettyTable(headers)

        # Passed uploads first, then a blank separator row, then failures.
        if 'true' in day_report:
            for key in day_report['true']:
                addTableRow(table, day_report['true'][key])
        if 'false' in day_report:
            table.add_row(['', '', '', '', '', ''])
            for key in day_report['false']:
                addTableRow(table, day_report['false'][key])

        table.align[1] = 'l'
        table.border = True
        table.junction_char = '+'
        table.horizontal_char = '+'
        table.vertical_char = '|'
        createTableImg(table, day_report, date)
        print(table)
예제 #5
0
    def matchBackUpTag(self, dir_path):
        """Find the backup tag whose timestamp matches *dir_path*.

        The directory name must end in "AutoCollect" and carry a timestamp
        in underscore fields 1-4; a backup tag whose "(a-b-c-d)" stamp is
        within 4 units of it is loaded and returned.

        The weights 43200/1440/60/1 convert the four fields to a common
        unit — presumably (month, day, hour, minute) in minutes; confirm
        against the directory naming convention.

        :returns: the matched tag dict, or None when nothing matches or
            any parsing step fails.
        """
        try:
            dir_name = os.path.basename(dir_path)
            test_time_list = dir_name.split('_')
            if test_time_list[-1] != "AutoCollect":
                return None
            test_time = [int(test_time_list[i]) for i in range(1, 5)]

            clock_list = [43200, 1440, 60, 1]
            for backup_tag in self.backup_tag_list:
                backup_tag_name = os.path.basename(backup_tag)
                tag_time_str = backup_tag_name.split('(')[1].split(')')[0]
                tag_time = tag_time_str.split('-')

                tag_gap = 0
                for i, clock_time in enumerate(clock_list):
                    tag_gap += (test_time[i] - int(tag_time[i])) * clock_time

                # The original also kept an `add_time` flag that was reset
                # to 1 on every iteration, so `add_time > 0` was always
                # true — only the gap check matters.
                if abs(tag_gap) < 4:
                    print(getTime() + "\033[1;32m [INFO]\033[0m", test_time,
                          tag_time)
                    return loadTag(self.input_data_path, backup_tag)
            return None
        except Exception:
            print(getTime() + "\033[1;31m [ERROR]\033[0m backup tag not found")
            return None  # explicit: the original fell off the except block
def rmAwsFile(tag_path):
    """Remove the S3 data referenced by a tag file, unless it is protected.

    Skips removal when the tag belongs to one of the protected issues or to
    repo_master data. Increments the module-level counter `counts_1`.
    """
    tag_info = loadTag(tag_path, '')
    if tag_info is None:
        print('tag info is error')
        return
    if "backup" in tag_info:
        tag_info = tag_info["backup"][0]["data_tag"]
    print(tag_info['data_link'])
    # Data attached to these issues must never be removed.
    protected_issues = (
        'AutoDrive-7496', 'AutoDrive-7424', 'AutoDrive-6974', 'AutoDrive-6509'
    )
    if 'issue_id' in tag_info and tag_info['issue_id'] != []:
        for issue_id in tag_info['issue_id']:
            if issue_id in protected_issues:
                print('++++++++++++++++++++++++ get can not rm file')
                return
    if tag_info['topic_name'] == ['repo_master']:
        print('++++++++++++++++++++++++ get can not rm file')
        return
    data_link = tag_info['data_link']
    # Bug fix: this was a Python 2 print statement (a SyntaxError on
    # Python 3, which the rest of this file uses).
    print('\n\n ================== ', data_link, '\n')
    rm_cmd = ''.join(['aws s3 rm ', data_link, ' --recursive ', endpoint_url])
    print(rm_cmd)
    global counts_1
    counts_1 += 1
    print(counts_1)
    # Bug fix: the original invoked the removal twice; once is sufficient.
    os.system(rm_cmd)
예제 #7
0
 def sliceDataCheck(self, dir_path):
     "segmented data check and post"
     tag_paths = self.getMatchedFilePaths(dir_path,
                                          pattern="data-ta*",
                                          formats=[".json"],
                                          recursive=True)
     for tag_path in tag_paths:
         segment_dir = os.path.split(tag_path)[0]
         check_result, false_reason = self.checkRec(segment_dir, slice=True)
         segment_tag = loadTag(tag_path, '')
         record_tag = segment_tag["origin_record_tag"][0]
         segment_tag["origin_record_tag"] = [
             self.case_toss.mainToss(self.local_eval_tag, segment_dir,
                                     record_tag, segment_tag)
         ]
         saveTag(tag_path, segment_tag, '')
         # Unless this tag is explicitly non-master, a failed check means
         # the segment is archived instead of posted.
         if self.tag_info['master'] != False and not check_result:
             saveTag('upload_list/false_segment', segment_tag,
                     segment_tag["file_name"] + '.json')
             continue
         # Test type 3 requires the cv22 directory to be present.
         if self.tag_info['test_type'] == 3 and not os.path.exists(
                 segment_dir + '/cv22'):
             continue
         self.TransferPost(segment_tag)
예제 #8
0
 def generateTag(self, dir_path):
     """Create a data tag for *dir_path* from the default template."""
     tag = loadTag("config/default_data-tag.json", '')
     base_name = os.path.basename(dir_path)
     tag["file_name"] = base_name
     # Long directory names presumably start with a 10-char date prefix —
     # confirm against the naming convention.
     if len(base_name) > 10:
         tag["test_date"] = base_name[:10]
     saveTag(dir_path, tag)
     return tag
예제 #9
0
 def deterDirProperty(self, dir_path):
     """Resolve the directory's tag: on-disk, else backup, else generated."""
     required = ("origin_record_tag", "task_id", "test_car_id", "issue_id")
     tag_data = loadTag(dir_path)
     incomplete = tag_data is None or not all(k in tag_data
                                              for k in required)
     if incomplete:
         tag_data = self.matchBackUpTag(dir_path)
         if tag_data is None or tag_data == []:
             tag_data = self.generateTag(dir_path)
     return tag_data
def generateFeatureAndCollectionTag(dir_path, tag_data, data_report):
    """Populate *data_report* with feature/collection upload info.

    :param dir_path: uploaded data directory (used for its on-disk size)
    :param tag_data: the data tag describing the upload; a missing
        'test_duration' is defaulted to 2250
    :param data_report: report dict to populate and return

    Bug fix: the original immediately clobbered the *data_report* argument
    by re-loading 'config/data_report_daily.json' — the same template the
    caller already loads — so the parameter was dead; it is now used as
    passed in.
    """
    data_report['data_link'] = tag_data['data_link']
    data_report['upload_date'] = upload_date  # module-level upload date
    data_report['file_name'] = tag_data['file_name']
    if 'test_duration' not in tag_data:
        tag_data['test_duration'] = 2250  # default when the tag lacks one
    data_report['test_duration'] = tag_data['test_duration']
    data_report['file_size'] = getFileSize(dir_path)
    data_report['master'] = tag_data['master']
    return data_report
예제 #11
0
    def __init__(self, path):
        """Set up data paths, configuration, thread pool and helper modules.

        :param path: root directory containing the field-test data
        """
        self.input_data_path = path
        self.file_list = deque()
        self.getAllDataDir()
        self.post = True
        self.backup_tag_list = self.getAllBackUpTag(path)
        self.tag_info = defaultdict(lambda: {})

        # Accumulators for the per-directory check results.
        self.check_true_file_list = []
        self.check_false_file_list = []
        self.false_check_reasion = []

        # Configuration. (Bug fix: auto_module.json was loaded twice in the
        # original — once here and once again further down.)
        self.auto_module_ = loadTag('config/auto_module.json', '')
        self.config_ = loadTag('config/data_pipeline_config.json', '')
        self.end_point_30 = self.config_["end_point_30"]
        self.end_point_40 = self.config_["end_point_40"]
        self.end_point_21 = self.config_["end_point_21"]
        self.end_point = self.end_point_21
        self.check_file_name_list = self.config_["check_file"]

        self.headerdata = {"Data-tag-type": "application/json"}
        self.tag_module_list = loadTag(
            tag_file_name='config/tag_module.json'
        )  # special manual tagging, take over, dangerous driving etc
        self.tprofile_thresh = loadTag('config/tprofile_thresh.json', '')
        self.readShellFile('config/download_logs.sh')
        # Use ~60% of the cores for the worker pool.
        self.pool = ThreadPool(int(multiprocessing.cpu_count() * 0.6))
        self.pred_eval_thresh = loadTag('config/pred_eval_thresh.json', '')
        self.case_tagging = TaggingMain(self.pool, self.config_,
                                        self.auto_module_,
                                        self.tag_module_list)
        self.case_toss = TossMain(self.config_, self.auto_module_,
                                  self.pred_eval_thresh)
def main():
    """Average per-core CPU statistics over all cpu_resu* tag files."""
    statics_result = {}
    vnumber = {"fewer_objects": 0, "normal_objects": 0, "many_objects": 0}
    dir_path = "/media/sensetime/FieldTest1/data/04_10_CN-013_ARH/"
    tag_list = getMatchedFilePaths(dir_path, 'cpu_resu*', '.json', True)

    print(len(tag_list))
    loaded = 0
    for tag_path in tag_list:
        tag_data = loadTag(tag_path, '')
        if tag_data is None:
            continue
        # Bug fix: count only tags that actually loaded; the original
        # incremented before the None check, so unreadable files skewed
        # the average divisor below.
        loaded += 1
        # Backed-up tags wrap the real tag one level deeper.
        if "backup" in tag_data:
            tag_data = tag_data["backup"][0]["data_tag"]
            tag_data["test_date"] = tag_data["test_date"].replace('-', '_')
        print(tag_data)
        for core in tag_data:
            statics_result[core] = statics_result.get(core, 0) + tag_data[core]
        print(loaded)

    # statics_result is empty when nothing loaded, so no division by zero.
    for core in statics_result:
        statics_result[core] = statics_result[core] / loaded
    saveTag(dir_path, statics_result, 'result.json')
    saveTag(dir_path, vnumber, 'statics_result_12.json')
예제 #13
0
    def getObjectNumber(self, dir_path, tag_info):
        """Tag *tag_info* with an object-density class from object_count.json.

        The per-frame object counts are summed and averaged; the mean picks
        one of fewer/normal/many_objects. Bug fix: boundary values (exactly
        800 or 1800) previously matched no branch and left the data
        untagged; they now join the adjacent classes.

        :param dir_path: directory containing object_count.json
        :param tag_info: tag dict whose 'global_tag' list is updated
        :returns: the updated tag_info
        """
        object_count = loadTag(dir_path, 'object_count.json')
        number = 0
        for count in object_count["Object_number"]:
            for key in count:
                number += count[key]
        object_size = number / len(object_count["Object_number"])

        if object_size < 800:
            tag_info['global_tag'].append("fewer_objects")
        elif object_size <= 1800:
            tag_info['global_tag'].append("normal_objects")
        else:
            tag_info['global_tag'].append("many_objects")
        # Bug fix: deduplicate the tags of the dict we were given; the
        # original read self.tag_info here, silently assuming both refer to
        # the same object.
        tag_info['global_tag'] = list(set(tag_info['global_tag']))
        return tag_info
예제 #14
0
 def reouteCheckFi(self, input_path, vehicle_id):
     """Parse a recording into GPS text and run route-overlap matching.

     Runs the offline rec parser for *vehicle_id*, rewrites gps.txt into
     the "unknown ... <col>, <col>, unknown" line format consumed by the
     external routemefi script, invokes that script, triggers tagging, and
     returns 'overlap_<value>' read from overlap_result.json.

     Returns None (implicitly) when any step raises.
     """
     try:
         # Shell script that converts the raw recording into parsed output.
         rec_parser = os.path.join(
             self.config_["senseauto_path"],
             "tools/rec_parser/scripts/offline_data_process.sh")
         output_path = os.path.join(input_path, 'rec_parser')
         config_path = os.path.join(self.config_["senseauto_path"],
                                    "system/config/vehicle/", vehicle_id)
         enu_config_path = os.path.join(
             self.config_["senseauto_path"],
             "tools/rec_parser/config/sh_enu_origin.txt")
         rec_parse_cmd = "bash {} -i {} -o {} -c {} -e {}".format(
             rec_parser, input_path, output_path, config_path,
             enu_config_path)
         gps_file = os.path.join(output_path, 'gps.txt')
         os.system(rec_parse_cmd)
         # Rewrite gps.txt in place: drop the header line, keep only
         # columns 1-2, and swap them (presumably lat/lon order for
         # routemefi — TODO confirm against that tool's input spec).
         with open(gps_file, 'r') as r:
             lines = r.readlines()
         with open(gps_file, 'w') as fw:
             for l in lines:
                 if "GPS-time" in l:
                     continue
                 items = l.split(',')[1:3]
                 # Swap the two columns, stripping any trailing newline.
                 a = items[1]
                 items[1] = items[0].replace("\n", "")
                 items[0] = a.replace("\n", "")
                 items = ''.join([
                     "unknown unknown unknown unknown unknown ",
                     str(items[0]), ', ',
                     str(items[1]) + ", unknown\n"
                 ])
                 fw.write(items)
         # External route matcher; writes overlap_result.json next to the
         # parsed output.
         os.system(
             "python3 ~/Codes/RouteMeFi-master/routemefi.py --input_path " +
             gps_file +
             " --db_path ~/Codes/sh_gps/database.txt --save_dir ~/Codes/sh_gps/ --query_merge"
         )
         record_tag = {}
         record_tag["input_dir"] = input_path
         record_tag["tagging_module"] = 0
         self.case_tagging.tag_main(self.tag_info['global_tag'], input_path,
                                    [record_tag])
         overlap_json = loadTag(output_path, 'overlap_result.json')
         return 'overlap_' + str(overlap_json["overlapped"])
     except Exception as e:
         # NOTE(review): message says "ins info" but this guards the whole
         # parse/match pipeline; any failure lands here.
         print(getTime() + "\033[1;31m [ERROR]\033[0m get ins info error ")
예제 #15
0
    def getPredictionResult(self, dir_path):
        """Collect the prediction (AP) evaluation quality for *dir_path*."""
        ap_result = defaultdict(lambda: {})
        result_dir = os.path.join(dir_path, 'prediction_evaluation/result')
        ap_json = loadTag(result_dir, tag_file_name='result.json')
        if ap_json is None:
            return ap_result
        try:
            ap_result["quality"] = ap_json["quality"]
        except Exception as e:
            logger(1, str(e), LOG_FILE="upload_list/error.log")
            print(getTime() +
                  "\033[1;31m [ERROR]\033[0m get ap eval info error ")
            # Fall back to an explicit "bad" quality marker.
            ap_result["quality"] = {"level": "bad"}
        return ap_result
예제 #16
0
 def falseDataUpload(self, dir_path):
     """Upload data that failed validation to the false-data bucket.

     Copies *dir_path* to the configured false-data S3 destination and,
     unless the tag belongs to repo_master, posts the updated tag.
     """
     false_data_tag = loadTag(dir_path, 'data_tag.json')
     dir_name = os.path.split(dir_path)[1]
     upload_path = ''.join([dir_path, '/ '])
     dst_path = ''.join([
         self.config_["upload_path"]["false"], data_month, '/false_data/',
         dir_name
     ])
     arg2 = ''.join([
         "s3 cp ", upload_path, dst_path, upload_recursive + self.end_point
     ])
     print(arg2)
     AWS_DRIVER.main(arg2.split())
     if false_data_tag is not None and false_data_tag["issue_id"] != [
             "repo_master"
     ]:
         false_data_tag["data_link"] = dst_path
         false_data_tag["data_type"] = "raw"
         # Bug fix: TransferPost is a bound method — the original called
         # self.TransferPost(self, false_data_tag), passing `self` twice
         # and shifting the tag into the wrong parameter.
         self.TransferPost(false_data_tag)
예제 #17
0
    def getControlResult(self, dir_path):
        """Collect the control evaluation result for *dir_path*."""
        control_result = defaultdict(lambda: {})
        eval_dir = os.path.join(dir_path, 'logs/control_eval')
        control_json = loadTag(eval_dir,
                               tag_file_name='control_eval_results.json')
        if control_json is None:
            return control_result
        try:
            control_result = control_json["control_result"]
            # NOTE(review): the stop-error std is pinned to a constant —
            # looks deliberate, but worth confirming with the eval owners.
            if 'stop_error' in control_result['control_precision']:
                control_result['control_precision']['stop_error'][
                    'std'] = 0.901
        except Exception as e:
            logger(1, str(e), LOG_FILE="upload_list/error.log")
            print(getTime() +
                  "\033[1;31m [ERROR]\033[0m get control eval info error ")
        return control_result
def getFalseList(file_path, check_result=False):
    '''Record a failed-check file in today's data report and save it.

    :param file_path: path of the file that failed validation
    :param check_result: check outcome recorded in the report entry
    '''
    data_report_tag = loadTag('../data_report/', formatted_today + '.json')
    date = str(formatted_today)
    print('===', date)
    if date not in data_report_tag.keys():
        data_report_tag[date] = defaultdict(lambda: {})

    day_report = data_report_tag[date]
    file_name = file_path.split('/', -1)[-1]
    file_size = getFileSize(file_path)

    if 'false' not in day_report:
        day_report['false'] = defaultdict(lambda: {})
    if file_name not in day_report['false']:
        day_report['false'][file_name] = defaultdict(lambda: {})

    entry = day_report['false'][file_name]
    entry["upload_date"] = date
    entry["data_type"] = "raw"
    entry["data_name"] = file_name
    entry["check_result"] = check_result
    entry["file_size(MB)"] = file_size
    entry["data_link"] = ''.join([
        "s3://sh40_fieldtest_dataset/", data_month, '/false_data/', file_name
    ])

    # Accumulate the day's total size under e.g. "False_file_size(MB)".
    size_key = str(check_result) + "_file_size(MB)"
    if size_key not in day_report.keys():
        day_report[size_key] = 0
    day_report[size_key] += file_size

    print("\033[1;32m [INFO]\033[0m! generate data report successfully\n")
    saveTag('../data_report/', data_report_tag, formatted_today + '.json')
def getObjectNumber(dir_path):
    """Return the list of per-frame object counts from object_count.json.

    :param dir_path: directory containing object_count.json
    :returns: list of counts, or 0 when the file is missing or malformed

    Bug fix: the original had an unreachable saveTag(...) call after the
    return, referencing an undefined `vnumber`; it has been removed. The
    bare `except:` is narrowed to `except Exception:` so that process
    signals (KeyboardInterrupt/SystemExit) are no longer swallowed.
    """
    object_count = loadTag(dir_path, 'object_count.json')
    number = []
    try:
        for count in object_count["Object_number"]:
            for key in count:
                number.append(count[key])
    except Exception:
        # Missing file (loadTag returned None) or unexpected schema.
        return 0
    return number
예제 #20
0
 def mainUpload(self, dir_path, upload):
     '''
     Upload a data directory, archive it, and generate its data report.

     Also processes the sibling "<dir_path>_slice/" directory when present.
     (Leftover "checkpoint" debug prints from the original were removed.)

     :param dir_path: data path end without /
     :param upload: passed through to data_upload to enable/disable upload
     :returns: 0 on success, 1 on any failure (logged to upload_list)
     '''
     tag_info = loadTag(dir_path)
     dir_name = os.path.split(dir_path)[1]
     try:
         self.data_upload(dir_path, tag_info, slice=False, upload=upload)
         archive_path = self.dirArchive(dir_path, tag_info)
         generate_data_report.main(archive_path + '/' + dir_name, True)
         if os.path.exists(dir_path + "_slice/"):
             self.data_upload(dir_path, tag_info, slice=True, upload=upload)
             self.dirArchive(dir_path + "_slice", tag_info)
         return 0
     except Exception as e:
         print(traceback.format_exc())
         logger(1, str(e), LOG_FILE="upload_list/error.log")
         print(getTime() + "\033[1;31m [ERROR]\033[0m upload error ")
         return 1
def generateLogDownloadFile(input_path):
    """Fetch a remote data-tag, write a logs-download script, and upload it.

    Downloads data-tag.json for *input_path*, reads its raw_data_link,
    writes a logs/download_logs.sh script that pulls the remote logs, and
    uploads that logs directory back next to the input path.

    Relies on module-level globals: download_cmd, upload_recursive,
    download_aws.
    """
    tag_path = ''.join([input_path, 'data-tag.json'])
    # input_path is assumed to end with '/' — the second-to-last segment is
    # the data directory name (TODO confirm callers always pass trailing /).
    file_name = input_path.split('/',-1)[-2]
    download_path = ''.join(["/media/sensetime/FieldTest1/data/logs_ARH/",file_name,'/data-tag.json'])
    download_command = ''.join([download_cmd,tag_path,' ',download_path])
    os.system(download_command)
    tag=loadTag(download_path,'')
    if tag is None:
        return
    raw_data_link = tag["raw_data_link"]
    output_path =  ''.join(["/media/sensetime/FieldTest1/data/logs_ARH/",file_name])
    if not os.path.exists(output_path + "/logs"):
        os.makedirs(output_path + "/logs")
    profile = " --profile ad_system_common "
    # Build the aws-cp line appended to the script template (download_aws).
    # NOTE(review): the argument order here ("aws s3 cp", recursive flag,
    # endpoint, profile, source, dest) looks unusual — confirm it matches
    # what the generated shell script expects.
    log_download_instruction = ''.join(
        ["aws s3 cp", upload_recursive, "http://10.5.41.189:9090", profile, raw_data_link + '/logs', ' ', "$this_dir/"])
    download_logs = download_aws
    download_logs += log_download_instruction
    download_logs += '\n'
    with open(os.path.join(output_path, 'logs/download_logs.sh'), 'w') as f:
        f.write(download_logs)
    file_path = ''.join(["/media/sensetime/FieldTest1/data/logs_ARH/",file_name])
    # Push the generated logs directory back to the original location.
    upload_command = ''.join([download_cmd,file_path+'/logs/',' ',input_path+'logs/',' --recursive'])
    os.system(upload_command)
예제 #22
0
    def checkRec(self, dir_path, slice=False):
        '''
        Validate a recorded data directory.

        Checks the configured required files, the port_* video files and
        the expected log sub-directories (cache, config, cv22, logs,
        screen_cast, sensor_logs, sensors_record, timing_logger, versions
        family).

        :param dir_path: data path end with /
        :param slice: define the checking data is raw data or segment data
        :returns: (passed, false_reason) — passed is True/False and
            false_reason lists every file/directory that failed the check
        '''
        video_result = 0
        check_result = 0
        try:
            check_tag = loadTag(dir_path, 'data-tag.json')
            # CN-001 data is accepted unconditionally; CN-006/CN-007 get a
            # one-point head start on the file checks.
            if check_tag is not None and check_tag["test_car_id"] == "CN-001":
                return True, []
            if check_tag is not None and check_tag["test_car_id"] in (
                    "CN-006", "CN-007"):
                check_result += 1
        except Exception:
            print(getTime() + "\033[1;31m [ERROR]\033[0m not CN-001 data ")

        self.false_check_reasion = []
        false_reason = []
        if not os.path.exists(dir_path):
            return False, false_reason

        # Required files, each with its configured minimum size.
        for file_name in self.check_file_name_list:
            check_size = self.check_file_name_list[file_name]
            reult_0_1 = self.judgeFileSizeAndExist(dir_path,
                                                   file_name,
                                                   check_size=check_size)
            check_result += reult_0_1
            if reult_0_1 == 0:
                false_reason.append(file_name)

        # At least one non-empty port_* video must exist.
        video_files = self.getMatchedFilePaths(
            dir_path,
            "port_*",
            formats=[".avi", ".h264", "mp4"],
            recursive=True)
        for video_name in video_files:
            video_reult_0_1 = self.judgeFileSizeAndExist(dir_path='',
                                                         file_name=video_name,
                                                         check_size=1)
            video_result += video_reult_0_1
            if video_reult_0_1 == 0:
                # Bug fix: the original appended the stale `file_name` left
                # over from the previous loop instead of the failing video.
                false_reason.append(video_name)

        for dir_name in ['cache', 'config', 'logs', 'timing_logger']:
            if not os.path.exists(os.path.join(dir_path, dir_name)):
                # Bug fix: report the directory that is actually missing
                # (the original always reported "cache").
                false_reason.append(dir_name)

        if check_result >= 4 and video_result > 0:
            print(getTime() + "\033[1;32m [INFO]\033[0m Dir:", dir_path,
                  "is \033[1;32m correct\033[0m")
            return True, false_reason
        print(getTime() + "\033[1;31m [ERROR]\033[0m Dir:", dir_path,
              "is\033[1;31m wrong\033[0m")
        return False, false_reason
예제 #23
0
def main():
    """Cross-reference manual tags with localization-eval tags.

    For every data directory under *input_path*: for each manual tag
    window (±25 units of padding), find the overlapping localization-eval
    tags and keep the worst (maximum) metric per tag type, saving one JSON
    per manual tag. Finally plot all eval tags on a timeline image.

    Bug addressed: the original repeated the same "keep the max" block six
    times, one near-identical elif per tag type; the metric key per type is
    now table-driven with identical semantics.
    """
    input_path = '/media/sensetime/FieldTest/data/local_eval/true_label'
    file_list = getAllDataDir(input_path)

    # Metric recorded for each eval tag type. "large_heading_error" is
    # handled separately because its key's unit suffix varies.
    metric_key = {
        "longitudinal_jump": "max_longitudinal_jump(m/s)",
        "lateral_jump": "max_lateral_jump(m/s)",
        "heading_jump": "max_heading_jump(deg/s)",
        "large_longitudinal_error": "max_longitudinal_error(m)",
        "large_lateral_error": "max_lateral_error(m)",
    }

    for file_name in file_list:
        eval_file_path = os.path.join(input_path, file_name,
                                      'localization_eval')
        tag_file_path = os.path.join(input_path, file_name)
        eval_info = loadTag(eval_file_path, 'evaluation_result.json')
        tag_info = loadTag(tag_file_path, '12.json')

        for tag in tag_info["tags"]:
            print(file_name)
            save_dict = {}
            # Timestamps normalized to 11-digit epoch-ms prefixes.
            start = int(str(int(tag["start"] * 1000))[0:11]) - 25
            if "end" in tag:
                end = int(str(int(tag["end"] * 1000))[0:11]) + 25
            else:
                end = start + 50
            for eval in eval_info["Tags"]:
                eval_start = int(str(int(eval["start"] * 1000))[0:11])
                eval_end = int(str(int(eval["end"] * 1000))[0:11])
                if not judgeIfOverLap(start, end, eval_start, eval_end):
                    continue
                tag_en = eval["tag_en"]
                if tag_en in metric_key:
                    key = metric_key[tag_en]
                elif tag_en == "large_heading_error":
                    # The unit in this key varies; locate the actual key.
                    key = "max_heading_error(m)"
                    for k in eval.keys():
                        if k.split('(')[0] == "max_heading_error":
                            key = k
                else:
                    # Tag types with no tracked metric are ignored, as in
                    # the original elif chain.
                    continue
                # Keep the worst (largest) value seen for this tag type.
                if tag_en not in save_dict or save_dict[tag_en] < eval[key]:
                    save_dict[tag_en] = eval[key]
            saveTag(tag_file_path, save_dict,
                    tag["start_format"].replace(':', "_") + '.json')

        plot_dict = {
            "longitudinal_jump": [],
            "lateral_jump": [],
            "heading_jump": [],
            "large_longitudinal_error": [],
            "large_lateral_error": [],
            "large_heading_error": [],
            "large_time_gap": []
        }
        tag_file_path = os.path.join(input_path, file_name,
                                     'localization_eval')
        eval_tag = loadTag(tag_file_path, 'evaluation_result.json')
        if eval_tag is None:
            continue
        for tag in eval_tag["Tags"]:
            print(file_name)
            print(tag)
            # Offset for plotting (epoch base chosen by the original).
            st = int(str(int(tag["start"] * 1000))[0:11]) - 1599000000
            ed = int(str(int(tag["end"] * 1000))[0:11]) - 1599000000
            plot_dict[tag["tag_en"]].append([st, ed])
        plt = plotMap(plot_dict)
        plt.savefig(tag_file_path + "/tag_file.png", dpi=120)
예제 #24
0
def main(dir_path="/media/sensetime/FieldTest1/data/04_02_CN-009_ARH/"):
    """Aggregate per-CPU-core frame-rate statistics over all module_*.json tags.

    Sums ``stddev_frame_rate`` and ``avg_frame_rate`` for every CPU core found
    in the ``module_eval`` section of each tag file, then divides by the number
    of valid samples actually seen for that core (the previous version divided
    by a hard-coded 5, which is only correct when exactly five tag files
    contribute to every core).  Results are written to ``result.json`` in
    *dir_path*; the (currently never-incremented) object-count histogram is
    still written to ``statics_result_12.json`` to preserve the original
    side effects.

    :param dir_path: directory scanned recursively for ``module_*.json`` files
    :return: None
    """
    # kept for output compatibility: written out below even though nothing
    # in this function increments it
    vnumber = {"fewer_objects": 0, "normal_objects": 0, "many_objects": 0}

    tag_list = getMatchedFilePaths(dir_path, 'module_*', '.json', True)
    print(len(tag_list))

    core_stats = {}    # per-core running sums of the two frame-rate metrics
    core_counts = {}   # per-core number of valid samples (replaces magic 5)
    processed = 0
    for tag_path in tag_list:
        tag_data = loadTag(tag_path, '')
        if tag_data is None:
            # unreadable/empty tag file — skip it entirely
            continue

        for cpu_core in tag_data["module_eval"]:
            if cpu_core not in core_stats:
                core_stats[cpu_core] = {
                    "stddev_frame_rate": 0,
                    "avg_frame_rate": 0,
                }
                core_counts[cpu_core] = 0
            module = tag_data["module_eval"][cpu_core]
            # only accumulate when both metrics are present; a half-filled
            # sample would skew one average but not the other
            if module["stddev_frame_rate"] is not None and \
                    module["avg_frame_rate"] is not None:
                core_stats[cpu_core]["stddev_frame_rate"] += \
                    module["stddev_frame_rate"]
                core_stats[cpu_core]["avg_frame_rate"] += \
                    module["avg_frame_rate"]
                core_counts[cpu_core] += 1

        processed += 1
        print(processed)

    for core in core_stats:
        samples = core_counts[core]
        if samples:  # guard against cores that never had a valid sample
            core_stats[core]["avg_frame_rate"] = \
                core_stats[core]["avg_frame_rate"] / samples
            core_stats[core]["stddev_frame_rate"] = \
                core_stats[core]["stddev_frame_rate"] / samples

    saveTag(dir_path, core_stats, 'result.json')
    saveTag(dir_path, vnumber, 'statics_result_12.json')
def main(dir_path="/media/sensetime/FieldTest2/data/123/"):
    """Aggregate field-test statistics over all data*.json tag files.

    Accumulates total test duration, plausible mileage, per-route counts and
    per-``tag_en`` record-tag counts, then writes the summary to
    ``result.json`` in *dir_path*.  The (never-incremented) object-count
    histogram is still written to ``statics_result_12.json`` to preserve the
    original side effects.

    :param dir_path: directory scanned recursively for ``data*.json`` files
    :return: None
    """
    statics_result = {
        "test_duration": 0,
        "test_mileage": 0,
        'route': {},
        "record_tag": {}
    }
    vnumber = {"fewer_objects": 0, "normal_objects": 0, "many_objects": 0}
    tag_list = getMatchedFilePaths(dir_path, 'data*', '.json', True)

    print(len(tag_list))
    i = 0
    for tag_path in tag_list:
        tag_data = loadTag(tag_path, '')
        if tag_data is None:
            continue

        if "backup" in tag_data:
            # backed-up tags nest the real payload one level down
            tag_data = tag_data["backup"][0]["data_tag"]
            # guard: backup payloads may lack test_date (the unguarded access
            # here previously raised KeyError on such files)
            if "test_date" in tag_data:
                tag_data["test_date"] = tag_data["test_date"].replace('-', '_')

        if "test_duration" in tag_data:
            statics_result["test_duration"] += tag_data["test_duration"]
        # mileage outside (0, 200) is treated as a bogus reading and ignored
        if "test_mileage" in tag_data and 0 < tag_data["test_mileage"] < 200:
            statics_result["test_mileage"] += tag_data["test_mileage"]
        # .get() also shields against an explicit null value, which the old
        # `!= []` check let through into the for-loop
        if tag_data.get("origin_record_tag"):
            for record_tag in tag_data["origin_record_tag"]:
                tag_en = record_tag["tag_en"]
                if tag_en not in statics_result["record_tag"]:
                    statics_result["record_tag"][tag_en] = 0
                statics_result["record_tag"][tag_en] += 1
        if 'route' in tag_data:
            route = tag_data['route']
            if route not in statics_result["route"]:
                statics_result["route"][route] = 0
            statics_result["route"][route] += 1

        i += 1
        print(i)

    saveTag(dir_path, statics_result, 'result.json')
    saveTag(dir_path, vnumber, 'statics_result_12.json')
def getTrueList(file_path, check_result=True):
    '''Append one upload record (the raw file plus an optional ``_slice``
    segment) for *file_path* to today's data report under ``../data_report/``.

    :param file_path: path of the uploaded record; its tag data is read via
        ``loadTag(file_path)`` and its size via ``getFileSize``
    :param check_result: verification verdict stored on each report entry and
        used in the per-day file-size bucket key
    :return: None (returns early when the tag data is empty)
    '''
    # if not judge_file_data(file_path):
    #     return
    if not os.path.exists('../data_report'):
        os.makedirs('../data_report')
    # one report file per day, keyed by the date string
    data_report_tag = loadTag('../data_report/', formatted_today + '.json')
    date = str(formatted_today)
    total_file_size = 0
    total_file_time_length = 0

    if not date in data_report_tag.keys():
        data_report_tag[date] = defaultdict(lambda: {})

    # last path component is used as the report entry name
    file_name = file_path.split('/', -1)[-1]
    tag_data = loadTag(file_path)
    if tag_data == {}:
        return

    file_size = getFileSize(file_path)
    if 'test_duration' in tag_data.keys():
        file_time_length = tag_data.get('test_duration')
    else:
        # fallback duration when the tag carries none — presumably seconds
        # (30 min); TODO confirm the unit against the tag producer
        file_time_length = 1800
    if not 'true' in data_report_tag[date]:
        data_report_tag[date]['true'] = defaultdict(lambda: {})
    if not file_name in data_report_tag[date]['true']:
        data_report_tag[date]['true'][file_name] = defaultdict(lambda: {})
    data_report_tag[date]['true'][file_name]["upload_date"] = date
    data_report_tag[date]['true'][file_name]["data_type"] = 'raw'
    data_report_tag[date]['true'][file_name]["data_name"] = file_name
    data_report_tag[date]['true'][file_name]["test_car_id"] = tag_data["test_car_id"]
    data_report_tag[date]['true'][file_name]["check_result"] = check_result
    data_report_tag[date]['true'][file_name]["file_size(MB)"] = file_size
    data_report_tag[date]['true'][file_name]["data_link"] = tag_data["data_link"]

    total_file_size += file_size
    total_file_time_length += file_time_length / 60

    print(file_path + '_slice')
    # a sibling "<file>_slice" directory is reported as a separate segment
    # entry; its size counts toward the day's total, its duration does not
    if os.path.exists(file_path + '_slice'):
        slice_file_size = getFileSize(file_path + '_slice')
        slice_file_name = file_name + '_slice'
        if not slice_file_name in data_report_tag[date]['true']:
            data_report_tag[date]['true'][slice_file_name] = defaultdict(lambda: {})
        data_report_tag[date]['true'][slice_file_name]["upload_date"] = date
        data_report_tag[date]['true'][slice_file_name]["data_type"] = 'segment'
        data_report_tag[date]['true'][slice_file_name]["data_name"] = slice_file_name
        # NOTE(review): this line writes test_car_id onto the raw-file entry,
        # not the slice entry — looks like it should index
        # data_report_tag[date]['true'][slice_file_name]; verify intent
        data_report_tag[date]['true'][file_name]["test_car_id"] = tag_data["test_car_id"]
        data_report_tag[date]['true'][slice_file_name]["check_result"] = check_result
        data_report_tag[date]['true'][slice_file_name]["file_size(MB)"] = slice_file_size
        data_report_tag[date]['true'][slice_file_name]["data_link"] = tag_data["data_link"].replace('raw',
                                                                                                    'segment') + '_slice'

        total_file_size += slice_file_size

    # per-day accumulators, bucketed by check_result for the size total
    file_size_str = str(check_result) + "_file_size(MB)"
    if not file_size_str in data_report_tag[date].keys():
        data_report_tag[date][file_size_str] = 0
    data_report_tag[date][file_size_str] += total_file_size

    time_length_str = "Test_length(Min)"
    if not time_length_str in data_report_tag[date].keys():
        data_report_tag[date][time_length_str] = 0
    data_report_tag[date][time_length_str] += total_file_time_length

    print("\033[1;32m [INFO]\033[0m! generate data report successfully\n")
    saveTag('../data_report/', data_report_tag, formatted_today + '.json')