Example no. 1
0
    def matchBackUpTag(self, dir_path):
        try:
            dir_name = os.path.basename(dir_path)
            test_time_list = dir_name.split('_')
            if test_time_list[-1] != "AutoCollect":
                return None
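            # fields 1-4 of the dir name presumably encode month, day, hour and minute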
            test_time = [int(test_time_list[i]) for i in range(1, 5)]

            for backup_tag in self.backup_tag_list:
                backup_tag_name = os.path.basename(backup_tag)
                tag_time_str = backup_tag_name.split('(')[1].split(')')[0]
                tag_time = tag_time_str.split('-')

                # weights convert the (month, day, hour, minute) difference into minutes
                clock_list = [43200, 1440, 60, 1]
                tag_gap = 0
                for i, clock_time in enumerate(clock_list):
                    tag_gap += (test_time[i] - int(tag_time[i])) * clock_time

                # match when the backup tag was written within a few minutes of the test
                if abs(tag_gap) < 4:
                    print(getTime() + "\033[1;32m [INFO]\033[0m", test_time,
                          tag_time)
                    tag_data = loadTag(self.input_data_path, backup_tag)
                    return tag_data
            return None
        except Exception as e:
            print(getTime() + "\033[1;31m [ERROR]\033[0m backup tag not found")
Example no. 2
0
    def main(self, upload=False):
        '''
        main pipeline for collected data check & segment & upload
        :param upload: whether to upload the data
        '''

        start_time = time.time()
        print(getTime() + "\033[1;32m [INFO]\033[0m Starting EVERYTHING")
        self.mainSegment(self.file_list, upload)
        print(
            getTime() +
            "\033[1;32m [INFO]\033[0m Finished everything , consuming {:.3f}s".
            format(time.time() - start_time))
Example no. 3
0
 def getLocalizationResult(self, dir_path):
     '''get the localization result'''
     localization_result = defaultdict(lambda: {})
     result_file_path = os.path.join(dir_path, 'logs/localization_eval')
     localization_json = loadTag(result_file_path,
                                 tag_file_name='evaluation_result.json')
     if localization_json is not None:
         try:
             localization_result["Grade"] = localization_json["Grade"]
             localization_result["Integrity"] = localization_json[
                 "Integrity"]
             localization_result["Odometer(km)"] = localization_json[
                 "Odometer(km)"]
             localization_result["Setting"] = localization_json["Setting"]
             localization_result["Mileage(km)"] = localization_json.get(
                 "Mileage(km)")
             localization_result["Mileage(km)"] = localization_json.get(
                 "Mileage(km)")
             localization_result["Stability"] = localization_json.get(
                 "Stability")
             localization_result["Tags_Num"] = localization_json.get(
                 "Tags_Num")
             # keep the odometer reading as the test mileage when it is plausible
             odometer = localization_json.get("Odometer(km)")
             if odometer is not None and 1.0 < odometer < 100.0:
                 self.tag_info["test_mileage"] = odometer
         except Exception as e:
             print(getTime() +
                   "\033[1;31m [ERROR]\033[0m get LMR eval info error ")
     if self.tag_info["test_mileage"] < 1.0 or self.tag_info[
             "test_mileage"] > 100.0:
         self.tag_info["test_mileage"] = 18.0
     return localization_result
Example no. 4
0
 def TransferPost(self, data_tag):
     "post the data tag to senseFT"
     url = 'https://fieldtest.sensetime.com/task/daemon/update/tag'
     post_result = requests.post(url,
                                 headers=self.headerdata,
                                 data=json.dumps(data_tag))
     print(getTime() + "\033[1;32m [INFO]\033[0m ", post_result.text, '\n')
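     # any response other than {"result":"true"} is treated as a failed post; keep the tag locally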
     if post_result.text != u'{"result":"true"}':
         try:
             logger(2,
                    "Uploaded dir:" + data_tag["data_link"],
                    LOG_FILE="upload_list/post_failed_file.log")
             saveTag('upload_list/', data_tag,
                     data_tag["file_name"] + '.json')
         except Exception as e:
             print(getTime() +
                   "\033[1;31m [ERROR]\033[0m save post-failed tag error ")
Example no. 5
0
 def falseDataArchive(self, dir_path, false_reason):
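     # upload data that failed the check, then archive it; always returns 1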
     try:
         #generate_data_report.main(dir_path, False, false_reason)
         self.falseDataUpload(dir_path)
         self.dirArchive(dir_path, tag_info=None, check_result=False)
         return 1
     except Exception as e:
         logger(1, str(e), LOG_FILE="upload_list/error.log")
         print(getTime() + "\033[1;31m [ERROR]\033[0m archive error ")
     return 1
Example no. 6
0
    def dirArchive(self, dir_path, tag_info, check_result=True):
        "archive the dri which has been uploaded"
        input_data_path, dir_name = os.path.split(dir_path)

        if not check_result:
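            # data that failed the check is archived under a False_<month>_<day>_ARH folder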
            try:
                date_id = ''.join([
                    '/False_',
                    dir_name.split('_')[1], '_',
                    dir_name.split('_')[2]
                ])
            except Exception as e:
                date_id = '/False_ARH'
            archive_path = ''.join([input_data_path, date_id, '_ARH'])
            if not os.path.exists(archive_path):
                os.makedirs(archive_path)
            try:
                shutil.move(input_data_path + '/' + dir_name, archive_path)

            except Exception as e:
                print("checkpoint1001")
                print(getTime() +
                      "\033[1;31m [ERROR]\033[0m move dir to ARH failed ")
            return archive_path
        else:
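            # verified data is archived under a <date>_<car id>_ARH folder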
            test_date = tag_info["test_date"].split('_', 1)[1]
            try:
                date_id = ''.join(
                    ['/', test_date, '_', tag_info["test_car_id"]])
            except Exception as e:
                date_id = '/True_ARH'
            archive_path = ''.join([input_data_path, date_id, '_ARH'])
            if not os.path.exists(archive_path):
                os.makedirs(archive_path)
            try:
                shutil.move(''.join([input_data_path, '/', dir_name]),
                            archive_path)
            except Exception as e:
                print("checkpoint1002")
                print(getTime() +
                      "\033[1;31m [ERROR]\033[0m move dir to ARH failed ")
            return archive_path
Example no. 7
0
 def reouteCheckFi(self, input_path, vehicle_id):
     try:
         rec_parser = os.path.join(
             self.config_["senseauto_path"],
             "tools/rec_parser/scripts/offline_data_process.sh")
         output_path = os.path.join(input_path, 'rec_parser')
         config_path = os.path.join(self.config_["senseauto_path"],
                                    "system/config/vehicle/", vehicle_id)
         enu_config_path = os.path.join(
             self.config_["senseauto_path"],
             "tools/rec_parser/config/sh_enu_origin.txt")
         rec_parse_cmd = "bash {} -i {} -o {} -c {} -e {}".format(
             rec_parser, input_path, output_path, config_path,
             enu_config_path)
         gps_file = os.path.join(output_path, 'gps.txt')
         os.system(rec_parse_cmd)
         # rewrite the parsed gps file: drop the header line, swap the two coordinate
         # columns, and pad each row with "unknown" fields for routemefi.py
         with open(gps_file, 'r') as r:
             lines = r.readlines()
         with open(gps_file, 'w') as fw:
             for line in lines:
                 if "GPS-time" in line:
                     continue
                 items = line.split(',')[1:3]
                 first = items[1].replace("\n", "")
                 second = items[0].replace("\n", "")
                 fw.write(''.join([
                     "unknown unknown unknown unknown unknown ",
                     first, ', ',
                     second + ", unknown\n"
                 ]))
         os.system(
             "python3 ~/Codes/RouteMeFi-master/routemefi.py --input_path " +
             gps_file +
             " --db_path ~/Codes/sh_gps/database.txt --save_dir ~/Codes/sh_gps/ --query_merge"
         )
         record_tag = {}
         record_tag["input_dir"] = input_path
         record_tag["tagging_module"] = 0
         self.case_tagging.tag_main(self.tag_info['global_tag'], input_path,
                                    [record_tag])
         overlap_json = loadTag(output_path, 'overlap_result.json')
         return 'overlap_' + str(overlap_json["overlapped"])
     except Exception as e:
         print(getTime() + "\033[1;31m [ERROR]\033[0m route check failed ")
Example no. 8
0
 def judgeFileSizeAndExist(self, dir_path, file_name, check_size=0.2):
     "as the function name descripted"
     judge_file = os.path.join(dir_path, file_name)
     print("checkfilereleaseandsize is writing this :)")
     print(file_name)
     print(judge_file)
     # a file passes when it exists and is at least check_size MB in size
     if os.path.exists(judge_file) and \
             round(os.path.getsize(judge_file) / float(1024 * 1024), 1) >= check_size:
         # print("no error, filesize:")
         # print(os.path.getsize(judge_file))
         return 1
     else:
         self.false_check_reasion.append(file_name)
         print(getTime() + "\033[1;31m [ERROR]\033[0m file:", file_name,
               " is\033[1;31m wrong\033[0m\n")
         # only report the size when the file actually exists
         if os.path.exists(judge_file):
             print("filesize:")
             print(os.path.getsize(judge_file))
         return 0
Example no. 9
0
    def getPredictionResult(self, dir_path):
        "add the control evaluation result to tag"

        ap_result = defaultdict(lambda: {})
        result_file_path = os.path.join(dir_path,
                                        'prediction_evaluation/result')
        ap_json = loadTag(result_file_path, tag_file_name='result.json')

        if ap_json is not None:
            try:
                ap_result["quality"] = ap_json["quality"]
            except Exception as e:
                logger(1, str(e), LOG_FILE="upload_list/error.log")
                print(getTime() +
                      "\033[1;31m [ERROR]\033[0m get ap eval info error ")
                ap_result["quality"] = {}
                ap_result["quality"]['level'] = "bad"
        return ap_result
Example no. 10
0
    def getControlResult(self, dir_path):
        "add the control evaluation result to tag"

        control_result = defaultdict(lambda: {})
        result_file_path = os.path.join(dir_path, 'logs/control_eval')
        control_json = loadTag(result_file_path,
                               tag_file_name='control_eval_results.json')

        if control_json is not None:
            try:
                control_result = control_json["control_result"]
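                # override the reported stop_error std with a fixed value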
                if 'stop_error' in control_result['control_precision']:
                    control_result['control_precision']['stop_error'][
                        'std'] = 0.901
            except Exception as e:
                logger(1, str(e), LOG_FILE="upload_list/error.log")
                print(getTime() +
                      "\033[1;31m [ERROR]\033[0m get control eval info error ")
        return control_result
Example no. 11
0
 def mainUpload(self, dir_path, upload):
     '''
     upload and then archive data
     :param dir_path: data path ending without /
     :param upload: whether to upload
     '''
     print("checkpoint36")
     print(dir_path)
     tag_info = loadTag(dir_path)
     print(tag_info)
     dir_name = os.path.split(dir_path)[1]
     print(dir_name)
     try:
         self.data_upload(dir_path, tag_info, slice=False, upload=upload)
         archive_path = self.dirArchive(dir_path, tag_info)
         generate_data_report.main(archive_path + '/' + dir_name, True)
         if os.path.exists(dir_path + "_slice/"):
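             # upload and archive the sliced segments as well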
             self.data_upload(dir_path, tag_info, slice=True, upload=upload)
             self.dirArchive(dir_path + "_slice", tag_info)
         return 0
     except Exception as e:
         print(traceback.format_exc())
         logger(1, str(e), LOG_FILE="upload_list/error.log")
         print(getTime() + "\033[1;31m [ERROR]\033[0m upload error ")
         return 1
Example no. 12
0
    def data_upload(self, dir_path, tag_info, slice=False, upload=True):
        'data_upload with aws'
        dir_name = os.path.split(dir_path)[1]
        vehicle_id = tag_info["test_car_id"].replace("-", "")
        data_month = tag_info["test_date"].rsplit("_", 1)[0]

        # feature-branch data (anything but repo_master) is uploaded under its task id
        try:
            feature = tag_info["issue_id"] != ["repo_master"]
        except Exception as e:
            feature = False
        task_id = str(tag_info["task_id"]) + '/' if feature else ''
        if slice:
            upload_path = ''.join([dir_path, "_slice/ "])
            dst_path = ''.join([
                self.getDataCollectionDstLink(tag_info, data_month,
                                              slice=True),
                tag_info["test_date"], '/', vehicle_id, '/', task_id
            ])
            self.sliceDataCheck(dir_path + '_slice/')
        else:
            upload_path = ''.join([dir_path, '/ '])
            tag_path = ''.join([
                self.getDataCollectionDstLink(tag_info,
                                              data_month,
                                              slice=False),
                tag_info["test_date"], '/', vehicle_id, '/', task_id, dir_name
            ])
            dst_path = ''.join([tag_path, '/'])
            tag_info["data_link"] = tag_path
            tag_info["data_type"] = "raw"
            tag_info['aws_endpoint'] = self.end_point
            if tag_info["test_type"] == 1:
                aa, eval_result = self.getPPEvalResult(tag_path)
                if aa:
                    tag_info["pp_evaluation"] = eval_result
            if tag_info["test_type"] == 3 and self.config_["evaluation"]:
                generate_evaluation_result.generateAdasEval(
                    dir_path, self.config_)
                tag_info["adas_evaluation"] = self.getAdasResult(tag_path)
            try:
                if self.false_reason != []:
                    for false_check in self.false_reason:
                        tag_info["global_tag"].append("no_" + false_check)
                if self.tprfile_resut != []:
                    for tprofile in self.tprfile_resut:
                        tag_info["global_tag"].append(tprofile)
            except Exception as e:
                print("write global tag error")

            saveTag(dir_path, tag_info, file_name='data-tag.json')

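        # NOTE: upload_path ends with "/ "; the trailing space keeps source and
        # destination separated when the aws cli arguments are joined below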
        arg2 = ''.join([
            "s3 cp ", upload_path, dst_path, upload_recursive + self.end_point
        ])
        # arg_slam =''.join(["s3 cp ", os.path.join(dir_path,'logs/map_update_areas.json '),
        #                   "s3://sz21_data_collection/slam_file/"+data_month+'/'+tag_info["test_date"]+'/'+
        #                    vehicle_id+'/'+task_id+dir_name+'/'+tag_info.get("route")+'/map_update_areas.json',
        #                   " --profile ad_system_common --only-show-errors --endpoint-url=" + self.end_point])
        print(getTime() + "\033[1;32m [INFO] Uploading ...\033[0m ", arg2)
        if upload:
            start_upload_time = time.time()
            file_size = getFileSize(upload_path.replace(' ', ''))
            upload_result = AWS_DRIVER.main(arg2.split())
            # AWS_DRIVER.main(arg_slam.split())
            cost_time = time.time() - start_upload_time
            upload_speed = file_size / cost_time
            logger(1, "Uploaded dir:" + upload_path)
            if upload_result == 0:
                print(getTime() + "\033[1;32m [INFO]\033[0m", dir_name,
                      "has\033[1;32m uploaded successfully! Speed:\033[0m",
                      str(upload_speed), "MB/s")
            else:
                print(getTime() + "\033[1;31m [ERROR]\033[0m", dir_name,
                      "\033[1;31m upload failed!\033[0m")
        self.TransferPost(tag_info)
Example no. 13
0
    def dataSegment(self, dir_name, segment_point):
        "data segment pipeline"
        print("checkpoint40")
        print(self.tag_module_list)
        segment_list = []
        case_tagging_list = []
        prediction_tagging_list = []
        for record_tag in segment_point:  # each record_tag describes one tagged event
            # dir_path = os.path.join(self.input_data_path, dir_name)
            dir_path = self.input_data_path
            tag_name = record_tag.get("tag_en")  # e.g. "cloudyday"

            if tag_name not in self.tag_module_list:  # special manual tags: take over, dangerous driving, etc.
                if "data_type" in record_tag and record_tag[
                        "data_type"] == "eval":
                    level_name, log_type, tagging_module = ["EVAL", 0, 1]
                else:
                    level_name, log_type, tagging_module = [
                        "3D-Perception", 0, 1
                    ]
                if self.tag_info['test_type'] == 3:
                    level_name, log_type, tagging_module = ["ADAS", 0, 1]
            else:
                level_name = self.tag_module_list[tag_name]["level_name"]
                log_type = self.tag_module_list[tag_name]["log_type"]
                tagging_module = self.tag_module_list[tag_name][
                    "tagging_module"]
            print(getTime() + "\033[1;32m [INFO] \033[0m", level_name, "==>> ",
                  tag_name, "==== module_type:", log_type)

            if "end" in record_tag:
                front_time = 2
                behind_time = (record_tag["end"] - record_tag["start"]) // 1000
            else:
                if tag_name not in self.tag_module_list:
                    front_time = 20
                    behind_time = 10
                else:
                    front_time = self.tag_module_list[tag_name]["front_time"]
                    behind_time = self.tag_module_list[tag_name]["behind_time"]

            if behind_time < 6:
                behind_time = 10

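            # record_tag["start"]/["end"] appear to be millisecond timestamps: dividing by
            # 1000 gives seconds for tagging, multiplying by 1000 gives the cut time point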
            if "end" in record_tag:
                input_timestamp = (record_tag["start"] +
                                   record_tag["end"]) / 2000
            else:
                input_timestamp = record_tag["start"] / 1000

            time_point = record_tag["start"] * 1000
            test_date = self.tag_info["test_date"]
            test_time = str(record_tag["start_format"].replace(":", "_"))
            segment_dir_name = ''.join([test_date, '_', test_time])
            print("checkpoint41")
            output_path = ''.join([
                dir_path, '_slice/', level_name, '/', tag_name, '/',
                segment_dir_name
            ])
            print(output_path)
            if self.tag_info["issue_id"][0] != "repo_master" and \
                (record_tag['tag_en'] == "take_over" or record_tag['tag_en'] == "Emergency_brake"):
                segment_list.append({
                    "output_dir": output_path,
                    "time_point": time_point,
                    "front_duration": front_time,
                    "behind_duration": behind_time,
                    "log_type": log_type
                })
            else:
                segment_list.append({
                    "output_dir": output_path,
                    "time_point": time_point,
                    "front_duration": front_time,
                    "behind_duration": behind_time,
                    "log_type": log_type
                })
            if level_name != "EVAL":
                case_tagging_list.append({
                    "input_dir":
                    ''.join([output_path, '/']),
                    "module_name":
                    tag_name,
                    "input_timestamp":
                    input_timestamp,
                    "tagging_module":
                    tagging_module
                })
            if tag_name == "abnormal_prediction_trajectory":
                prediction_tagging_list.append({
                    "input_dir":
                    ''.join([output_path, '/']),
                    "module_name":
                    tag_name,
                    "input_timestamp":
                    input_timestamp
                })

            if not os.path.exists(output_path):
                print("checkpoint42")
                os.makedirs(output_path)
            print("checkpoint43")
            task_id = '' if self.tag_info["issue_id"][
                0] == "repo_master" else str(self.tag_info["task_id"]) + '/'

            vehicle_id = self.tag_info["test_car_id"].replace("-", "")
            module_tag_data = copy.deepcopy(self.tag_info)
            data_month = self.tag_info["test_date"].rsplit("_", 1)[0]
            dst_path = ''.join([
                self.getDataCollectionDstLink(self.tag_info,
                                              data_month,
                                              slice=True),
                self.tag_info["test_date"], '/', vehicle_id, '/', task_id
            ])
            module_tag_data["data_link"] = ''.join(
                [dst_path, level_name, '/', tag_name, '/', segment_dir_name])
            # identical to the output_path built above; the tag is written into the slice directory
            store_path = output_path
            module_tag_data["origin_record_tag"] = [record_tag]
            if "data_type" in record_tag and record_tag["data_type"] == "eval":
                module_tag_data["data_type"] = record_tag["data_type"]
                module_tag_data["data_type"] = "segment"
                module_tag_data["test_type"] = 9
                self.post = True
            else:
                module_tag_data["data_type"] = "segment"
            module_tag_data["file_name"] = segment_dir_name
            module_tag_data["raw_data_link"] = ''.join([
                self.getDataCollectionDstLink(self.tag_info,
                                              data_month,
                                              slice=False),
                self.tag_info["test_date"], '/', vehicle_id, '/', task_id,
                dir_name
            ])
            module_tag_data['aws_endpoint'] = self.end_point
            saveTag(store_path, module_tag_data, file_name='data-tag.json')
            self.generateLogDownloadFile(log_type,
                                         module_tag_data["raw_data_link"],
                                         output_path)
        print("checkpoint44")
        print(dir_path)
        print(segment_list)
        cut_rec_multiprocess.main(dir_path, segment_list)
        print("checkpoint45")
        if self.tag_info["test_type"] == 1:
            self.case_tagging.tagMain(self.tag_info["global_tag"], dir_path,
                                      case_tagging_list)
Example no. 14
0
    def checkRec(self, dir_path, slice=False):
        '''
        :param dir_path: data path ending with /
        :param slice: whether the data being checked is raw data or segment data
        :return: True or False
        '''
        # looks through the following folders: cache, config, cv22, logs, screen_cast,
        # sensor_logs, sensors_record, timing_logger, versions
        print("checkpoint25")
        print(dir_path)
        video_result = 0
        check_result = 0
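        # CN-001 data is exempt from the check; CN-006/007 start with one check credit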
        try:
            check_tag = loadTag(dir_path, 'data-tag.json')
            if check_tag is not None and check_tag["test_car_id"] == "CN-001":
                return True, []
            if check_tag is not None and (check_tag["test_car_id"] == "CN-006"
                                          or check_tag["test_car_id"]
                                          == "CN-007"):
                check_result += 1
        except Exception as e:
            print(getTime() + "\033[1;31m [ERROR]\033[0m not CN-001 data ")

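        # collect the name of every file or folder that fails a check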
        self.false_check_reasion = []
        false_reason = []
        if not os.path.exists(dir_path):
            return False, false_reason
        for file_name in self.check_file_name_list:
            print(file_name)
            check_size = self.check_file_name_list[file_name]
            print("checkpoint21")
            print(dir_path)
            result_0_1 = self.judgeFileSizeAndExist(dir_path,
                                                    file_name,
                                                    check_size=check_size)
            print(file_name)
            print(result_0_1)
            print("checkpoint22")
            check_result += result_0_1
            print(check_result)
            if result_0_1 == 0:
                false_reason.append(file_name)
        pattern = "port_*"

        video_files = self.getMatchedFilePaths(
            dir_path,
            pattern,
            formats=[".avi", ".h264", ".mp4"],
            recursive=True)

        for video_name in video_files:
            print("checkpoint23")
            video_result_0_1 = self.judgeFileSizeAndExist(dir_path='',
                                                          file_name=video_name,
                                                          check_size=1)
            print("checkpoint24")
            print(video_result_0_1)
            video_result += video_result_0_1
            if video_result_0_1 == 0:
                false_reason.append(video_name)

        for dir_name in ['cache', 'config', 'logs', 'timing_logger']:
            log_dir_path = os.path.join(dir_path, dir_name)
            if not os.path.exists(log_dir_path):
                false_reason.append(dir_name)

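        # pass when at least four size checks succeed and at least one video file is valid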
        if check_result >= 4 and video_result > 0:
            print("checkpoint27")
            print(getTime() + "\033[1;32m [INFO]\033[0m Dir:", dir_path,
                  "is \033[1;32m correct\033[0m")
            return True, false_reason
        else:
            print("checkpoint26")
            print(check_result)
            print(video_result)
            print(getTime() + "\033[1;31m [ERROR]\033[0m Dir:", dir_path,
                  "is\033[1;31m wrong\033[0m")
            return False, false_reason
Example no. 15
0
    def mainSegment(self, file_list, upload=False):
        '''
        :param file_list: the dirs to be processed
        :param upload: whether to upload, default False
        :return: 0 -> success ; 1 -> error
        '''
        print("checkpoint1000")
        print(self.input_data_path)
        dir_path = self.input_data_path
        # dir_path = os.path.join(self.input_data_path, dir_name)

        # check whether the dir is complete and correct
        print("calling checkRec")
        check_result, self.false_reason = self.checkRec(dir_path)
        for dir_name in file_list:
            print("checkpoint1006")
            print(dir_name)
            print(dir_path)

            # get the data-tag.json of the checked dir
            tag_data = self.deterDirProperty(dir_path)
            if tag_data is None:
                return 1
            if not check_result:
                if "issue_id" not in tag_data.keys() or tag_data["issue_id"] == []:
                    tag_data["issue_id"] = ["feature"]
                # failed repo_master data (or the default task id 30000) is archived and skipped
                if tag_data["issue_id"] == ["repo_master"] or tag_data["task_id"] == 30000:
                    self.falseDataArchive(dir_path, self.false_reason)
                    return

            if "backup" in tag_data:
                tag_data = tag_data["backup"][0]["data_tag"]
            tag_data["test_date"] = tag_data["test_date"].replace('-', '_')
            if "global_tag" not in tag_data:
                tag_data["global_tag"] = []

            # generate evaluation results for the different modules
            if tag_data["test_type"] == 1 and self.config_["evaluation"]:
                pool1 = multiprocessing.Pool(processes=12)
                pool1.apply_async(
                    generate_evaluation_result.generateLocalizationEval,
                    args=(
                        dir_path,
                        self.config_,
                    ))
                pool1.close()
                pool1.join()
                # try:
                #     generate_evaluation_result.generateTprofileEval(dir_path,self.config_)
                # except Exception as e:
                #     print getTime()+"\033[1;31m [ERROR]\033[0m tprofiling error "
            if "issue_id" not in tag_data.keys() or tag_data["issue_id"] == []:
                tag_data["issue_id"] = ["feature"]
                saveTag(dir_path, tag_data)

            self.tag_info = tag_data
            self.tag_info["localization_result"] = self.getLocalizationResult(
                dir_path.replace(" ", ""))
            self.tag_info["control_result"] = {}
            self.tag_info["ap_result"] = {}
            self.tag_info["file_name"] = dir_name
            self.tag_info["disk_id"] = self.getDiskId(dir_path)
            self.addEvalTag(dir_path)
            self.tag_info["origin_record_tag"] = self.deDuplication(
                self.tag_info["origin_record_tag"])
            saveTag(dir_path, self.tag_info)
            self.post = True
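            # decide from the tag info and check result whether to segment and whether to upload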
            segment, upload = self.judgeIfSegmentAndUpload(
                self.tag_info, check_result)

        try:
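            # AUTODRIVE-6814 runs (test_type 2) additionally get a route-overlap check and object counts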
            if self.tag_info["test_type"] == 2 and self.tag_info[
                    "issue_id"] == ["AUTODRIVE-6814"]:
                overlap = self.reouteCheckFi(dir_path,
                                             self.tag_info["test_car_id"])
                self.tag_info['global_tag'].append(overlap)
                try:
                    self.tag_info = self.getObjectNumber(
                        dir_path, self.tag_info)
                except Exception:
                    pass
                if None in self.tag_info['global_tag']:
                    self.tag_info['global_tag'].remove(None)
                saveTag(dir_path, self.tag_info)
            print("checkpoint29")
            print(dir_path)
            print(dir_name)
            if segment:
                print("checkpoint30")
                self.segmentPreprationCollection(dir_name)
                print("checkpoint31")
                print(dir_path)
                print(dir_name)
            print("checkpoint32")
            self.mainUpload(dir_path, upload)
            print("checkpoint33")
        except Exception as e:
            print(traceback.format_exc())
            logger(1, str(e), LOG_FILE="upload_list/error.log")
            print(getTime() +
                  "\033[1;31m [ERROR]\033[0m segment or upload error ")
            return 1
        return 0