Example #1
0
 def sliceDataCheck(self, dir_path):
     """Check every segmented (sliced) data directory under *dir_path* and
     post each segment's data tag.

     :param dir_path: root directory holding the sliced segments
     """
     data_tag_paths = self.getMatchedFilePaths(dir_path,
                                               pattern="data-ta*",
                                               formats=[".json"],
                                               recursive=True)
     for slice_data_tag_path in data_tag_paths:
         # Directory that contains this data-tag file.
         slice_data_path = os.path.split(slice_data_tag_path)[0]
         check_result, _false_reason = self.checkRec(slice_data_path,
                                                     slice=True)
         slice_data_tag = loadTag(slice_data_tag_path, '')
         record_tag = slice_data_tag["origin_record_tag"][0]
         # Re-evaluate the record tag and store the single result back.
         slice_data_tag["origin_record_tag"] = \
             [self.case_toss.mainToss(self.local_eval_tag, slice_data_path,
                                      record_tag, slice_data_tag)]
         saveTag(slice_data_tag_path, slice_data_tag, '')
         # On a master run a failed check archives the tag and skips the
         # post.  `!= False` (not truthiness) keeps the original semantics,
         # where e.g. 0 compares equal to False and counts as non-master.
         if self.tag_info['master'] != False and not check_result:
             saveTag('upload_list/false_segment', slice_data_tag,
                     slice_data_tag["file_name"] + '.json')
             continue
         # ADAS runs (test_type == 3) are only posted when cv22 logs exist.
         if self.tag_info['test_type'] == 3 and not os.path.exists(
                 slice_data_path + '/cv22'):
             continue
         self.TransferPost(slice_data_tag)
Example #2
0
 def generateTag(self, dir_path):
     """Build a default data tag for *dir_path*, persist it and return it."""
     base_name = os.path.basename(dir_path)
     tag = loadTag("config/default_data-tag.json", '')
     tag["file_name"] = base_name
     # Directory names longer than 10 chars begin with a YYYY_MM_DD date.
     if len(base_name) > 10:
         tag["test_date"] = base_name[:10]
     saveTag(dir_path, tag)
     return tag
def main():
    """Collect per-frame object counts from every data dir under a fixed
    root and save the sorted counts into ``result.json``."""
    statics_result = {"test_duration": 0,
                      "test_mileage": 0,
                      'route': {},
                      "record_tag": {}}
    dir_path = "/media/sensetime/FieldTest2/aaa/"
    tag_list = getAllDataDir(dir_path)

    print(len(tag_list))
    result_list = []
    for tag_path in tag_list:
        tag_path = os.path.join(dir_path, tag_path)
        bin_path = os.path.join(
            tag_path, 'simulator_scenario/simulator_scenario_log.bin')
        try:
            case_finder(bin_path, tag_path, 0)
        except Exception:
            # A directory without a readable scenario log is skipped.
            continue
        object_number = getObjectNumber(tag_path)
        # getObjectNumber returns 0 on failure and may return an empty list.
        if object_number != 0 and object_number != []:
            result_list += object_number

    result_list = sorted(result_list, reverse=True)
    print(result_list)
    statics_result["object_number"] = result_list
    saveTag(dir_path, statics_result, 'result.json')
def main():
    """Average per-core CPU usage over all ``cpu_resu*.json`` reports under
    a fixed root and store the result as ``result.json``."""
    statics_result = {}
    vnumber = {"fewer_objects": 0, "normal_objects": 0, "many_objects": 0}
    dir_path = "/media/sensetime/FieldTest1/data/04_10_CN-013_ARH/"
    tag_list = getMatchedFilePaths(dir_path, 'cpu_resu*', '.json', True)

    print(len(tag_list))
    i = 0
    for tag_path in tag_list:
        tag_data = loadTag(tag_path, '')
        if tag_data is None:
            continue
        # Count only files that actually contribute to the sums; the
        # original incremented before the None check, which skewed the
        # average whenever a report failed to load.
        i += 1

        # A backed-up tag stores the real data tag one level down.
        if "backup" in tag_data.keys():
            tag_data = tag_data["backup"][0]["data_tag"]
            tag_data["test_date"] = tag_data["test_date"].replace('-', '_')
        print(tag_data)
        for core in tag_data:
            if core not in statics_result:
                statics_result[core] = 0
            statics_result[core] += tag_data[core]

        print(i)
    # Guard against an empty / fully unreadable report set.
    if i:
        for core in statics_result:
            statics_result[core] = statics_result[core] / i
    saveTag(dir_path, statics_result, 'result.json')

    saveTag(dir_path, vnumber, 'statics_result_12.json')
Example #5
0
def TransferPost(data_tag):
    """Post *data_tag* as JSON to the senseFT daemon.

    :param data_tag: data-tag dict to upload
    :return: True when the server acknowledges the update; otherwise the
        tag is saved under ``../upload_list/`` for a retry and False is
        returned.
    """
    curl = \
        'https://fieldtest.sensetime.com/task/daemon/update/tag'
    post_result = requests.post(curl,
                                headers=headerdata,
                                data=json.dumps(data_tag))
    # print() form keeps this block consistent with the Python-3 style
    # print calls used everywhere else in this file.
    print("\n \033[1;32m [INFO]\033[0m ", post_result.text)
    print(data_tag["file_name"])
    if post_result.text != u'{"result":"true"}':
        try:
            saveTag('../upload_list/', data_tag,
                    data_tag["file_name"] + '.json')
        except Exception:
            print("save upload failed tag failed")
        return False
    else:
        return True
Example #6
0
 def TransferPost(self, data_tag):
     """Post the data tag to senseFT.

     :param data_tag: data-tag dict posted as JSON to the daemon endpoint
     :return: True when the server acknowledges the update, False otherwise
     """
     curl = \
         'https://fieldtest.sensetime.com/task/daemon/update/tag'
     post_result = requests.post(curl,
                                 headers=self.headerdata,
                                 data=json.dumps(data_tag))
     print(getTime() + "\033[1;32m [INFO]\033[0m ", post_result.text, '\n')
     if post_result.text != u'{"result":"true"}':
         try:
             # Remember the failure so the tag can be re-posted later.
             logger(2,
                    "Uploaded dir:" + data_tag["data_link"],
                    LOG_FILE="upload_list/post_failed_file.log")
             saveTag('upload_list/', data_tag,
                     data_tag["file_name"] + '.json')
         except Exception:
             print(getTime() +
                   "\033[1;31m [ERROR]\033[0m save post-failed tag error ")
         # Returning a status matches the module-level TransferPost and is
         # backward compatible: previous callers ignored the None result.
         return False
     return True
def getFalseList(file_path, check_result=False):
    '''
    Record a check-failed data directory in today's data report.

    :param file_path: path of the data directory that failed the check
    :param check_result: verdict stored with the entry (normally False)
    '''
    data_report_tag = loadTag('../data_report/', formatted_today + '.json')
    date = str(formatted_today)
    print('===', date)
    if not date in data_report_tag.keys():
        data_report_tag[date] = defaultdict(lambda: {})

    # Last path component is the data dir name.
    file_name = file_path.split('/', -1)[-1]
    file_size = getFileSize(file_path)

    if not 'false' in data_report_tag[date]:
        data_report_tag[date]['false'] = defaultdict(lambda: {})
    if not file_name in data_report_tag[date]['false']:
        data_report_tag[date]['false'][file_name] = defaultdict(lambda: {})
    entry = data_report_tag[date]['false'][file_name]
    entry["upload_date"] = date
    entry["data_type"] = "raw"
    entry["data_name"] = file_name
    entry["check_result"] = check_result
    entry["file_size(MB)"] = file_size
    entry["data_link"] = ''.join(["s3://sh40_fieldtest_dataset/", data_month,
                                  '/false_data/', file_name])

    # Per-verdict size counter, e.g. "False_file_size(MB)".  A distinct
    # name avoids the original's reuse of `file_size` for the key string.
    size_key = str(check_result) + "_file_size(MB)"
    if not size_key in data_report_tag[date].keys():
        data_report_tag[date][size_key] = 0
    data_report_tag[date][size_key] += file_size

    print("\033[1;32m [INFO]\033[0m! generate data report successfully\n")
    saveTag('../data_report/', data_report_tag, formatted_today + '.json')
def getObjectNumber(dir_path):
    """Collect all per-frame object counts from ``object_count.json``.

    :param dir_path: directory that contains ``object_count.json``
    :return: flat list of count values, or 0 when the file is missing or
        malformed (callers test for both ``0`` and ``[]``)
    """
    object_count = loadTag(dir_path, 'object_count.json')
    number = []
    try:
        for count in object_count["Object_number"]:
            # Each entry is a mapping; keep just the count values.
            for key in count:
                number.append(count[key])
    except (TypeError, KeyError):
        # loadTag returned None, or the expected key/shape is absent.
        return 0
    return number
def main():
    """Aggregate duration / mileage / record-tag / route statistics over
    every data tag below a fixed root and write them to ``result.json``."""
    statics_result = {
        "test_duration": 0,
        "test_mileage": 0,
        'route': {},
        "record_tag": {}
    }
    vnumber = {"fewer_objects": 0, "normal_objects": 0, "many_objects": 0}
    dir_path = "/media/sensetime/FieldTest2/data/123/"
    tag_list = getMatchedFilePaths(dir_path, 'data*', '.json', True)

    print(len(tag_list))
    i = 0
    for tag_path in tag_list:
        tag_data = loadTag(tag_path, '')
        if tag_data is None:
            continue

        # A backed-up tag stores the real data tag one level down.
        if "backup" in tag_data:
            tag_data = tag_data["backup"][0]["data_tag"]
            tag_data["test_date"] = tag_data["test_date"].replace('-', '_')

        if "test_duration" in tag_data:
            statics_result["test_duration"] += tag_data["test_duration"]
        # Mileage outside (0, 200) is treated as a bogus reading.
        if "test_mileage" in tag_data and 0 < tag_data["test_mileage"] < 200:
            statics_result["test_mileage"] += tag_data["test_mileage"]
        if "origin_record_tag" in tag_data and \
                tag_data["origin_record_tag"] != []:
            for record_tag in tag_data["origin_record_tag"]:
                tag_en = record_tag["tag_en"]
                statics_result["record_tag"].setdefault(tag_en, 0)
                statics_result["record_tag"][tag_en] += 1
        if 'route' in tag_data:
            route = tag_data['route']
            statics_result["route"].setdefault(route, 0)
            statics_result["route"][route] += 1
        i += 1
        print(i)
    saveTag(dir_path, statics_result, 'result.json')

    saveTag(dir_path, vnumber, 'statics_result_12.json')
Example #10
0
    def data_upload(self, dir_path, tag_info, slice=False, upload=True):
        """Upload a data directory (raw or sliced) to S3 via the aws driver
        and post its tag to senseFT.

        :param dir_path: local data directory (without the ``_slice``
            suffix when ``slice`` is True)
        :param tag_info: data-tag dict; mutated with link/type/endpoint and
            evaluation info in the raw (non-slice) branch
        :param slice: when True, upload ``<dir_path>_slice/`` instead
        :param upload: when False, only build and print the aws command
        """
        dir_name = os.path.split(dir_path)[1]
        # "CN-013" -> "CN013"; used as an S3 path component.
        vehicle_id = tag_info["test_car_id"].replace("-", "")
        # "2021_04_10" -> "2021_04" (month bucket).
        data_month = tag_info["test_date"].rsplit("_", 1)[0]

        # A "feature" run is anything whose issue_id is not repo_master;
        # missing keys default to non-feature.
        try:
            feature = False if tag_info["issue_id"] == ["repo_master"
                                                        ] else True
        except Exception as e:
            feature = False
        task_id = str(tag_info["task_id"]) + '/' if feature else ''
        if slice:
            # NOTE(review): the trailing space inside the path looks
            # significant -- arg2 is later split on whitespace to build the
            # aws argv. Confirm before "fixing" it.
            upload_path = ''.join([dir_path, "_slice/ "])
            dst_path = ''.join([
                self.getDataCollectionDstLink(tag_info, data_month,
                                              slice=True),
                tag_info["test_date"], '/', vehicle_id, '/', task_id
            ])
            self.sliceDataCheck(dir_path + '_slice/')
        else:
            upload_path = ''.join([dir_path, '/ '])
            tag_path = ''.join([
                self.getDataCollectionDstLink(tag_info,
                                              data_month,
                                              slice=False),
                tag_info["test_date"], '/', vehicle_id, '/', task_id, dir_name
            ])
            dst_path = ''.join([tag_path, '/'])
            tag_info["data_link"] = tag_path
            tag_info["data_type"] = "raw"
            tag_info['aws_endpoint'] = self.end_point
            # test_type 1: attach the PP evaluation result when available.
            if tag_info["test_type"] == 1:
                aa, eval_result = self.getPPEvalResult(tag_path)
                if aa:
                    tag_info["pp_evaluation"] = eval_result
            # test_type 3 (ADAS): generate and attach the ADAS evaluation.
            if tag_info["test_type"] == 3 and self.config_["evaluation"]:
                generate_evaluation_result.generateAdasEval(
                    dir_path, self.config_)
                tag_info["adas_evaluation"] = self.getAdasResult(tag_path)
            try:
                # Mirror check failures and tprofile results as global tags.
                if self.false_reason != []:
                    for false_check in self.false_reason:
                        tag_info["global_tag"].append("no_" + false_check)
                if self.tprfile_resut != []:
                    for tprofile in self.tprfile_resut:
                        tag_info["global_tag"].append(tprofile)
            except Exception as e:
                print("write global tag error")

            saveTag(dir_path, tag_info, file_name='data-tag.json')

        arg2 = ''.join([
            "s3 cp ", upload_path, dst_path, upload_recursive + self.end_point
        ])
        # arg_slam =''.join(["s3 cp ", os.path.join(dir_path,'logs/map_update_areas.json '),
        #                   "s3://sz21_data_collection/slam_file/"+data_month+'/'+tag_info["test_date"]+'/'+
        #                    vehicle_id+'/'+task_id+dir_name+'/'+tag_info.get("route")+'/map_update_areas.json',
        #                   " --profile ad_system_common --only-show-errors --endpoint-url=" + self.end_point])
        print(getTime() + "\033[1;32m [INFO] Uploading ...\033[0m ", arg2)
        if upload:
            star_upload_time = time.time()
            file_size = getFileSize(upload_path.replace(' ', ''))
            upload_result = AWS_DRIVER.main(arg2.split())
            # AWS_DRIVER.main(arg_slam.split())
            cost_time = time.time() - star_upload_time
            # MB/s, assuming getFileSize returns MB -- TODO confirm units.
            upload_speed = file_size / cost_time
            logger(1, "Uploaded dir:" + upload_path)
            if upload_result == 0:
                print (getTime()+"\033[1;32m [INFO]", dir_name + \
                     "\033[0m", "\033[0m has\033[1;32m uploaded successfully! Speed:\033[0m "+ str(upload_speed) +" MB/s")
            else:
                print(getTime() + "\033[1;32m [INFO]", dir_name + "\033[0m",
                      "\033[0m \033[1;32m upload failed!\033[0m")
        self.TransferPost(tag_info)
Example #11
0
    def dataSegment(self, dir_name, segment_point):
        """Cut the recorded data into per-tag segments and write a data tag
        for each one.

        :param dir_name: name of the raw data dir (used for the raw S3 link)
        :param segment_point: list of record-tag dicts, each describing one
            event (``start`` in ms, optional ``end``, ``tag_en``, ...)
        """
        print("checkpoint40")
        print(self.tag_module_list)
        segment_list = []
        case_tagging_list = []
        prediction_tagging_list = []
        # Hoisted out of the loop: it is loop-invariant and is also used
        # after the loop (the original would NameError on empty input).
        dir_path = self.input_data_path
        for record_tag in segment_point:  # record_tag is every set
            tag_name = record_tag.get("tag_en")  # e.g. cloudyday

            # Tags missing from tag_module_list are special manual tags
            # (take over, dangerous driving, ...) with hard-coded defaults.
            if not tag_name in self.tag_module_list:
                if "data_type" in record_tag and record_tag[
                        "data_type"] == "eval":
                    level_name, log_type, tagging_module = ["EVAL", 0, 1]
                else:
                    level_name, log_type, tagging_module = [
                        "3D-Perception", 0, 1
                    ]
                if self.tag_info['test_type'] == 3:
                    level_name, log_type, tagging_module = ["ADAS", 0, 1]
            else:
                level_name = self.tag_module_list[tag_name]["level_name"]
                log_type = self.tag_module_list[tag_name]["log_type"]
                tagging_module = self.tag_module_list[tag_name][
                    "tagging_module"]
            print(getTime() + "\033[1;32m [INFO] \033[0m", level_name, "==>> ",
                  tag_name, "==== module_type:", log_type)

            # Cut window: explicit [start, end] when present, otherwise the
            # per-tag (or default 20s/10s) front/behind durations.
            if "end" in record_tag:
                front_time = 2
                behind_time = (record_tag["end"] - record_tag["start"]) // 1000
            else:
                if not tag_name in self.tag_module_list:
                    front_time = 20
                    behind_time = 10
                else:
                    front_time = self.tag_module_list[tag_name]["front_time"]
                    behind_time = self.tag_module_list[tag_name]["behind_time"]

            if behind_time < 6:
                behind_time = 10

            # Timestamp (seconds) handed to the tagging modules: middle of
            # the interval, or the start point itself.
            if "end" in record_tag:
                input_timestamp = (record_tag["start"] +
                                   record_tag["end"]) / 2000
            else:
                input_timestamp = record_tag["start"] / 1000

            time_point = record_tag["start"] * 1000
            test_date = self.tag_info["test_date"]
            test_time = str(record_tag["start_format"].replace(":", "_"))
            segment_dir_name = ''.join([test_date, '_', test_time])
            print("checkpoint41")
            output_path = ''.join([
                dir_path, '_slice/', level_name, '/', tag_name, '/',
                segment_dir_name
            ])
            print(output_path)
            # NOTE(review): the original branched on issue_id / take_over /
            # Emergency_brake here, but both branches appended an identical
            # entry, so the dead conditional was folded into one append.
            segment_list.append({
                "output_dir": output_path,
                "time_point": time_point,
                "front_duration": front_time,
                "behind_duration": behind_time,
                "log_type": log_type
            })
            if level_name != "EVAL":
                case_tagging_list.append({
                    "input_dir":
                    ''.join([output_path, '/']),
                    "module_name":
                    tag_name,
                    "input_timestamp":
                    input_timestamp,
                    "tagging_module":
                    tagging_module
                })
            if tag_name == "abnormal_prediction_trajectory":
                prediction_tagging_list.append({
                    "input_dir":
                    ''.join([output_path, '/']),
                    "module_name":
                    tag_name,
                    "input_timestamp":
                    input_timestamp
                })

            if not os.path.exists(output_path):
                print("checkpoint42")
                os.makedirs(output_path)
            print("checkpoint43")
            task_id = '' if self.tag_info["issue_id"][
                0] == "repo_master" else str(self.tag_info["task_id"]) + '/'

            vehicle_id = self.tag_info["test_car_id"].replace("-", "")
            module_tag_data = copy.deepcopy(self.tag_info)
            data_month = self.tag_info["test_date"].rsplit("_", 1)[0]
            dst_path = ''.join([
                self.getDataCollectionDstLink(self.tag_info,
                                              data_month,
                                              slice=True),
                self.tag_info["test_date"], '/', vehicle_id, '/', task_id
            ])
            module_tag_data["data_link"] = ''.join(
                [dst_path, level_name, '/', tag_name, '/', segment_dir_name])
            # The tag is stored where the cut will be written; the original
            # rebuilt the identical join a second time.
            store_path = output_path
            module_tag_data["origin_record_tag"] = [record_tag]
            if "data_type" in record_tag and record_tag["data_type"] == "eval":
                module_tag_data["data_type"] = record_tag["data_type"]
                # NOTE(review): immediately overwritten with "segment", as
                # in the original -- confirm which value is intended.
                module_tag_data["data_type"] = "segment"
                module_tag_data["test_type"] = 9
                self.post = True
            else:
                module_tag_data["data_type"] = "segment"
            module_tag_data["file_name"] = segment_dir_name
            module_tag_data["raw_data_link"] = ''.join([
                self.getDataCollectionDstLink(self.tag_info,
                                              data_month,
                                              slice=False),
                self.tag_info["test_date"], '/', vehicle_id, '/', task_id,
                dir_name
            ])
            module_tag_data['aws_endpoint'] = self.end_point
            saveTag(store_path, module_tag_data, file_name='data-tag.json')
            self.generateLogDownloadFile(log_type,
                                         module_tag_data["raw_data_link"],
                                         output_path)
        print("checkpoint44")
        print(dir_path)
        print(segment_list)
        cut_rec_multiprocess.main(dir_path, segment_list)
        print("checkpoint45")
        if self.tag_info["test_type"] == 1:
            self.case_tagging.tagMain(self.tag_info["global_tag"], dir_path,
                                      case_tagging_list)
Example #12
0
    def mainSegment(self, file_list, upload=False):
        '''
        Check, tag, segment and upload every data dir in *file_list*.

        :param file_list: dir names to process (all share input_data_path)
        :param upload: whether upload, default as False
        :return: 0 -> right ; 1 -> error (bare return when archived false)
        '''
        print("checkpoint1000")
        print(self.input_data_path)
        dir_path = self.input_data_path

        # check the dir whether right
        check_result, self.false_reason = self.checkRec(dir_path)
        print("calling checkRec")
        # Default so the post-loop code cannot hit an unbound name when
        # file_list is empty.
        segment = False
        for dir_name in file_list:
            print("checkpoint1006")
            print(dir_name)
            print(dir_path)

            # get the data-tag.json of checked dir
            tag_data = self.deterDirProperty(dir_path)
            # Bail out early: the original only checked for None after the
            # tag had already been dereferenced, so the check never fired.
            if tag_data is None:
                return 1
            if not check_result:
                if 'test_type' in tag_data.keys(
                ) and tag_data['test_type'] == 3:
                    pass
                if "issue_id" not in tag_data.keys(
                ) or tag_data["issue_id"] == []:
                    tag_data["issue_id"] = ["feature"]
                # Feature data keeps going despite a failed check; master /
                # task 30000 data is archived as false data instead.
                if tag_data["issue_id"] != ["repo_master"] \
                        and tag_data["task_id"] != 30000:
                    pass
                else:
                    self.falseDataArchive(dir_path, self.false_reason)
                    return

            if "backup" in tag_data:
                tag_data = tag_data["backup"][0]["data_tag"]
            tag_data["test_date"] = tag_data["test_date"].replace('-', '_')
            if not "global_tag" in tag_data:
                tag_data["global_tag"] = []

            ## generate evaluation result with different module
            if tag_data["test_type"] == 1 and self.config_["evaluation"]:
                pool1 = multiprocessing.Pool(processes=12)
                pool1.apply_async(
                    generate_evaluation_result.generateLocalizationEval,
                    args=(
                        dir_path,
                        self.config_,
                    ))
                pool1.close()
                pool1.join()
            if "issue_id" not in tag_data.keys() or tag_data["issue_id"] == []:
                tag_data["issue_id"] = ["feature"]
                saveTag(dir_path, tag_data)

            self.tag_info = tag_data
            self.tag_info["localization_result"] = self.getLocalizationResult(
                dir_path.replace(" ", ""))
            self.tag_info["control_result"] = {}
            self.tag_info["ap_result"] = {}
            self.tag_info["file_name"] = dir_name
            self.tag_info["disk_id"] = self.getDiskId(dir_path)
            self.addEvalTag(dir_path)
            self.tag_info["origin_record_tag"] = self.deDuplication(
                self.tag_info["origin_record_tag"])
            saveTag(dir_path, self.tag_info)
            self.post = True
            segment, upload = self.judgeIfSegmentAndUpload(
                self.tag_info, check_result)

        try:
            # Special route-overlap / object-count handling for one issue.
            if self.tag_info["test_type"] == 2 and self.tag_info[
                    "issue_id"] == ["AUTODRIVE-6814"]:
                overlap = self.reouteCheckFi(dir_path,
                                             self.tag_info["test_car_id"])
                self.tag_info['global_tag'].append(overlap)
                try:
                    self.tag_info = self.getObjectNumber(
                        dir_path, self.tag_info)
                except Exception:
                    pass
                if None in self.tag_info['global_tag']:
                    self.tag_info['global_tag'].remove(None)
                saveTag(dir_path, self.tag_info)
            print("checkpoint29")
            print(dir_path)
            print(dir_name)
            if segment:
                print("checkpoint30")
                self.segmentPreprationCollection(dir_name)
                print("checkpoint31")
                print(dir_path)
                print(dir_name)
            print("checkpoint32")
            self.mainUpload(dir_path, upload)
            print("checkpoint33")
        except Exception as e:
            print(traceback.format_exc())
            logger(1, str(e), LOG_FILE="upload_list/error.log")
            print(getTime() +
                  "\033[1;31m [ERROR]\033[0m segment or upload error ")
            return 1
        return 0
Example #13
0
def main():
    """Sum per-core module frame-rate statistics from every
    ``module_*.json`` under a fixed root, average them and write
    ``result.json``."""
    statics_result = {"test_duration": 0,
                      "test_mileage": 0,
                      'route': {},
                      "record_tag": {}}
    vnumber = {
        "fewer_objects": 0,
        "normal_objects": 0,
        "many_objects": 0
    }
    dir_path = "/media/sensetime/FieldTest1/data/04_02_CN-009_ARH/"
    tag_list = getMatchedFilePaths(dir_path, 'module_*', '.json', True)

    print(len(tag_list))
    result_list = {}
    processed = 0
    for tag_path in tag_list:
        tag_data = loadTag(tag_path, '')
        if tag_data is None:
            continue

        for cpu_core, rates in tag_data["module_eval"].items():
            stats = result_list.setdefault(
                cpu_core, {"stddev_frame_rate": 0, "avg_frame_rate": 0})
            if rates["stddev_frame_rate"] is not None \
                    and rates["avg_frame_rate"] is not None:
                stats["stddev_frame_rate"] += rates["stddev_frame_rate"]
                stats["avg_frame_rate"] += rates["avg_frame_rate"]

        processed += 1
        print(processed)
    # NOTE(review): the averages divide by a fixed 5 rather than the number
    # of processed files, exactly as the original did -- presumably five
    # reports per run; confirm before changing.
    for core in result_list:
        result_list[core]["avg_frame_rate"] = \
            result_list[core]["avg_frame_rate"] / 5
        result_list[core]["stddev_frame_rate"] = \
            result_list[core]["stddev_frame_rate"] / 5
    saveTag(dir_path, result_list, 'result.json')

    saveTag(dir_path, vnumber, 'statics_result_12.json')
Example #14
0
def main():
    """Cross-reference hand-labelled tags with localization evaluation results.

    For every record directory under ``input_path``:
      1. Load the manual tags (``12.json``) and the evaluator output
         (``localization_eval/evaluation_result.json``).
      2. For each manual tag, find all overlapping evaluator tags and keep
         the worst (maximum) value of each metric; save one JSON per tag.
      3. Plot all evaluator tag intervals for the record and save the figure.

    No parameters; returns None. Paths are hard-coded for the local dataset.
    """
    # tag_en -> metric field whose maximum we track. 'large_heading_error'
    # is handled separately because its key's unit suffix varies.
    metric_key_by_tag = {
        "longitudinal_jump": "max_longitudinal_jump(m/s)",
        "lateral_jump": "max_lateral_jump(m/s)",
        "heading_jump": "max_heading_jump(deg/s)",
        "large_longitudinal_error": "max_longitudinal_error(m)",
        "large_lateral_error": "max_lateral_error(m)",
    }

    input_path = '/media/sensetime/FieldTest/data/local_eval/true_label'
    file_list = getAllDataDir(input_path)

    for file_name in file_list:
        eval_file_path = os.path.join(input_path, file_name,
                                      'localization_eval')
        tag_file_path = os.path.join(input_path, file_name)
        eval_info = loadTag(eval_file_path, 'evaluation_result.json')
        tag_info = loadTag(tag_file_path, '12.json')

        for tag in tag_info["tags"]:
            print(file_name)
            save_dict = {}
            # Timestamps are truncated to 11 digits (0.1 s ticks) and the
            # window is widened by +/-25 ticks to catch edge events.
            start = int(str(int(tag["start"] * 1000))[0:11]) - 25
            if "end" in tag:
                end = int(str(int(tag["end"] * 1000))[0:11]) + 25
            else:
                # No explicit end: assume a 50-tick (5 s) window.
                end = start + 50
            for eval_item in eval_info["Tags"]:
                eval_start = int(str(int(eval_item["start"] * 1000))[0:11])
                eval_end = int(str(int(eval_item["end"] * 1000))[0:11])
                if not judgeIfOverLap(start, end, eval_start, eval_end):
                    continue

                tag_en = eval_item["tag_en"]
                if tag_en in metric_key_by_tag:
                    metric_key = metric_key_by_tag[tag_en]
                elif tag_en == "large_heading_error":
                    # The heading-error key's unit suffix is not fixed;
                    # locate the real key by its prefix before '('.
                    metric_key = "max_heading_error(m)"
                    for key in eval_item.keys():
                        if key.split('(')[0] == "max_heading_error":
                            metric_key = key
                else:
                    # Unknown tag types are ignored, as in the original chain.
                    continue

                value = eval_item[metric_key]
                # Keep the worst (largest) value seen for this tag type.
                if tag_en not in save_dict or save_dict[tag_en] < value:
                    save_dict[tag_en] = value
            saveTag(tag_file_path, save_dict,
                    tag["start_format"].replace(':', "_") + '.json')

        plot_dict = {
            "longitudinal_jump": [],
            "lateral_jump": [],
            "heading_jump": [],
            "large_longitudinal_error": [],
            "large_lateral_error": [],
            "large_heading_error": [],
            "large_time_gap": []
        }
        tag_file_path = os.path.join(input_path, file_name,
                                     'localization_eval')
        eval_tag = loadTag(tag_file_path, 'evaluation_result.json')
        if eval_tag is None:
            continue
        for tag in eval_tag["Tags"]:
            print(file_name)
            print(tag)
            # 1599000000 shifts the 11-digit timestamps to a small origin
            # (epoch offset, ~Sep 2020) so the plot x-axis stays readable.
            st = int(str(int(tag["start"] * 1000))[0:11]) - 1599000000
            ed = int(str(int(tag["end"] * 1000))[0:11]) - 1599000000
            plot_dict[tag["tag_en"]].append([st, ed])
        plt = plotMap(plot_dict)
        plt.savefig(tag_file_path + "/tag_file.png", dpi=120)
def getTrueList(file_path, check_result=True):
    '''
    Append one record's statistics to today's data report.

    Loads (or creates) ``../data_report/<today>.json``, adds an entry for the
    record at ``file_path`` (and its ``<file_path>_slice`` companion if one
    exists), accumulates total file size and test duration for the day, and
    writes the report back.

    :param file_path: directory of the record whose data-tag JSON to report on
    :param check_result: whether the record passed the data check; also keyed
        into the per-day "<check_result>_file_size(MB)" accumulator
    :return: None (returns early if the record's tag cannot be loaded)
    '''
    # if not judge_file_data(file_path):
    #     return
    if not os.path.exists('../data_report'):
        os.makedirs('../data_report')
    data_report_tag = loadTag('../data_report/', formatted_today + '.json')
    date = str(formatted_today)
    total_file_size = 0
    total_file_time_length = 0

    if date not in data_report_tag.keys():
        data_report_tag[date] = defaultdict(lambda: {})

    file_name = file_path.split('/', -1)[-1]
    tag_data = loadTag(file_path)
    if tag_data == {}:
        # No tag data: nothing to report for this record.
        return

    file_size = getFileSize(file_path)
    if 'test_duration' in tag_data.keys():
        file_time_length = tag_data.get('test_duration')
    else:
        # Missing duration: assume a 30-minute session.
        file_time_length = 1800
    if 'true' not in data_report_tag[date]:
        data_report_tag[date]['true'] = defaultdict(lambda: {})
    if file_name not in data_report_tag[date]['true']:
        data_report_tag[date]['true'][file_name] = defaultdict(lambda: {})
    data_report_tag[date]['true'][file_name]["upload_date"] = date
    data_report_tag[date]['true'][file_name]["data_type"] = 'raw'
    data_report_tag[date]['true'][file_name]["data_name"] = file_name
    data_report_tag[date]['true'][file_name]["test_car_id"] = tag_data["test_car_id"]
    data_report_tag[date]['true'][file_name]["check_result"] = check_result
    data_report_tag[date]['true'][file_name]["file_size(MB)"] = file_size
    data_report_tag[date]['true'][file_name]["data_link"] = tag_data["data_link"]

    total_file_size += file_size
    total_file_time_length += file_time_length / 60

    print(file_path + '_slice')
    if os.path.exists(file_path + '_slice'):
        # Mirror the raw entry for the segmented companion directory.
        slice_file_size = getFileSize(file_path + '_slice')
        slice_file_name = file_name + '_slice'
        if slice_file_name not in data_report_tag[date]['true']:
            data_report_tag[date]['true'][slice_file_name] = defaultdict(lambda: {})
        data_report_tag[date]['true'][slice_file_name]["upload_date"] = date
        data_report_tag[date]['true'][slice_file_name]["data_type"] = 'segment'
        data_report_tag[date]['true'][slice_file_name]["data_name"] = slice_file_name
        # BUG FIX: this previously wrote to the raw entry (file_name),
        # leaving the slice entry without a test_car_id.
        data_report_tag[date]['true'][slice_file_name]["test_car_id"] = tag_data["test_car_id"]
        data_report_tag[date]['true'][slice_file_name]["check_result"] = check_result
        data_report_tag[date]['true'][slice_file_name]["file_size(MB)"] = slice_file_size
        data_report_tag[date]['true'][slice_file_name]["data_link"] = tag_data["data_link"].replace('raw',
                                                                                                    'segment') + '_slice'

        total_file_size += slice_file_size

    # Per-day accumulators, keyed by check result for size.
    file_size_str = str(check_result) + "_file_size(MB)"
    if file_size_str not in data_report_tag[date].keys():
        data_report_tag[date][file_size_str] = 0
    data_report_tag[date][file_size_str] += total_file_size

    time_length_str = "Test_length(Min)"
    if time_length_str not in data_report_tag[date].keys():
        data_report_tag[date][time_length_str] = 0
    data_report_tag[date][time_length_str] += total_file_time_length

    print("\033[1;32m [INFO]\033[0m! generate data report successfully\n")
    saveTag('../data_report/', data_report_tag, formatted_today + '.json')