def my_main(_config):

    print(_config)

    dataset = "mot_train_"
    detections = "SDP"

    ##########################
    # Initialize the modules #
    ##########################
    
    print("[*] Beginning evaluation...")
    module_dir = get_output_dir('videos')
    results_dir = osp.join(module_dir, 'results')
    module_dir = osp.join(module_dir, 'normal_new')
    #output_dir = osp.join(results_dir, 'plots')
    #if not osp.exists(output_dir):
    #    os.makedirs(output_dir)

    #sequences_raw = ["MOT17-13", "MOT17-11", "MOT17-10", "MOT17-09", "MOT17-05", "MOT17-04", "MOT17-02", ]
    
    #sequences = ["{}-{}".format(s, detections) for s in sequences_raw]
    #sequences = sequences[:1]
    
    tracker = ["FRCNN_Base", "HAM_SADF17", "MOTDT17", "EDMT17", "IOU17", "MHT_bLSTM", "FWT_17", "jCC", "MHT_DAM_17"]
    tracker = ["Baseline", "BnW", "FWT_17", "jCC", "MOTDT17", "MHT_DAM_17"]
    tracker = ["Baseline", "BnW", "FWT_17", "jCC", "MOTDT17"]
    
    for t in tracker:
        print("[*] Evaluating {}".format(t))
        for db in Datasets(dataset):
            ################################
            # Make videos for each tracker #
            ################################

            s = "{}-{}".format(db, detections)

            gt_file = osp.join(cfg.DATA_DIR, "MOT17Labels", "train", s, "gt", "gt.txt")
            res_file = osp.join(results_dir, t, s+".txt")

            stDB = read_txt_to_struct(res_file)
            gtDB = read_txt_to_struct(gt_file)
            
            gtDB, distractor_ids = extract_valid_gt_data(gtDB)
            _, M, gtDB, stDB = evaluate_new(stDB, gtDB, distractor_ids)
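            # M holds per-frame dicts mapping gt ids to matched hypothesis ids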

            st_ids = np.unique(stDB[:, 1])
            gt_ids = np.unique(gtDB[:, 1])
            gt_frames = np.unique(gtDB[:, 0])
            f_gt = len(gt_frames)

            gt_inds = [{} for i in range(f_gt)]
            st_inds = [{} for i in range(f_gt)]

            # hash the indices to speed up indexing
            for i in range(gtDB.shape[0]):
                frame = np.where(gt_frames == gtDB[i, 0])[0][0]
                gid = np.where(gt_ids == gtDB[i, 1])[0][0]
                gt_inds[frame][gid] = i

            gt_frames_list = list(gt_frames)
            for i in range(stDB.shape[0]):
                # detections can be missing in some frames, so align results to the ground-truth frame ids
                frame = gt_frames_list.index(stDB[i, 0])
                sid = np.where(st_ids == stDB[i, 1])[0][0]
                st_inds[frame][sid] = i
            
            # set all ids of st to -1
            #stDB[:,1] = -1

            # set all results to corresponding gt id
            #for frame in range(f_gt):
            #    m = M[frame]
            #    for gid, sid in m.items():
            #        gt_row = gt_inds[frame][gid]
            #        gt_id = int(gtDB[gt_row, 1])
            #        st_row = st_inds[frame][sid]
            #        stDB[st_row, 1] = gt_id

            output_dir = osp.join(module_dir, t, s)
            if not osp.exists(output_dir):
                os.makedirs(output_dir)

            print("[*] Plotting whole sequence to {}".format(output_dir))

            # infinite color loop: the defaultdict hands every new track id its own style
            cyl = cy('ec', colors)
            loop_cy_iter = cyl()
            styles = defaultdict(lambda: next(loop_cy_iter))

            for frame, v in enumerate(db, 1):
                im_path = v['im_path']
                im_name = osp.basename(im_path)
                im_output = osp.join(output_dir, im_name)
                im = cv2.imread(im_path)
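                # OpenCV loads images as BGR; reorder the channels to RGB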
                im = im[:, :, (2, 1, 0)]

                sizes = np.shape(im)
                height = float(sizes[0])
                width = float(sizes[1])

                fig = plt.figure()
                #fig.set_size_inches(w,h)
                #fig.set_size_inches(width/height, 1, forward=False)
                #fig.set_size_inches(width/100, height/100)
                scale = width/640
                #fig.set_size_inches(640/100, height*scale/100)
                fig.set_size_inches(width/100, height/100)
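                # full-bleed axes: with figsize = pixels / 100 and dpi=100 at
                # save time, the written frame matches the input resolution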
                ax = plt.Axes(fig, [0., 0., 1., 1.])
                ax.set_axis_off()
                fig.add_axes(ax)
                ax.imshow(im)

                st_frame = stDB[stDB[:, 0] == frame]

                for j in range(st_frame.shape[0]):
                    box = st_frame[j, 2:6]
                    # raw tracker id (the gt-id remapping above is commented out)
                    gt_id = st_frame[j, 1]
                    ax.add_patch(
                        plt.Rectangle((box[0], box[1]),
                            box[2] - box[0],
                            box[3] - box[1], fill=False,
                            linewidth=1.3*scale, **styles[gt_id])
                    )

                plt.axis('off')
                #plt.tight_layout()
                plt.draw()
                plt.savefig(im_output, dpi=100)
                plt.close()
def my_main(_config, cnn):
    print(_config)

    ##########################
    # Initialize the modules #
    ##########################
    print("[*] Building CNN")

    network = resnet50(pretrained=True, **cnn['cnn'])
    # NOTE: `weights` (the checkpoint path) is not defined in this snippet; it
    # is expected to come from the surrounding Sacred config
    network.load_state_dict(torch.load(weights))
    network.eval()
    network.cuda()

    #########################
    # Initialize dataloader #
    #########################
    print("[*] Initializing Dataloader")

    output_dir = osp.join(get_output_dir('MOT_analysis'), 'siamese_dist')
    if not osp.exists(output_dir):
        os.makedirs(output_dir)

    results = []

    for db in Datasets("mot_train_", {'vis_threshold': 0.5}):
        print("[*] Evaluating {}".format(db))
        data = db.data
        data = build_samples(data)

        results_seq = []

        # ImageNet mean/std normalization; built once and reused for every person
        transformation = Compose([
            ToTensor(),
            Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])

        for person in data:

            images = []
            times = []
            for sample in person:
                im = cv2.cvtColor(sample[1], cv2.COLOR_BGR2RGB)
                im = Image.fromarray(im)
                im = transformation(im)
                images.append(im)
                times.append(sample[0])
            images = torch.stack(images, 0)

            # volatile=True marks inference in pre-0.4 PyTorch (no autograd graph)
            embeddings = network(Variable(images.cuda(),
                                          volatile=True)).data.cpu()

            n = embeddings.size(0)
            m = embeddings.size(0)
            d = embeddings.size(1)

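            # pairwise L2 distances via broadcasting: dist[i, j] = ||e_i - e_j||
            # (n == m since the embedding set is compared with itself)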
            x = embeddings.unsqueeze(1).expand(n, m, d)
            y = embeddings.unsqueeze(0).expand(n, m, d)

            dist = torch.sqrt(torch.pow(x - y, 2).sum(2))

            res = []

            for i in range(n):
                for j in range(n):
                    if i < j:
                        res_x = times[j] - times[i]
                        res_y = dist[i, j]
                        if res_x <= 100:  # keep pairs at most 100 frames apart (x_max below)
                            res.append([res_x, res_y])
            results_seq += res

        results += results_seq
        #r = np.array(results_seq)

    # build values for plot
    r = np.array(results)
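    # r[:, 0] = frame distance of a pair, r[:, 1] = its embedding distance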
    x_max = 100
    x_val = np.arange(1, x_max + 1)
    y_val = np.zeros(x_max)
    y_std = np.zeros(x_max)

    for x in x_val:
        vals = r[r[:, 0] == x, 1]
        mean = np.mean(vals)
        y_val[x - 1] = mean
        y_std[x - 1] = np.sqrt(np.mean((vals - mean)**2))
    #plt.scatter(x_val, y_val, s=1**2)
    plt.errorbar(x_val, y_val, yerr=y_std, fmt='o')
    plt.xlabel('frames distance')
    plt.ylabel('feature distance')
    plt.xlim((0, 100))

    # calculate variance
    #var_step = 10
    #x_var = np.arange(var_step/2, x_max, 10)
    #y_var = np.zeros(x_max//var_step)
    #for x in x_var:
    #    vals = r[(r[:,0] > x-var_step/2) * (r[:,0] <= x+var_step/2), 1]

    #    y_val[x-1] = y

    #plt.errorbar(x, y, yerr=yerr, fmt='o')
    #plt.ylim((0,10))
    #plt.savefig(osp.join(output_dir, "{}-{}.pdf".format(t, detections)), format='pdf')
    #plt.close()

    #plt.legend()
    plt.savefig(osp.join(output_dir, "dist_err.pdf"), format='pdf')
    plt.close()
def my_main(_config):

    print(_config)

    ##########################
    # Initialize the modules #
    ##########################

    print("[*] Beginning evaluation...")
    output_dir = osp.join(get_output_dir('MOT_analysis'), 'occlusion')
    if not osp.exists(output_dir):
        os.makedirs(output_dir)

    sequences_raw = [
        "MOT17-13",
        "MOT17-11",
        "MOT17-10",
        "MOT17-09",
        "MOT17-05",
        "MOT17-04",
        "MOT17-02",
    ]
    detections = "DPM"
    sequences = ["{}-{}".format(s, detections) for s in sequences_raw]

    tracker = [
        "FRCNN", "DMAN", "HAM_SADF17", "MOTDT17", "EDMT17", "IOU17",
        "MHT_bLSTM", "FWT_17", "jCC", "MHT_DAM_17"
    ]
    #tracker = ["FRCNN"]
    # "PHD_GSDL17" does not work, error
    #tracker = tracker[-4:]

    for t in tracker:
        print("[*] Evaluating {}".format(t))
        coverage = []
        id_recovered = []
        tr_id_recovered = []
        for s in sequences:
            ########################################
            # Get DPM / GT coverage for each track #
            ########################################

            gt_file = osp.join(cfg.DATA_DIR, "MOT17Labels", "train", s, "gt",
                               "gt.txt")
            det_file = osp.join(cfg.DATA_DIR, "MOT17Labels", "train", s, "det",
                                "det.txt")
            res_file = osp.join(output_dir, t, s + ".txt")

            #gtDB = read_txt_to_struct(gt_file)
            #gtDB = gtDB[gtDB[:,7] == 1]

            stDB = read_txt_to_struct(res_file)
            gtDB = read_txt_to_struct(gt_file)

            gtDB, distractor_ids = extract_valid_gt_data(gtDB)

            _, M = evaluate_new(stDB, gtDB, distractor_ids)

            gt_frames = np.unique(gtDB[:, 0])
            st_ids = np.unique(stDB[:, 1])
            gt_ids = np.unique(gtDB[:, 1])
            f_gt = len(gt_frames)
            n_gt = len(gt_ids)
            n_st = len(st_ids)

            gt_inds = [{} for i in range(f_gt)]
            st_inds = [{} for i in range(f_gt)]

            # hash the indices to speed up indexing
            for i in range(gtDB.shape[0]):
                frame = np.where(gt_frames == gtDB[i, 0])[0][0]
                gid = np.where(gt_ids == gtDB[i, 1])[0][0]
                gt_inds[frame][gid] = i

            # Loop through all gt tracks and find gaps (visibility < 0.5);
            # format: visible[track][frame] = {0, 1}
            visible = [[0 for j in range(f_gt)] for i in range(n_gt)]
            for gid in range(n_gt):
                for frame in range(f_gt):
                    if gid in gt_inds[frame]:
                        line = gt_inds[frame][gid]
                        vis = gtDB[line, 8]
                        #print(vis, frame, gid)
                        if vis >= 0.5:
                            visible[gid][frame] = 1

            # Find gaps in the tracks
            gt_tracked = {}
            for f, v in enumerate(M):
                for gt in v.keys():
                    if gt not in gt_tracked:
                        gt_tracked[gt] = []
                    gt_tracked[gt].append(f)

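            # gt_tracked[gid] lists the frame indices at which gt id gid was
            # matched; gaps in this list are stretches where the track was lost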
            for gid, times in gt_tracked.items():
                times = np.array(times)
                for i in range(len(times) - 1):
                    t0 = times[i]
                    t1 = times[i + 1]
                    if t1 == t0 + 1:
                        continue

                    last_non_empty = -1
                    for j in range(t0, -1, -1):
                        if gid in M[j].keys():
                            last_non_empty = j
                            break
                    next_non_empty = -1
                    for j in range(t1, f_gt):
                        if gid in M[j]:
                            next_non_empty = j
                            break

                    if next_non_empty != -1 and last_non_empty != -1:
                        sid0 = M[last_non_empty][gid]
                        sid1 = M[next_non_empty][gid]
                        if sid1 == sid0:
                            tr_id_recovered.append([t1 - t0 - 1, 1])
                        else:
                            tr_id_recovered.append([t1 - t0 - 1, 0])
            """for gid in range(n_gt):
                f0 = -1
                count = 0
                for frame in range(f_gt):
                    if gid in gt_inds[frame]:
                        vis = gtDB[gt_inds[frame][gid], 8]
                        if vis < 0.5 and f0 != -1:
                            count += 1
                        elif vis >= 0.5:
                            if count != 0:
                                print("Gap found {} - {} ({})".format(gid, frame, count))
                                count = 0
                            # set to current frame
                            f0 = frame"""

            # Now iterate through the tracks and check, for each occlusion gap,
            # whether the tracker kept covering the target and whether the id survived
            for gid, vis in enumerate(visible):
                f0 = -1
                count = 0
                n_cov = 0
                for frame, v in enumerate(vis):
                    if v == 0 and f0 != -1:
                        count += 1
                        if gid in M[frame].keys():
                            n_cov += 1
                    elif v == 1:
                        # gap ended
                        if count != 0:
                            coverage.append([count, n_cov])

                            last_non_empty = -1
                            for j in range(f0, -1, -1):
                                if gid in M[j].keys():
                                    last_non_empty = j
                                    break
                            next_non_empty = -1
                            for j in range(f0 + count + 1, f_gt):
                                if gid in M[j]:
                                    next_non_empty = j
                                    break

                            if next_non_empty != -1 and last_non_empty != -1:
                                sid0 = M[last_non_empty][gid]
                                sid1 = M[next_non_empty][gid]
                                if sid1 == sid0:
                                    id_recovered.append([count, 1])
                                else:
                                    id_recovered.append([count, 0])
                            count = 0
                            n_cov = 0
                        # set to current frame
                        f0 = frame

        coverage = np.array(coverage)
        id_recovered = np.array(id_recovered)
        tr_id_recovered = np.array(tr_id_recovered)

        #for c in coverage:
        #    print(c)
        xmax = 50

        # build values for plot
        x_val = np.arange(1, xmax + 1)
        y_val = np.zeros(xmax)

        for x in x_val:
            y = np.mean(coverage[coverage[:, 0] == x, 1] /
                        coverage[coverage[:, 0] == x, 0])
            y_val[x - 1] = y

        #plt.plot([0,1], [0,1], 'r-')
        plt.figure()
        plt.scatter(coverage[:, 0], coverage[:, 1] / coverage[:, 0], s=2**2)
        plt.plot(x_val, y_val, 'rx')
        plt.xlabel('gap length')
        plt.xlim((0, xmax))
        plt.ylabel('tracker coverage')
        plt.savefig(osp.join(output_dir,
                             "{}-{}-{}.pdf".format(t, detections, 'GAP_COV')),
                    format='pdf')
        plt.close()

        # build values for plot
        x_val = np.arange(1, xmax + 1)
        y_val = np.zeros(xmax)

        for x in x_val:
            y = np.mean(id_recovered[id_recovered[:, 0] == x, 1])
            y_val[x - 1] = y

        plt.figure()
        plt.plot(x_val, y_val, 'rx')
        plt.scatter(id_recovered[:, 0], id_recovered[:, 1], s=2**2)
        plt.xlabel('gt gap length')
        plt.xlim((0, xmax))
        plt.ylabel('part id recovered')
        plt.savefig(osp.join(output_dir,
                             "{}-{}-{}.pdf".format(t, detections, 'GAP_ID')),
                    format='pdf')
        plt.close()

        # tr id recovered
        x_val = np.arange(1, xmax + 1)
        y_val = np.zeros(xmax)

        for x in x_val:
            y = np.mean(tr_id_recovered[tr_id_recovered[:, 0] == x, 1])
            y_val[x - 1] = y

        plt.figure()
        plt.plot(x_val, y_val, 'rx')
        plt.scatter(tr_id_recovered[:, 0], tr_id_recovered[:, 1], s=2**2)
        plt.xlabel('track gap length')
        plt.xlim((0, xmax))
        plt.ylabel('part id recovered')
        plt.savefig(osp.join(output_dir,
                             "{}-{}-{}.pdf".format(t, detections,
                                                   'GAP_TR_ID')),
                    format='pdf')
        plt.close()
def my_main(_config):

    print(_config)

    ##########################
    # Initialize the modules #
    ##########################

    print("[*] Beginning evaluation...")
    output_dir = osp.join(get_output_dir('MOT_analysis'), 'coverage')
    if not osp.exists(output_dir):
        os.makedirs(output_dir)

    sequences_raw = [
        "MOT17-13",
        "MOT17-11",
        "MOT17-10",
        "MOT17-09",
        "MOT17-05",
        "MOT17-04",
        "MOT17-02",
    ]
    detections = "DPM"
    sequences = ["{}-{}".format(s, detections) for s in sequences_raw]

    tracker = [
        "FRCNN", "DMAN", "HAM_SADF17", "MOTDT17", "EDMT17", "IOU17",
        "MHT_bLSTM", "FWT_17", "jCC", "MHT_DAM_17"
    ]
    #tracker = ["DMAN"]

    for t in tracker:
        print("[*] Evaluating {}".format(t))
        data_points = []
        for s in sequences:
            ########################################
            # Get DPM / GT coverage for each track #
            ########################################

            gt_file = osp.join(cfg.DATA_DIR, "MOT17Labels", "train", s, "gt",
                               "gt.txt")
            dpm_file = osp.join(cfg.DATA_DIR, "MOT17Labels", "train", s, "det",
                                "det.txt")

            gtDB = read_txt_to_struct(gt_file)
            dpmDB = read_txt_to_struct(dpm_file)

            gtDB, distractor_ids = extract_valid_gt_data(gtDB)
            dpmDB, gtDB = preprocessingDB(dpmDB, gtDB, distractor_ids, 0.5, 0)

            gt_ids = np.unique(gtDB[:, 1])

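            # per-gt-id counters: gt_ges ("gesamt", i.e. total boxes), gt_matched
            # (boxes matched by a detection), gt_tracked (resulting coverage ratio)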
            gt_ges = {int(i): 0 for i in gt_ids}
            gt_matched = {int(i): 0 for i in gt_ids}
            gt_tracked = {int(i): 0 for i in gt_ids}

            track_frames = np.unique(dpmDB[:, 0])
            gt_frames = np.unique(gtDB[:, 0])
            nframes = min(len(track_frames), len(gt_frames))
            for i in range(1, nframes + 1):
                # find all result boxes in this frame
                res_in_frame = np.where(dpmDB[:, 0] == i)[0]
                res_in_frame_data = dpmDB[res_in_frame, :]
                gt_in_frame = np.where(gtDB[:, 0] == i)[0]
                gt_in_frame_data = gtDB[gt_in_frame, :]

                #for gt in gt_in_frame_data:
                #    gt_ges[int(gt[1])] += 1

                res_num = res_in_frame.shape[0]
                gt_num = gt_in_frame.shape[0]
                overlaps = np.zeros((res_num, gt_num), dtype=float)
                for gid in range(gt_num):
                    overlaps[:, gid] = bbox_overlap(res_in_frame_data[:, 2:6],
                                                    gt_in_frame_data[gid, 2:6])
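                # Hungarian matching on the cost 1 - IoU; only pairs with
                # IoU > 0.5 count the gt box as covered in this frame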
                matched_indices = linear_assignment(1 - overlaps)
                for matched in matched_indices:
                    # overlap lower than threshold, discard the pair
                    if overlaps[matched[0], matched[1]] > 0.5:
                        gt_id = int(gt_in_frame_data[matched[1], 1])
                        gt_matched[gt_id] += 1

            for k in gt_ids:
                gt_ges[k] = len(np.where(gtDB[:, 1] == k)[0])
                gt_tracked[k] = gt_matched[k] / gt_ges[k]

            res_file = osp.join(output_dir, t, s + ".txt")

            gtDB = read_txt_to_struct(gt_file)
            trackDB = read_txt_to_struct(res_file)
            gtDB, distractor_ids = extract_valid_gt_data(gtDB)
            trackDB, gtDB = preprocessingDB(trackDB, gtDB, distractor_ids, 0.5,
                                            0)

            tr_matched = {int(i): 0 for i in gt_ids}
            tr_tracked = {int(i): 0 for i in gt_ids}

            track_frames = np.unique(trackDB[:, 0])
            gt_frames = np.unique(gtDB[:, 0])
            nframes = min(len(track_frames), len(gt_frames))
            for i in range(1, nframes + 1):
                # find all result boxes in this frame
                res_in_frame = np.where(trackDB[:, 0] == i)[0]
                res_in_frame_data = trackDB[res_in_frame, :]
                gt_in_frame = np.where(gtDB[:, 0] == i)[0]
                gt_in_frame_data = gtDB[gt_in_frame, :]

                res_num = res_in_frame.shape[0]
                gt_num = gt_in_frame.shape[0]
                overlaps = np.zeros((res_num, gt_num), dtype=float)
                for gid in range(gt_num):
                    overlaps[:, gid] = bbox_overlap(res_in_frame_data[:, 2:6],
                                                    gt_in_frame_data[gid, 2:6])
                matched_indices = linear_assignment(1 - overlaps)
                for matched in matched_indices:
                    # overlap lower than threshold, discard the pair
                    if overlaps[matched[0], matched[1]] > 0.5:
                        gt_id = int(gt_in_frame_data[matched[1], 1])
                        tr_matched[gt_id] += 1

            for k in gt_ids:
                data_points.append([gt_tracked[k], tr_matched[k] / gt_ges[k]])

        data_points = np.array(data_points)
        # add mean values
        grid_step = 0.02
        grid = np.arange(-grid_step / 2, 1.0 + grid_step, grid_step)
        x_mean = np.arange(0.0, 1.0 + grid_step, grid_step)
        bins = int(1.0 / grid_step) + 1
        y_mean = np.zeros(bins)
        y_std = np.zeros(bins)
        for i in range(bins):
            in_bin = (data_points[:, 0] >= grid[i]) * (data_points[:, 0] <
                                                       grid[i + 1])
            # mean / std of the tracker-coverage values that fall into this bin
            vals = data_points[in_bin, 1]
            y_mean[i] = np.mean(vals)
            y_std[i] = np.std(vals)

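        # Fit a degree-5 polynomial to the binned means and integrate it with
        # Simpson's rule; the area serves as a scalar summary in the legend
        # (note: empty bins yield NaN means, which would break the fit)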
        y_poly = np.poly1d(np.polyfit(x_mean, y_mean, 5))
        x_new = np.linspace(0, 1, num=101, endpoint=True)

        area = simps(y_poly(x_new), x_new)

        plt.plot(x_new, y_poly(x_new), label="{} {:.3f}".format(t, area))
        #plt.errorbar(x_mean, y_poly(x_mean), yerr=y_std, fmt='o')
        #if t == "FRCNN":
        #    plt.plot(x_mean, y_mean)
        #plt.plot([0,1], [0,1], 'r-')
        #plt.scatter(data_points[:,0], data_points[:,1], s=2**2)
        #plt.xlabel('{} coverage'.format(detections))
        #plt.ylabel('tracker coverage')
        #plt.savefig(osp.join(output_dir, "{}-{}.pdf".format(t, detections)), format='pdf')
        #plt.close()

    plt.plot([0, 1], [0, 1], 'r-')
    plt.legend()
    plt.xlabel('{} coverage'.format(detections))
    plt.ylabel('tracker coverage')
    plt.savefig(osp.join(output_dir, "coverage-{}.pdf".format(detections)),
                format='pdf')
    plt.close()
def my_main(oracle_tracker, siamese, _config):
    # set all seeds
    torch.manual_seed(oracle_tracker['seed'])
    torch.cuda.manual_seed(oracle_tracker['seed'])
    np.random.seed(oracle_tracker['seed'])
    torch.backends.cudnn.deterministic = True

    print(_config)

    ##########################
    # Initialize the modules #
    ##########################

    print("[*] Building FRCNN")

    if oracle_tracker['network'] == 'vgg16':
        frcnn = vFRCNN()
    elif oracle_tracker['network'] == 'res101':
        frcnn = rFRCNN(num_layers=101)
    else:
        raise NotImplementedError("Network not understood: {}".format(
            oracle_tracker['network']))

    frcnn.create_architecture(2,
                              tag='default',
                              anchor_scales=frcnn_cfg.ANCHOR_SCALES,
                              anchor_ratios=frcnn_cfg.ANCHOR_RATIOS)
    frcnn.eval()
    frcnn.cuda()
    frcnn.load_state_dict(torch.load(oracle_tracker['frcnn_weights']))

    cnn = resnet50(pretrained=False, **siamese['cnn'])
    cnn.load_state_dict(torch.load(oracle_tracker['cnn_weights']))
    cnn.eval()
    cnn.cuda()
    tracker = Tracker(frcnn=frcnn, cnn=cnn)
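    # the tracker couples FRCNN detections with the Siamese CNN, whose
    # appearance embeddings are used for re-identification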

    output_dir = osp.join(get_output_dir(oracle_tracker['module_name']),
                          oracle_tracker['name'])

    sacred_config = osp.join(output_dir, 'sacred_config.yaml')

    if not osp.exists(output_dir):
        os.makedirs(output_dir)
    with open(sacred_config, 'w') as outfile:
        yaml.dump(_config, outfile, default_flow_style=False)

    print("[*] Beginning evaluation...")

    time_ges = 0

    #train = ["MOT17-13", "MOT17-11", "MOT17-10", "MOT17-09", "MOT17-05", "MOT17-04", "MOT17-02", ]

    analysis_results = []

    for db in Datasets(oracle_tracker['dataset']):
        tracker.reset()

        now = time.time()

        print("[*] Evaluating: {}".format(db))

        #db = MOT_Sequence(s)

        dl = DataLoader(db, batch_size=1, shuffle=False)
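        # feed the sequence frame by frame; the tracker keeps state across step() calls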
        for sample in dl:
            tracker.step(sample)
        results, debug = tracker.get_results()

        if oracle_tracker['interpolate']:
            results = interpolate(results)

        #print("[!] Killed {} tracks by NMS".format(tracker.nms_killed))

        print("Tracks found: {}".format(len(results)))
        print("[!] Killed {} tracks by NMS".format(tracker.nms_killed))
        #print("[*] Time needed for {} evaluation: {:.3f} s".format(s, time.time() - now))

        #db.write_results(results, osp.join(output_dir))

        #if oracle_tracker['write_images']:
        #    plot_sequence(results, db, osp.join(output_dir, s))
        gt_file = osp.join(cfg.DATA_DIR, "MOT17Labels", "train",
                           str(db) + "-DPM", "gt", "gt.txt")
        (switches, gt_mean_height, gt_mean_vis, missed_height, missed_vis,
         missed_dist) = evaluate_new(results, gt_file)

        #with open(osp.join(output_dir, str(s)+"_switches.txt"), "w") as of:
        #    for t,sw in enumerate(switches):
        #        of.write("{}:       {}\n".format(t, sw))

        with open(osp.join(output_dir, str(db) + "_debug.txt"), "w") as of:
            for i, track in debug.items():
                of.write("Track id: {}\n".format(i))
                for im_index, data in track.items():
                    of.write("Frame: {}\n".format(im_index))
                    of.write("Pos: {}\n".format(data["pos"]))
                    of.write("{}".format(data["info"]))
                of.write("\n\n")

        sizes = []
        gt_ids = []
        gt_heights = []
        gt_viss = []
        reason_loose = {'NMS': 0, 'score': 0, 'regression': 0}
        reason_find = {'created': 0, 'reid': 0, 'regression': 0}
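        # tally why tracks were lost (low score / NMS / regression) and how the
        # switched-to tracks appeared (newly created / re-identified / regressed)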

        with open(osp.join(output_dir, str(db) + "_sum.txt"), "w") as of:
            #for f1,sw in enumerate(switches, 1):
            for f1, sw in switches.items():
                of.write("[*] Frame: {}\n".format(f1))
                for t1, (t0, f0, gt_id, gt_height, gt_vis) in sw.items():
                    gt_heights.append(gt_height)
                    gt_ids.append(gt_id)
                    gt_viss.append(gt_vis)
                    of.write(
                        "ID switch from track {} to {} with size {}\n".format(
                            t0, t1, gt_height))
                    #of.write("Old Track: {}\n".format(debug[t0-1][f0-1]["pos"]))
                    #of.write("{}".format(debug[t0-1][f0-1]["info"]))
                    #debug[t1-1][f1-1]["pos"][3]-debug[t1-1][f1-1]["pos"][1]
                    of.write("Old Track:\n")
                    for frame, value in debug[t0 - 1].items():
                        if frame in (f0 - 1, f0, f0 + 1):
                            of.write("{} {}\n{}".format(
                                frame, value["pos"], value["info"]))
                        if frame == f0:
                            if "too low score" in value["info"]:
                                reason_loose['score'] += 1
                            elif "NMS" in value["info"]:
                                reason_loose['NMS'] += 1
                            elif "Regressing" in value["info"]:
                                reason_loose['regression'] += 1
                    #of.write("{} {}\n{}".format(f0, debug[t0-1][f0-1]["pos"], debug[t0-1][f0-1]["info"]))
                    #of.write("{} {}\n{}".format(f0, debug[t0-1][f0]["pos"], debug[t0-1][f0]["info"]))
                    #of.write("{} {}\n{}".format(f0, debug[t0][f0-1]["pos"], debug[t0][f0-1]["info"]))

                    of.write("\nNew Track:\n")
                    for frame, value in debug[t1 - 1].items():
                        if frame in (f1 - 3, f1 - 2, f1 - 1):
                            of.write("{} {}\n{}".format(
                                frame, value["pos"], value["info"]))
                        if frame == f1 - 1:
                            if "Created" in value['info']:
                                reason_find['created'] += 1
                            elif "ReIded" in value["info"]:
                                reason_find["reid"] += 1
                            elif "Regressing" in value["info"]:
                                reason_find['regression'] += 1
                    #of.write("{} {}\n{}".format(f1, debug[t1-1][f1-1]["pos"], debug[t1-1][f1-1]["info"]))

                    #of.write("New Track: {}\n".format(debug[t1-1][f1-1]["pos"]))
                    #of.write("{}\n".format(debug[t1-1][f1-1]["info"]))
                    of.write("\n")
                of.write("\n\n")
            #of.write("Sizes: {}\n".format(sizes))
        print("[*] Number of ID switches: {}".format(len(gt_ids)))
        print("[*] Number of gt targets involved in ID switches: {}".format(
            len(np.unique(gt_ids))))
        print("[*] Mean height of ID switches: {}".format(np.mean(gt_heights)))
        print("[*] Mean visibility of ID switches targets: {}".format(
            np.mean(gt_viss)))
        print("[*] Mean height of tracked boxes: {}".format(
            np.mean(gt_mean_height)))
        print("[*] Mean visibility of tracked boxes: {}".format(
            np.mean(gt_mean_vis)))
        print("[*] Reasons for ID swtiches:")
        print("\tLoose:")
        for k, v in reason_loose.items():
            print("\t\t{}: {}".format(k, v))
        print("\tFind:")
        for k, v in reason_find.items():
            print("\t\t{}: {}".format(k, v))
        print(gt_ids)

        # dataset names look like "mot_train_<DET>"; recover the detector tag
        detections = oracle_tracker['dataset'].split("_")[2]
        ar = [
            str(db) + "-" + detections,
            len(gt_ids), tracker.nms_killed, reason_loose['score'],
            reason_loose['NMS'], reason_loose['regression'],
            reason_find['created'], reason_find["reid"],
            reason_find['regression'],
            np.mean(gt_mean_height),
            np.mean(gt_mean_vis),
            np.mean(gt_heights),
            np.mean(gt_viss), missed_height, missed_vis, missed_dist
        ]

        analysis_results.append(ar)

        print(ar)

        db.write_results(results, output_dir)

        if oracle_tracker['write_images']:
            plot_sequence(results, db, osp.join(output_dir, str(db)))

    # print results to csv
    file = osp.join(output_dir, oracle_tracker['dataset'])
    with open(file, "w") as of:
        writer = csv.writer(of, delimiter=' ')
        for row in analysis_results:
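            # write decimals with commas (e.g. for spreadsheet locales expecting them)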
            row_new = list(map(lambda x: str(x).replace(".", ","), row))
            writer.writerow(row_new)