Example 1
    def initOut(self):
        print '     Loading Data...'
        self.a_train_set = ADatasetFromFolder(
            sequence_dir, '../MOT/MOT16/test/MOT16-%02d' % self.seq_index)
        self.m_train_set = MDatasetFromFolder(
            sequence_dir, '../MOT/MOT16/test/MOT16-%02d' % self.seq_index)

        detection_dir = self.out_dir + 'res_training_det.txt'
        res_training = self.out_dir + 'res_training.txt'  # the result of the training data
        self.createTxt(detection_dir)
        self.createTxt(res_training)
        self.copyLines(self.seq_index, 1, detection_dir, self.tt, 1)

        self.evaluation(1, self.tt, detection_dir, res_training)
Example 2
    def initOut(self):
        print('     Loading Data...')
        self.a_train_set = ADatasetFromFolder(sequence_dir, mot_dataset_dir + 'MOT16/test/MOT16-%02d' % self.seq_index,
                                              tau_conf_score)
        self.m_train_set = MDatasetFromFolder(sequence_dir, mot_dataset_dir + 'MOT16/test/MOT16-%02d' % self.seq_index,
                                              tau_conf_score)

        detection_dir = self.out_dir + 'res_det.txt'
        res_training = self.out_dir + 'res.txt'  # the tracking results
        self.createTxt(detection_dir)
        self.createTxt(res_training)
        self.copyLines(self.seq_index, 1, detection_dir, self.seq_len, 1)

        self.evaluation(1, self.seq_len, detection_dir, res_training)
Example 3
    def initOut(self):
        for i in xrange(1, 9):
            start_t = time.time()
            self.camera = i
            self.bbx_counter = 0

            print '     Loading the model...'
            self.loadAModel()
            self.loadMModel()

            print '     Loading Data...'
            self.a_train_set = ADatasetFromFolder(i, self.tau_conf_score)
            self.m_train_set = MDatasetFromFolder(i, self.tau_conf_score)

            self.evaluation()
            print '     This camera:', self.camera, '- Time:', (time.time() - start_t)/60
Example 4
class GN():
    def __init__(self, seq_index, tt, a, cuda=True):
        '''
        Evaluating with the MotMetrics
        :param seq_index: the number of the sequence
        :param tt: the number of frames used for training / testing
        :param a: the weight alpha balancing the appearance and motion scores
        :param cuda: True - GPU, False - CPU
        '''
        self.bbx_counter = 0
        self.seq_index = seq_index
        self.hungarian = Munkres()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.tt = tt
        self.alpha = a
        self.missingCounter = 0
        self.sideConnection = 0

        print '     Loading the model...'
        self.loadAModel()
        self.loadMModel()

        self.out_dir = t_dir + 'motmetrics_%s_show/' % (type)
        print '		', self.out_dir
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)
        else:
            deleteDir(self.out_dir)
            os.mkdir(self.out_dir)
        self.initWin()
        self.initOut()

    def initWin(self):
        self.color = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]
        self.img_dir = '../MOT/MOT16/test/MOT16-%02d/img1/' % self.seq_index
        self.pre_win = 'Show/p0'

        self.cur_win = 'Show/p1'

    def initOut(self):
        print '     Loading Data...'
        self.a_train_set = ADatasetFromFolder(
            sequence_dir, '../MOT/MOT16/test/MOT16-%02d' % self.seq_index,
            tau_conf_score)
        self.m_train_set = MDatasetFromFolder(
            sequence_dir, '../MOT/MOT16/test/MOT16-%02d' % self.seq_index,
            tau_conf_score)

        # gt_training = self.out_dir + 'gt_training.txt'  # the gt of the training data
        # self.copyLines(self.seq_index, 1, gt_training, self.tt)

        detection_dir = self.out_dir + 'res_training_det.txt'
        res_training = self.out_dir + 'res_training.txt'  # the result of the training data
        self.createTxt(detection_dir)
        self.createTxt(res_training)
        self.copyLines(self.seq_index, 1, detection_dir, self.tt, 1)

        self.evaluation(1, self.tt, detection_dir, res_training)

    def getSeqL(self, info):
        # get the length of the sequence
        f = open(info, 'r')
        f.readline()
        for line in f.readlines():
            line = line.strip().split('=')
            if line[0] == 'seqLength':
                seqL = int(line[1])
        f.close()
        return seqL

    def copyLines(self, seq, head, gt_seq, tail=-1, tag=0):
        '''
        Copy the ground truth within [head, tail]
        :param seq: the number of the sequence
        :param head: the head frame number
        :param tail: the tail frame number of the clipped sequence (-1 for the full sequence length)
        :param gt_seq: the path of the output file
        :param tag: 0 - copy the ground truth, 1 - copy the detections
        :return: the sequence length actually used
        '''
        if tt_tag:
            basic_dir = '../MOT/MOT%d/test/MOT%d-%02d-%s/' % (year, year, seq,
                                                              type)
        else:
            basic_dir = '../MOT/MOT%d/train/MOT%d-%02d-%s/' % (year, year, seq,
                                                               type)
        print '     Testing on', basic_dir, 'Length:', self.tt
        seqL = tail if tail != -1 else self.getSeqL(basic_dir + 'seqinfo.ini')

        det_dir = 'gt/gt_det.txt' if test_gt_det else 'det/det.txt'
        seq_dir = basic_dir + ('gt/gt.txt' if tag == 0 else det_dir)
        inStream = open(seq_dir, 'r')

        outStream = open(gt_seq, 'w')
        for line in inStream.readlines():
            line = line.strip()
            attrs = line.split(',')
            f_num = int(attrs[0])
            if f_num >= head and f_num <= seqL:
                print >> outStream, line
        outStream.close()

        inStream.close()
        return seqL

    def createTxt(self, out_file):
        f = open(out_file, 'w')
        f.close()

    def loadAModel(self):
        if edge_initial == 0:
            model_dir = 'App2_bb'
            name = '%s_7' % app_dir
            i_name = 'IoU'
        elif edge_initial == 1:
            model_dir = 'App2_bb'
            name = '%s_7' % app_dir
            i_name = 'Random'
        tail = 13
        self.AUphi = torch.load('../%s/Results/MOT16/%s/%s/uphi_%02d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.AVphi = torch.load('../%s/Results/MOT16/%s/%s/vphi_%02d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.AEphi1 = torch.load('../%s/Results/MOT16/%s/%s/ephi1_%02d.pth' %
                                 (model_dir, i_name, name, tail)).to(
                                     self.device)
        self.AEphi2 = torch.load('../%s/Results/MOT16/%s/%s/ephi2_%02d.pth' %
                                 (model_dir, i_name, name, tail)).to(
                                     self.device)
        self.Au = torch.load('../%s/Results/MOT16/%s/%s/u_%02d.pth' %
                             (model_dir, i_name, name, tail))
        self.Au = self.Au.to(self.device)

    def loadMModel(self):
        if edge_initial == 0:
            model_dir = 'Motion1_bb'
            name = 'all_7'
            i_name = 'IoU'
        elif edge_initial == 1:
            model_dir = 'Motion1_bb'
            name = 'all_7'
            i_name = 'Random'
        tail = 13
        self.MUphi = torch.load('../%s/Results/MOT16/%s/%s/uphi_%d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.MEphi = torch.load('../%s/Results/MOT16/%s/%s/ephi_%d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.Mu = torch.load('../%s/Results/MOT16/%s/%s/u_%d.pth' %
                             (model_dir, i_name, name, tail))
        self.Mu = self.Mu.to(self.device)

    def swapFC(self):
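        # Swap the buffer indices self.cur and self.nxt (0 and 1) in place via the XOR-swap trick.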
        self.cur = self.cur ^ self.nxt
        self.nxt = self.cur ^ self.nxt
        self.cur = self.cur ^ self.nxt

    def linearModel(self, out, attr1, attr2):
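        # Linearly interpolate the bounding box from attr1 (last seen) to attr2 (re-matched) across the t-frame gap, writing one recovered detection per missing frame.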
        # print 'I got you! *.*'
        t = attr1[-1]
        self.sideConnection += 1
        if t > f_gap:
            return
        frame = int(attr1[0])
        x1, y1, w1, h1 = float(attr1[2]), float(attr1[3]), float(
            attr1[4]), float(attr1[5])
        x2, y2, w2, h2 = float(attr2[2]), float(attr2[3]), float(
            attr2[4]), float(attr2[5])

        x_delta = (x2 - x1) / t
        y_delta = (y2 - y1) / t
        w_delta = (w2 - w1) / t
        h_delta = (h2 - h1) / t

        for i in xrange(1, t):
            frame += 1
            x1 += x_delta
            y1 += y_delta
            w1 += w_delta
            h1 += h_delta
            attr1[0] = str(frame)
            attr1[2] = str(x1)
            attr1[3] = str(y1)
            attr1[4] = str(w1)
            attr1[5] = str(h1)
            line = ''
            for attr in attr1[:-1]:
                line += attr + ','
            if show_recovering:
                line += '1'
            else:
                line = line[:-1]
            print >> out, line
            self.bbx_counter += 1
        self.missingCounter += t - 1

    def evaluation(self, head, tail, gtFile, outFile):
        '''
        Evaluation on dets
        :param head: the head frame number
        :param tail: the tail frame number
        :param gtFile: the ground truth file name
        :param outFile: the name of output file
        :return: None
        '''
        gtIn = open(gtFile, 'r')
        self.cur, self.nxt = 0, 1

        imgs = [None, None]  # 0 - previous img, 1 - current img
        going_tag = 0  # 0 - frame by frame, 1 - goto going_f

        line_con = [[], []]
        id_con = [[], []]
        id_step = 1

        a_step = head + self.a_train_set.setBuffer(head)
        m_step = head + self.m_train_set.setBuffer(head)
        if a_step != m_step:
            print 'Something is wrong!'
            print 'a_step =', a_step, ', m_step =', m_step
            raw_input('Continue?')

        imgs[self.cur] = readImg(self.img_dir + '%06d.jpg' % a_step)
        going_f = a_step
        while a_step < tail:
            # print '*********************************'
            if going_f <= a_step:
                going_tag = 0

            a_t_gap = self.a_train_set.loadNext()
            m_t_gap = self.m_train_set.loadNext()
            if a_t_gap != m_t_gap:
                print 'Something is wrong!'
                print 'a_t_gap =', a_t_gap, ', m_t_gap =', m_t_gap
                raw_input('Continue?')
            a_step += a_t_gap
            m_step += m_t_gap  # keep the motion-stream frame counter in sync with a_step
            # print head+step, 'F',

            m_u_ = self.MUphi(self.m_train_set.E, self.m_train_set.V, self.Mu)

            # print 'Fo'
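            # a_m / a_n: number of kept detections in the previous / current frame of the appearance stream (m_m / m_n for the motion stream).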
            a_m = self.a_train_set.m
            a_n = self.a_train_set.n
            m_m = self.m_train_set.m
            m_n = self.m_train_set.n

            if a_m != m_m or a_n != m_n:
                print 'Something is wrong!'
                print 'a_m = %d, m_m = %d' % (
                    a_m, m_m), ', a_n = %d, m_n = %d' % (a_n, m_n)
                raw_input('Continue?')
            # print 'm = %d, n = %d'%(m, n)
            if a_n == 0:
                print 'There is no detection in the rest of sequence!'
                break

            if id_step == 1:
                out = open(outFile, 'a')
                i = 0
                while i < a_m:
                    attrs = gtIn.readline().strip().split(',')
                    if float(attrs[6]) >= tau_conf_score:
                        attrs.append(1)
                        attrs[1] = str(id_step)
                        line = ''
                        for attr in attrs[:-1]:
                            line += attr + ','
                        if show_recovering:
                            line += '0'
                        else:
                            line = line[:-1]
                        print >> out, line
                        self.bbx_counter += 1

                        # draw the rectangle
                        x, y = int(float(attrs[2])), int(float(attrs[3]))
                        w, h = int(float(attrs[4])), int(float(attrs[5]))
                        cv2.rectangle(imgs[self.cur], (x, y), (x + w, y + h),
                                      self.color[0], 2)
                        cv2.putText(imgs[self.cur], attrs[1] + '_B',
                                    (x + 3, y + 15), font, 0.6, self.color[0],
                                    2, cv2.LINE_AA)

                        line_con[self.cur].append(attrs)
                        id_con[self.cur].append(id_step)
                        id_step += 1
                        i += 1
                out.close()

            print '     Frame:', a_step
            print id_con[self.cur]
            imgs[self.nxt] = readImg(self.img_dir + '%06d.jpg' % a_step)
            i = 0
            while i < a_n:
                attrs = gtIn.readline().strip().split(',')
                if float(attrs[6]) >= tau_conf_score:
                    # if int(attrs[0]) != a_step:
                    #     print attrs
                    #     print 'Something is Wrong! %d != %d'%(int(attrs[0]), a_step)
                    attrs.append(1)
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(-1)
                    i += 1

            # update the edges
            # print 'T',
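            # First message-passing round on the appearance graph: ephi1 refines each candidate edge, vphi refines its receiving node, and the aggregated edge/node features update the global feature u1 below.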
            candidates = []
            E_CON, V_CON = [], []
            for edge in self.a_train_set.candidates:
                e, vs_index, vr_index = edge
                e = e.view(1, -1).to(self.device)
                vs = self.a_train_set.getApp(1, vs_index)
                vr = self.a_train_set.getApp(0, vr_index)

                e1 = self.AEphi1(e, vs, vr, self.Au)
                vr1 = self.AVphi(e1, vs, vr, self.Au)
                candidates.append((e1, vs, vr1, vs_index, vr_index))
                E_CON.append(e1)
                V_CON.append(vs)
                V_CON.append(vr1)

            E = self.a_train_set.aggregate(E_CON).view(1, -1)
            V = self.a_train_set.aggregate(V_CON).view(1, -1)
            u1 = self.AUphi(E, V, self.Au)

            ret = self.a_train_set.getRet()
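            # decay_tag[i] counts the candidate links left open (ret == 0) for previous detection i; when several remain, the scores below are decayed by the track's time gap.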
            decay_tag = [0 for i in xrange(a_m)]
            for i in xrange(a_m):
                for j in xrange(a_n):
                    if ret[i][j] == 0:
                        decay_tag[i] += 1

            for i in xrange(len(self.a_train_set.candidates)):
                e1, vs, vr1, a_vs_index, a_vr_index = candidates[i]
                m_e, m_vs_index, m_vr_index = self.m_train_set.candidates[i]
                if a_vs_index != m_vs_index or a_vr_index != m_vr_index:
                    print 'Something is wrong!'
                    print 'a_vs_index = %d, m_vs_index = %d' % (a_vs_index,
                                                                m_vs_index)
                    print 'a_vr_index = %d, m_vr_index = %d' % (a_vr_index,
                                                                m_vr_index)
                    raw_input('Continue?')

                # if id_con[self.cur][a_vs_index] == 6 and a_step == 38:
                #     print '     ******'
                #     print ' Pre-ret[%d][*]'%id_con[self.cur][a_vs_index], ret[a_vs_index][a_vr_index],

                if ret[a_vs_index][a_vr_index] == tau_threshold:
                    # if id_con[self.cur][a_vs_index] == 6 and a_step == 38:
                    #     print ''
                    continue

                e2 = self.AEphi2(e1, vs, vr1, u1)
                self.a_train_set.edges[a_vs_index][a_vr_index] = e1.data.view(
                    -1)

                a_tmp = F.softmax(e2)
                a_tmp = a_tmp.cpu().data.numpy()[0]

                m_e = m_e.to(self.device).view(1, -1)
                m_v1 = self.m_train_set.getMotion(1, m_vs_index)
                m_v2 = self.m_train_set.getMotion(
                    0, m_vr_index, m_vs_index,
                    line_con[self.cur][m_vs_index][-1])
                m_e_ = self.MEphi(m_e, m_v1, m_v2, m_u_)
                self.m_train_set.edges[m_vs_index][
                    m_vr_index] = m_e_.data.view(-1)
                m_tmp = F.softmax(m_e_)
                m_tmp = m_tmp.cpu().data.numpy()[0]

                t = line_con[self.cur][a_vs_index][-1]
                if decay_tag[a_vs_index] > 2:
                    A = min(float(a_tmp[0]) * pow(decay, t - 1), 1.0)
                    M = min(float(m_tmp[0]) * pow(decay, t - 1), 1.0)
                else:
                    A = float(a_tmp[0])
                    M = float(m_tmp[0])
                ret[a_vs_index][a_vr_index] = A * self.alpha + M * (1 -
                                                                    self.alpha)
                # if id_con[self.cur][a_vs_index] == 6 and a_step == 38:
                #     print ' Cur-ret[%d][*]'%id_con[self.cur][a_vs_index], ret[a_vs_index][a_vr_index]
                #     print ' The a is:%f, b is:%f' % (a_tmp[0], m_tmp[0])
                #     print ' The A is:%f, B is:%f' % (A, M)

            # self.a_train_set.showE(outFile)
            # self.m_train_set.showE(outFile)

            # for j in ret:
            #     print j
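            # Minimum-cost assignment between previous detections (rows) and current detections (columns) on the fused appearance/motion score matrix.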
            results = self.hungarian.compute(ret)

            out = open(outFile, 'a')
            look_up = set(j for j in xrange(a_n))
            nxt = self.a_train_set.nxt
            for (i, j) in results:
                # print (i,j)
                if ret[i][j] >= tau_threshold:
                    continue
                e1 = self.a_train_set.edges[i][j].view(1, -1).to(self.device)
                vs = self.a_train_set.getApp(1, i)
                vr = self.a_train_set.getApp(0, j)

                vr1 = self.AVphi(e1, vs, vr, self.Au)
                self.a_train_set.detections[nxt][j][0] = vr1.data

                look_up.remove(j)
                self.m_train_set.updateVelocity(i, j,
                                                line_con[self.cur][i][-1],
                                                False)

                id = id_con[self.cur][i]
                id_con[self.nxt][j] = id
                attr1 = line_con[self.cur][i]
                attr2 = line_con[self.nxt][j]
                attr2[1] = str(id)
                if attr1[-1] > 1:
                    # for the missing detections & side connection
                    self.linearModel(out, attr1, attr2)
                line = ''
                for attr in attr2[:-1]:
                    line += attr + ','
                if show_recovering:
                    line += '0'
                else:
                    line = line[:-1]
                print >> out, line
                self.bbx_counter += 1

            if u_update:
                self.Mu = m_u_.data
                self.Au = u1.data

            for j in look_up:
                self.m_train_set.updateVelocity(-1, j, tag=False)

            for i in xrange(a_n):
                attrs = line_con[self.nxt][i]
                color = self.color[1]
                state = '_C'
                if id_con[self.nxt][i] == -1:
                    color = self.color[0]
                    state = '_B'
                    id_con[self.nxt][i] = id_step
                    attrs[1] = str(id_step)
                    line = ''
                    for attr in attrs[:-1]:
                        line += attr + ','
                    if show_recovering:
                        line += '0'
                    else:
                        line = line[:-1]
                    print >> out, line
                    self.bbx_counter += 1
                    id_step += 1

                # if i not in look_up:
                #     color = self.color[2]
                #     state = '_M'

                # draw the rectrangle
                x, y = int(float(attrs[2])), int(float(attrs[3]))
                w, h = int(float(attrs[4])), int(float(attrs[5]))
                cv2.rectangle(imgs[self.nxt], (x, y), (x + w, y + h), color, 2)
                cv2.putText(imgs[self.nxt], attrs[1] + state, (x + 3, y + 15),
                            font, 0.6, color, 2, cv2.LINE_AA)

            out.close()

            # visualization
            cv2.imwrite(self.pre_win + '.png', imgs[self.cur])
            cv2.imwrite(self.cur_win + '.png', imgs[self.nxt])
            if going_tag == 0:
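                # Interactive inspection: pause after each frame (unless jumping ahead to going_f) and let the user query the fused scores between chosen IDs in frames T and T-1.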
                while True:
                    inp = raw_input('Input(enter for next iteration):')
                    print '     Your input is:', inp
                    state = 3
                    if len(inp):
                        state = 1
                        direction = int(
                            raw_input(
                                '1 For T -> (T-1), 2 For (T-1) -> T, 3 For next iteration:'
                            ))
                        if direction == 1:
                            # T -> T-1
                            print '     T -> (T-1)'
                            check = int(
                                raw_input('1 For Single, 2 - For Double'))
                            tag1, tag2 = False, False
                            if check == 1:
                                id1 = int(raw_input('Input the ID1:'))
                                print '     ', id1
                                for i in xrange(a_n):
                                    if id_con[self.nxt][i] == id1:
                                        id1 = i
                                        tag1 = True
                                        break
                                if tag1:
                                    for i in xrange(a_m):
                                        if ret[i][id1] < tau_threshold:
                                            print '     ', id_con[
                                                self.cur][i], ret[i][id1]
                                else:
                                    print 'The %d is not in the current frame!' % id1
                            else:
                                string = raw_input('Input the ID1,ID2:')
                                string = string.split(',')
                                id1, id2 = int(string[0]), int(string[1])
                                print '     ', id1, id2
                                for i in xrange(a_n):
                                    if id_con[self.nxt][i] == id1:
                                        id1 = i
                                        tag1 = True
                                        break

                                for i in xrange(a_m):
                                    if id_con[self.cur][i] == id2:
                                        id2 = i
                                        tag2 = True
                                        break
                                if tag1 and tag2:
                                    print '     ', ret[id2][id1]
                                else:
                                    print 'The id1 or id2 is not in the list!'

                        elif direction == 2:
                            # T-1 -> T
                            print '        (T-1) -> T'
                            check = int(
                                raw_input('1 For Single, 2 - For Double'))
                            if check == 1:
                                id1 = int(raw_input('Input the ID1:'))
                                print '     ', id1
                                for i in xrange(a_m):
                                    if id_con[self.cur][i] == id1:
                                        id1 = i
                                        tag1 = True
                                        break
                                if tag1:
                                    for i in xrange(a_n):
                                        if ret[id1][i] < tau_threshold:
                                            print '     ', id_con[
                                                self.nxt][i], ret[id1][i]
                                else:
                                    print 'The %d is not in the previous frame!' % id1
                            else:
                                string = raw_input('Input the ID1,ID2:')
                                string = string.split(',')
                                id1, id2 = int(string[0]), int(string[1])
                                print '     ', id1, id2
                                for i in xrange(a_m):
                                    if id_con[self.cur][i] == id1:
                                        id1 = i
                                        tag1 = True
                                        break

                                for i in xrange(a_n):
                                    if id_con[self.nxt][i] == id2:
                                        id2 = i
                                        tag2 = True
                                        break
                                if tag1 and tag2:
                                    print '     ', ret[id1][id2]
                                else:
                                    print 'The id1 or id2 is not in the list!'
                        else:
                            state = 3
                            nxt_index = int(
                                raw_input(
                                    'The target frame(0 for next iteration):'))
                            if nxt_index:
                                going_f = nxt_index
                                going_tag = 1

                    if state == 3:
                        break

            # For missing & Occlusion
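            # Carry unmatched previous detections forward (for at most 'gap' frames) so temporarily occluded targets can be re-linked later.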
            index = 0
            for (i, j) in results:
                while i != index:
                    # if a_step > 80:
                    #     print id_con[self.cur][index], line_con[self.cur][index]
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                    index += 1

                if ret[i][j] >= tau_threshold:
                    # if a_step > 80:
                    #     print id_con[self.cur][index], line_con[self.cur][index]
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)

                index += 1
            while index < a_m:
                # if a_step > 80:
                #     print id_con[self.cur][index], line_con[self.cur][index]
                attrs = line_con[self.cur][index]
                # print '*', attrs, '*'
                if attrs[-1] + a_t_gap <= gap:
                    attrs[-1] += a_t_gap
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(id_con[self.cur][index])
                    self.a_train_set.moveApp(index)
                    self.m_train_set.moveMotion(index)
                index += 1

            # con = self.m_train_set.cleanEdge()
            # for i in xrange(len(con)-1, -1, -1):
            #     index = con[i]
            #     del line_con[self.nxt][index]
            #     del id_con[self.nxt][index]

            line_con[self.cur] = []
            id_con[self.cur] = []
            cv2.imwrite('Show/%06d.png' % (a_step - 1), imgs[self.cur])
            imgs[self.cur] = []
            # print head+step, results
            self.a_train_set.swapFC()
            self.m_train_set.swapFC()
            self.swapFC()
        gtIn.close()
        print '     The results:', id_step, self.bbx_counter
Example 5
class GN():
    def __init__(self, seq_index, tt, a, cuda=True):
        '''
        Evaluating with the MotMetrics
        :param seq_index: the number of the sequence
        :param tt: the number of frames used for training / testing
        :param a: the weight alpha balancing the appearance and motion scores
        :param cuda: True - GPU, False - CPU
        '''
        self.bbx_counter = 0
        self.seq_index = seq_index
        self.hungarian = Munkres()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.tt = tt
        self.alpha = a
        self.missingCounter = 0
        self.sideConnection = 0

        print '     Loading the model...'
        self.loadAModel()
        self.loadMModel()

        self.out_dir = t_dir + 'motmetrics_%s_v2_4_%.1f_%d/' % (type, a, 2)
        print '		', self.out_dir
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)
        else:
            deleteDir(self.out_dir)
            os.mkdir(self.out_dir)
        self.initOut()

    def initOut(self):
        print '     Loading Data...'
        self.a_train_set = ADatasetFromFolder(
            sequence_dir, '../MOT/MOT16/train/MOT16-%02d' % self.seq_index)
        self.m_train_set = MDatasetFromFolder(
            sequence_dir, '../MOT/MOT16/train/MOT16-%02d' % self.seq_index)

        gt_training = self.out_dir + 'gt_training.txt'  # the gt of the training data
        self.copyLines(self.seq_index, 1, gt_training, self.tt)

        detection_dir = self.out_dir + 'res_training_det.txt'
        res_training = self.out_dir + 'res_training.txt'  # the result of the training data
        self.createTxt(detection_dir)
        self.createTxt(res_training)
        self.copyLines(self.seq_index, 1, detection_dir, self.tt, 1)

        self.evaluation(1, self.tt, detection_dir, res_training)

    def getSeqL(self, info):
        # get the length of the sequence
        f = open(info, 'r')
        f.readline()
        for line in f.readlines():
            line = line.strip().split('=')
            if line[0] == 'seqLength':
                seqL = int(line[1])
        f.close()
        return seqL

    def copyLines(self, seq, head, gt_seq, tail=-1, tag=0):
        '''
        Copy the ground truth within [head, tail]
        :param seq: the number of the sequence
        :param head: the head frame number
        :param tail: the tail frame number of the clipped sequence (-1 for the full sequence length)
        :param gt_seq: the path of the output file
        :param tag: 0 - copy the ground truth, 1 - copy the detections
        :return: the sequence length actually used
        '''
        if tt_tag:
            basic_dir = '../MOT/MOT%d/test/MOT%d-%02d-%s/' % (year, year, seq,
                                                              type)
        else:
            basic_dir = '../MOT/MOT%d/train/MOT%d-%02d-%s/' % (year, year, seq,
                                                               type)
        print '     Testing on', basic_dir, 'Length:', self.tt
        seqL = tail if tail != -1 else self.getSeqL(basic_dir + 'seqinfo.ini')

        det_dir = 'gt/gt_det.txt' if test_gt_det else 'det/det.txt'
        seq_dir = basic_dir + ('gt/gt.txt' if tag == 0 else det_dir)
        inStream = open(seq_dir, 'r')

        outStream = open(gt_seq, 'w')
        for line in inStream.readlines():
            line = line.strip()
            attrs = line.split(',')
            f_num = int(attrs[0])
            if f_num >= head and f_num <= seqL:
                print >> outStream, line
        outStream.close()

        inStream.close()
        return seqL

    def createTxt(self, out_file):
        f = open(out_file, 'w')
        f.close()

    def loadAModel(self):
        from mot_model import uphi, ephi
        if edge_initial == 0:
            model_dir = 'MOT'
            name = 'all_det_ft'
            i_name = 'IoU'
        elif edge_initial == 1:
            model_dir = 'Appearance'
            name = 'all_4_CE'
            i_name = 'Random'
        tail = 10
        self.AUphi = torch.load('../%s/Results/MOT16/%s/%s/uphi_%02d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.AEphi = torch.load('../%s/Results/MOT16/%s/%s/ephi_%02d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.Au = torch.load('../%s/Results/MOT16/%s/%s/u_%02d.pth' %
                             (model_dir, i_name, name, tail))
        self.Au = self.Au.to(self.device)

    def loadMModel(self):
        from m_mot_model import uphi, ephi
        if edge_initial == 0:
            model_dir = 'MOT_Motion'
            name = 'all_v2_4'
            i_name = 'IoU'
        elif edge_initial == 1:
            model_dir = 'Motion'
            name = 'all_4_CE'
            i_name = 'Random'
        tail = 10
        self.MUphi = torch.load('../%s/Results/MOT16/%s/%s/uphi_%d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.MEphi = torch.load('../%s/Results/MOT16/%s/%s/ephi_%d.pth' %
                                (model_dir, i_name, name, tail)).to(
                                    self.device)
        self.Mu = torch.load('../%s/Results/MOT16/%s/%s/u_%d.pth' %
                             (model_dir, i_name, name, tail))
        self.Mu = self.Mu.to(self.device)

    def swapFC(self):
        self.cur = self.cur ^ self.nxt
        self.nxt = self.cur ^ self.nxt
        self.cur = self.cur ^ self.nxt

    def linearModel(self, out, attr1, attr2):
        # print 'I got you! *.*'
        t = attr1[-1]
        self.sideConnection += 1
        if t > f_gap:
            return
        frame = int(attr1[0])
        x1, y1, w1, h1 = float(attr1[2]), float(attr1[3]), float(
            attr1[4]), float(attr1[5])
        x2, y2, w2, h2 = float(attr2[2]), float(attr2[3]), float(
            attr2[4]), float(attr2[5])

        x_delta = (x2 - x1) / t
        y_delta = (y2 - y1) / t
        w_delta = (w2 - w1) / t
        h_delta = (h2 - h1) / t

        for i in xrange(1, t):
            frame += 1
            x1 += x_delta
            y1 += y_delta
            w1 += w_delta
            h1 += h_delta
            attr1[0] = str(frame)
            attr1[2] = str(x1)
            attr1[3] = str(y1)
            attr1[4] = str(w1)
            attr1[5] = str(h1)
            line = ''
            for attr in attr1[:-1]:
                line += attr + ','
            if show_recovering:
                line += '1'
            else:
                line = line[:-1]
            print >> out, line
            self.bbx_counter += 1
        self.missingCounter += t - 1

    def evaluation(self, head, tail, gtFile, outFile):
        '''
        Evaluation on dets
        :param head: the head frame number
        :param tail: the tail frame number
        :param gtFile: the ground truth file name
        :param outFile: the name of output file
        :return: None
        '''
        gtIn = open(gtFile, 'r')
        self.cur, self.nxt = 0, 1
        line_con = [[], []]
        id_con = [[], []]
        id_step = 1

        a_step = head + self.a_train_set.setBuffer(head)
        m_step = head + self.m_train_set.setBuffer(head)
        if a_step != m_step:
            print 'Something is wrong!'
            print 'a_step =', a_step, ', m_step =', m_step
            raw_input('Continue?')

        while a_step < tail:
            # print '*********************************'
            a_t_gap = self.a_train_set.loadNext()
            m_t_gap = self.m_train_set.loadNext()
            if a_t_gap != m_t_gap:
                print 'Something is wrong!'
                print 'a_t_gap =', a_t_gap, ', m_t_gap =', m_t_gap
                raw_input('Continue?')
            a_step += a_t_gap
            m_step += m_t_gap
            # print head+step, 'F',

            a_u_ = self.AUphi(self.a_train_set.E, self.a_train_set.V, self.Au)
            m_u_ = self.MUphi(self.m_train_set.E, self.m_train_set.V, self.Mu)

            # print 'Fo'
            a_m = self.a_train_set.m
            a_n = self.a_train_set.n
            m_m = self.m_train_set.m
            m_n = self.m_train_set.n

            if a_m != m_m or a_n != m_n:
                print 'Something is wrong!'
                print 'a_m = %d, m_m = %d' % (
                    a_m, m_m), ', a_n = %d, m_n = %d' % (a_n, m_n)
                raw_input('Continue?')
            # print 'm = %d, n = %d'%(m, n)
            if a_n == 0:
                print 'There is no detection in the rest of sequence!'
                break

            if id_step == 1:
                out = open(outFile, 'a')
                i = 0
                while i < a_m:
                    attrs = gtIn.readline().strip().split(',')
                    if float(attrs[6]) >= tau_conf_score:
                        attrs.append(1)
                        attrs[1] = str(id_step)
                        line = ''
                        for attr in attrs[:-1]:
                            line += attr + ','
                        if show_recovering:
                            line += '0'
                        else:
                            line = line[:-1]
                        print >> out, line
                        self.bbx_counter += 1
                        line_con[self.cur].append(attrs)
                        id_con[self.cur].append(id_step)
                        id_step += 1
                        i += 1
                out.close()

            i = 0
            while i < a_n:
                attrs = gtIn.readline().strip().split(',')
                if float(attrs[6]) >= tau_conf_score:
                    attrs.append(1)
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(-1)
                    i += 1

            # update the edges
            # print 'T',
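            # Single-round edge update in this variant: AEphi / MEphi score each candidate pair and the appearance and motion probabilities are fused with weight alpha.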
            ret = self.a_train_set.getRet()
            for i in xrange(len(self.a_train_set.candidates)):
                a_e, a_vs_index, a_vr_index = self.a_train_set.candidates[i]
                m_e, m_vs_index, m_vr_index = self.m_train_set.candidates[i]
                if a_vs_index != m_vs_index or a_vr_index != m_vr_index:
                    print 'Something is wrong!'
                    print 'a_vs_index = %d, m_vs_index = %d' % (a_vs_index,
                                                                m_vs_index)
                    print 'a_vr_index = %d, m_vr_index = %d' % (a_vr_index,
                                                                m_vr_index)
                    raw_input('Continue?')
                if ret[a_vs_index][a_vr_index] == 1.0:
                    continue
                a_e = a_e.to(self.device).view(1, -1)
                a_v1 = self.a_train_set.getApp(1, a_vs_index)
                a_v2 = self.a_train_set.getApp(0, a_vr_index)
                a_e_ = self.AEphi(a_e, a_v1, a_v2, a_u_)
                self.a_train_set.edges[a_vs_index][
                    a_vr_index] = a_e_.data.view(-1)
                a_tmp = F.softmax(a_e_)
                a_tmp = a_tmp.cpu().data.numpy()[0]

                m_e = m_e.to(self.device).view(1, -1)
                m_v1 = self.m_train_set.getMotion(1, m_vs_index)
                m_v2 = self.m_train_set.getMotion(
                    0, m_vr_index, m_vs_index,
                    line_con[self.cur][m_vs_index][-1])
                m_e_ = self.MEphi(m_e, m_v1, m_v2, m_u_)
                self.m_train_set.edges[m_vs_index][
                    m_vr_index] = m_e_.data.view(-1)
                m_tmp = F.softmax(m_e_)
                m_tmp = m_tmp.cpu().data.numpy()[0]

                ret[a_vs_index][a_vr_index] = float(
                    a_tmp[0]) * self.alpha + float(m_tmp[0]) * (1 - self.alpha)

            # self.a_train_set.showE(outFile)
            # self.m_train_set.showE(outFile)

            # for j in ret:
            #     print j
            results = self.hungarian.compute(ret)

            out = open(outFile, 'a')
            look_up = set(j for j in xrange(a_n))
            for (i, j) in results:
                # print (i,j)
                if ret[i][j] >= tau_threshold:
                    continue
                look_up.remove(j)
                self.m_train_set.updateVelocity(i, j,
                                                line_con[self.cur][i][-1],
                                                False)

                id = id_con[self.cur][i]
                id_con[self.nxt][j] = id
                attr1 = line_con[self.cur][i]
                attr2 = line_con[self.nxt][j]
                # print attrs
                attr2[1] = str(id)
                if attr1[-1] > 1:
                    # for the missing detections
                    self.linearModel(out, attr1, attr2)
                line = ''
                for attr in attr2[:-1]:
                    line += attr + ','
                if show_recovering:
                    line += '0'
                else:
                    line = line[:-1]
                print >> out, line
                self.bbx_counter += 1

            for j in look_up:
                self.m_train_set.updateVelocity(-1, j, tag=False)

            for i in xrange(a_n):
                if id_con[self.nxt][i] == -1:
                    id_con[self.nxt][i] = id_step
                    attrs = line_con[self.nxt][i]
                    attrs[1] = str(id_step)
                    line = ''
                    for attr in attrs[:-1]:
                        line += attr + ','
                    if show_recovering:
                        line += '0'
                    else:
                        line = line[:-1]
                    print >> out, line
                    self.bbx_counter += 1
                    id_step += 1
            out.close()

            # For missing & Occlusion
            index = 0
            for (i, j) in results:
                while i != index:
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                    index += 1
                if ret[i][j] >= tau_threshold:
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                index += 1
            while index < a_m:
                attrs = line_con[self.cur][index]
                # print '*', attrs, '*'
                if attrs[-1] + a_t_gap <= gap:
                    attrs[-1] += a_t_gap
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(id_con[self.cur][index])
                    self.a_train_set.moveApp(index)
                    self.m_train_set.moveMotion(index)
                index += 1

            # con = self.m_train_set.cleanEdge()
            # for i in xrange(len(con)-1, -1, -1):
            #     index = con[i]
            #     del line_con[self.nxt][index]
            #     del id_con[self.nxt][index]

            line_con[self.cur] = []
            id_con[self.cur] = []
            # print head+step, results
            self.a_train_set.swapFC()
            self.m_train_set.swapFC()
            self.swapFC()
        gtIn.close()
        print '     The results:', id_step, self.bbx_counter
Example 6
class GN():
    def __init__(self, cuda=True):
        '''
        Evaluating with the MotMetrics
        :param cuda: True - GPU, False - CPU
        '''
        self.hungarian = Munkres()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.alpha = 0.7
        self.missingCounter = 0
        self.sideConnection = 0

        self.outName = 'Duke.txt'
        out = open(self.outName, 'w')
        out.close()

        self.tau_conf_score = 0.55
        self.initOut()

    def initOut(self):
        for i in xrange(1, 9):
            start_t = time.time()
            self.camera = i
            self.bbx_counter = 0

            print '     Loading the model...'
            self.loadAModel()
            self.loadMModel()

            print '     Loading Data...'
            self.a_train_set = ADatasetFromFolder(i, self.tau_conf_score)
            self.m_train_set = MDatasetFromFolder(i, self.tau_conf_score)

            self.evaluation()
            print '     This camera:', self.camera, '- Time:', (time.time() - start_t)/60

    def loadAModel(self):
        from mot_model import uphi, ephi, vphi
        model_dir = 'Duke_app2'
        name = 'Pretrained_4'
        # name = 'all_8'
        if edge_initial == 0:
            i_name = 'IoU'
        elif edge_initial == 1:
            i_name = 'Random'
        tail = 8
        self.AUphi = torch.load('../%s/Results/MOT16/%s/%s/uphi_%d.pth'%(model_dir, i_name, name, tail)).to(self.device)
        self.AVphi = torch.load('../%s/Results/MOT16/%s/%s/vphi_%d.pth'%(model_dir,i_name, name, tail)).to(self.device)
        self.AEphi1 = torch.load('../%s/Results/MOT16/%s/%s/ephi1_%d.pth'%(model_dir,i_name, name, tail)).to(self.device)
        self.AEphi2 = torch.load('../%s/Results/MOT16/%s/%s/ephi2_%d.pth'%(model_dir,i_name, name, tail)).to(self.device)
        self.Au = torch.load('../%s/Results/MOT16/%s/%s/u_%d.pth'%(model_dir,i_name, name, tail))
        self.Au = self.Au.to(self.device)

    def loadMModel(self):
        from m_mot_model import uphi, ephi
        model_dir = 'Duke_m'
        name = 'all'
        if edge_initial == 0:
            i_name = 'IoU'
        elif edge_initial == 1:
            i_name = 'Random'
        tail = 8
        self.MUphi = torch.load('../%s/Results/DukeMTMC/%s/%s/uphi_%d.pth'%(model_dir,i_name, name, tail)).to(self.device)
        self.MEphi = torch.load('../%s/Results/DukeMTMC/%s/%s/ephi_%d.pth'%(model_dir,i_name, name, tail)).to(self.device)
        self.Mu = torch.load('../%s/Results/DukeMTMC/%s/%s/u_%d.pth'%(model_dir,i_name, name, tail))
        self.Mu = self.Mu.to(self.device)

    def swapFC(self):
        self.cur = self.cur ^ self.nxt
        self.nxt = self.cur ^ self.nxt
        self.cur = self.cur ^ self.nxt

    def linearModel(self, out, attr1, attr2):
        # print 'I got you! *.*'
        self.sideConnection += 1
        frame, frame2 = int(attr1[2]), int(attr2[2])
        t = frame2 - frame
        if t > f_gap:
            return
        x1, y1, w1, h1 = float(attr1[3]), float(attr1[4]), float(attr1[5]), float(attr1[6])
        x2, y2, w2, h2 = float(attr2[3]), float(attr2[4]), float(attr2[5]), float(attr2[6])

        x_delta = (x2-x1)/t
        y_delta = (y2-y1)/t
        w_delta = (w2-w1)/t
        h_delta = (h2-h1)/t

        for i in xrange(1, t):
            frame += 1
            x1 += x_delta
            y1 += y_delta
            w1 += w_delta
            h1 += h_delta
            attr1[2] = str(frame)
            attr1[3] = str(x1)
            attr1[4] = str(y1)
            attr1[5] = str(w1)
            attr1[6] = str(h1)
            line = ''
            for attr in attr1[:-1]:
                line += attr + ','
            if show_recovering:
                line += '1'
            else:
                line = line[:-1]
            print >> out, line
            self.bbx_counter += 1
        self.missingCounter += t-1

    def evaluation(self):
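        # Track a single DukeMTMC camera: read its detections, link them frame by frame with the appearance and motion graph networks, and append the results to self.outName.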
        gtIn = open('Detections/%d.txt'%self.camera, 'r')
        self.cur, self.nxt = 0, 1
        line_con = [[], []]
        id_con = [[], []]
        id_step = 1

        head = 1
        a_step = head + self.a_train_set.setBuffer(head)
        m_step = head + self.m_train_set.setBuffer(head)
        if a_step != m_step:
            print 'Something is wrong!'
            print 'a_step =', a_step, ', m_step =', m_step
            raw_input('Continue?')

        while a_step <= SEQLEN:
            # print '*********************************'
            a_t_gap = self.a_train_set.loadNext()
            m_t_gap = self.m_train_set.loadNext()
            if a_t_gap != m_t_gap:
                print 'Something is wrong!'
                print 'a_t_gap =', a_t_gap, ', m_t_gap =', m_t_gap
                raw_input('Continue?')
            a_step += a_t_gap
            m_step += m_t_gap
            if a_step > SEQLEN:
                break

            print a_step,
            if a_step % 1000 == 0:
                print ''
            # print head+step, 'F',

            m_u_ = self.MUphi(self.m_train_set.E, self.m_train_set.V, self.Mu)

            # print 'Fo'
            a_m = self.a_train_set.m
            a_n = self.a_train_set.n
            m_m = self.m_train_set.m
            m_n = self.m_train_set.n

            if a_m != m_m or a_n != m_n:
                print '\nSomething is wrong!'
                print 'a_m = %d, m_m = %d'%(a_m, m_m), ', a_n = %d, m_n = %d'%(a_n, m_n)
                print self.a_train_set.bbx[self.a_train_set.f_step]
                print self.m_train_set.bbx[self.m_train_set.f_step]
                raw_input('Continue?')
            # print 'm = %d, n = %d'%(m, n)
            if a_n==0:
                print 'There is no detection in the rest of sequence!'
                break

            if id_step == 1:
                out = open(self.outName, 'a')
                i = 0
                while i < a_m:
                    attrs = gtIn.readline().strip().split(',')
                    attrs.append(1)
                    attrs[1] = str(id_step)
                    line = ''
                    for attr in attrs[:-1]:
                        line += attr + ','
                    if show_recovering:
                        line += '0'
                    else:
                        line = line[:-1]
                    print >> out, line
                    self.bbx_counter += 1
                    line_con[self.cur].append(attrs)
                    id_con[self.cur].append(id_step)
                    id_step += 1
                    i += 1
                out.close()

            i = 0
            while i < a_n:
                attrs = gtIn.readline().strip().split(',')
                attrs.append(1)
                line_con[self.nxt].append(attrs)
                id_con[self.nxt].append(-1)
                i += 1

            # update the edges
            # print 'T',
            candidates = []
            E_CON, V_CON = [], []
            for edge in self.a_train_set.candidates:
                e, vs_index, vr_index = edge
                e = e.view(1, -1).to(self.device)
                vs = self.a_train_set.getApp(1, vs_index)
                vr = self.a_train_set.getApp(0, vr_index)

                e1 = self.AEphi1(e, vs, vr, self.Au)
                vr1 = self.AVphi(e1, vs, vr, self.Au)
                candidates.append((e1, vs, vr1, vs_index, vr_index))
                E_CON.append(e1)
                V_CON.append(vs)
                V_CON.append(vr1)

            E = self.a_train_set.aggregate(E_CON).view(1, -1)
            V = self.a_train_set.aggregate(V_CON).view(1, -1)
            u1 = self.AUphi(E, V, self.Au)

            u1 = torch.clamp(u1, max=1.0, min=-1.0)  # Clamp all elements in u1 into the range [ min, max ]

            ret = self.a_train_set.getRet()
            decay_tag = [0 for i in xrange(a_m)]
            for i in xrange(a_m):
                for j in xrange(a_n):
                    if ret[i][j] == 0:
                        decay_tag[i] += 1

            for i in xrange(len(self.a_train_set.candidates)):
                e1, vs, vr1, a_vs_index, a_vr_index = candidates[i]
                m_e, m_vs_index, m_vr_index = self.m_train_set.candidates[i]
                if a_vs_index != m_vs_index or a_vr_index != m_vr_index:
                    print 'Something is wrong!'
                    print 'a_vs_index = %d, m_vs_index = %d'%(a_vs_index, m_vs_index)
                    print 'a_vr_index = %d, m_vr_index = %d'%(a_vr_index, m_vr_index)
                    raw_input('Continue?')
                if ret[a_vs_index][a_vr_index] == tau_threshold:
                    continue

                e2 = self.AEphi2(e1, vs, vr1, u1)
                self.a_train_set.edges[a_vs_index][a_vr_index] = e1.data.view(-1)

                a_tmp = F.softmax(e2)
                a_tmp = a_tmp.cpu().data.numpy()[0]

                m_e = m_e.to(self.device).view(1,-1)
                m_v1 = self.m_train_set.getMotion(1, m_vs_index)
                m_v2 = self.m_train_set.getMotion(0, m_vr_index, m_vs_index)
                m_e_ = self.MEphi(m_e, m_v1, m_v2, m_u_)
                self.m_train_set.edges[m_vs_index][m_vr_index] = m_e_.data.view(-1)
                m_tmp = F.softmax(m_e_)
                m_tmp = m_tmp.cpu().data.numpy()[0]

                t = line_con[self.cur][a_vs_index][-1]
                # print a_tmp, m_tmp
                if decay_tag[a_vs_index] > 0:
                    try:
                        A = min(float(a_tmp[0]) * pow(decay, t + a_t_gap - 2), 1.0)
                        M = min(float(m_tmp[0]) * pow(decay, t + a_t_gap - 2), 1.0)
                    except OverflowError:
                        print 'OverflowError: (34, "Numerical result out of range")'
                        A = float(a_tmp[0])
                        M = float(m_tmp[0])
                else:
                    A = float(a_tmp[0])
                    M = float(m_tmp[0])
                ret[a_vs_index][a_vr_index] = A*self.alpha + M*(1-self.alpha)

            # self.a_train_set.showE(outFile)
            # self.m_train_set.showE(outFile)

            # for j in ret:
            #     print j

            results = self.hungarian.compute(ret)

            out = open(self.outName, 'a')
            look_up = set(j for j in xrange(a_n))
            nxt = self.a_train_set.nxt
            for (i, j) in results:
                # print (i,j)
                if ret[i][j] >= tau_threshold:
                    continue
                e1 = self.a_train_set.edges[i][j].view(1, -1).to(self.device)
                vs = self.a_train_set.getApp(1, i)
                vr = self.a_train_set.getApp(0, j)

                vr1 = self.AVphi(e1, vs, vr, self.Au)
                self.a_train_set.detections[nxt][j][0] = vr1.data

                look_up.remove(j)
                self.m_train_set.updateVelocity(i, j, False)

                id = id_con[self.cur][i]
                id_con[self.nxt][j] = id
                attr1 = line_con[self.cur][i]
                attr2 = line_con[self.nxt][j]
                # print attrs
                attr2[1] = str(id)
                # print attr1, attr2, a_t_gap
                if attr1[-1] + a_t_gap - 1 > 1:
                    # for the missing detections
                    self.linearModel(out, attr1, attr2)
                line = ''
                for attr in attr2[:-1]:
                    line += attr + ','
                if show_recovering:
                    line += '0'
                else:
                    line = line[:-1]
                print >> out, line
                self.bbx_counter += 1

            if u_update:
                self.Mu = m_u_.data
                self.Au = u1.data

            for j in look_up:
                self.m_train_set.updateVelocity(-1, j, False)

            for i in xrange(a_n):
                if id_con[self.nxt][i] == -1:
                    id_con[self.nxt][i] = id_step
                    attrs = line_con[self.nxt][i]
                    attrs[1] = str(id_step)
                    line = ''
                    for attr in attrs[:-1]:
                        line += attr + ','
                    if show_recovering:
                        line += '0'
                    else:
                        line = line[:-1]
                    print >> out, line
                    self.bbx_counter += 1
                    id_step += 1
            out.close()

            # For missing & Occlusion
            index = 0
            for (i, j) in results:
                while i != index:
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                    index += 1
                if ret[i][j] >= tau_threshold:
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                index += 1
            while index < a_m:
                attrs = line_con[self.cur][index]
                # print '*', attrs, '*'
                if attrs[-1] + a_t_gap <= gap:
                    attrs[-1] += a_t_gap
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(id_con[self.cur][index])
                    self.a_train_set.moveApp(index)
                    self.m_train_set.moveMotion(index)
                index += 1

            # con = self.m_train_set.cleanEdge()
            # for i in xrange(len(con)-1, -1, -1):
            #     index = con[i]
            #     del line_con[self.nxt][index]
            #     del id_con[self.nxt][index]

            line_con[self.cur] = []
            id_con[self.cur] = []
            # print head+step, results
            self.a_train_set.swapFC()
            self.m_train_set.swapFC()
            self.swapFC()
        gtIn.close()
        print '     The results:', id_step, self.bbx_counter
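
A minimal, self-contained sketch of the matching step used above (the matrices, alpha and tau_threshold values below are invented for illustration and are not taken from the example): an appearance cost A and a motion cost M are fused with the weight alpha, the fused matrix is handed to the Munkres (Hungarian) solver, and any matched pair whose cost reaches tau_threshold is treated as unmatched.

from munkres import Munkres

alpha = 0.3            # weight of the appearance term
tau_threshold = 0.45   # assumed rejection threshold for a matched pair

A = [[0.10, 0.80],     # appearance costs: rows = current tracks, cols = new detections
     [0.70, 0.15]]
M = [[0.20, 0.90],     # motion costs, same layout
     [0.60, 0.05]]

ret = [[A[i][j] * alpha + M[i][j] * (1 - alpha) for j in range(2)] for i in range(2)]

for i, j in Munkres().compute(ret):
    if ret[i][j] >= tau_threshold:   # too costly: leave the detection unmatched
        continue
    print('track %d -> detection %d (fused cost %.3f)' % (i, j, ret[i][j]))
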
Esempio n. 7
0
class GN():
    def __init__(self, seq_index, seq_len, cuda=True):
        '''
        Evaluating with the MotMetrics
        :param seq_index: the number of the sequence
        :param seq_len: the length of the sequence
        :param cuda: True - GPU, False - CPU
        '''
        self.bbx_counter = 0
        self.seq_index = seq_index
        self.hungarian = Munkres()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.seq_len = seq_len
        self.alpha = 0.3
        self.missingCounter = 0
        self.sideConnection = 0

        print('     Loading the model...')
        self.loadAModel()
        self.loadMModel()

        self.out_dir = t_dir + 'motmetrics_%s/' % (type)

        print(self.out_dir)
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)
        else:
            deleteDir(self.out_dir)
            os.mkdir(self.out_dir)
        self.initOut()

    def initOut(self):
        print('     Loading Data...')
        self.a_train_set = ADatasetFromFolder(sequence_dir, mot_dataset_dir + 'MOT16/test/MOT16-%02d' % self.seq_index,
                                              tau_conf_score)
        self.m_train_set = MDatasetFromFolder(sequence_dir, mot_dataset_dir + 'MOT16/test/MOT16-%02d' % self.seq_index,
                                              tau_conf_score)

        detection_dir = self.out_dir + 'res_det.txt'
        res_training = self.out_dir + 'res.txt'  # the tracking results
        self.createTxt(detection_dir)
        self.createTxt(res_training)
        self.copyLines(self.seq_index, 1, detection_dir, self.seq_len, 1)

        self.evaluation(1, self.seq_len, detection_dir, res_training)

    def getSeqL(self, info):
        # get the length of the sequence
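        # seqinfo.ini stores "key=value" lines; only the seqLength entry (total number of frames) is read.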
        f = open(info, 'r')
        f.readline()
        for line in f.readlines():
            line = line.strip().split('=')
            if line[0] == 'seqLength':
                seqL = int(line[1])
        f.close()
        return seqL

    def copyLines(self, seq, head, gt_seq, tail=-1, tag=0):
        '''
        Copy the ground truth (or detections) within [head, tail]
        :param seq: the number of the sequence
        :param head: the head frame number
        :param gt_seq: the path of the output file
        :param tail: the tail frame number of the clipped sequence (-1 means the full sequence)
        :param tag: 0 - copy the ground truth file, otherwise copy the detection file
        :return: the length of the clipped sequence
        '''
        if tt_tag:
            basic_dir = mot_dataset_dir + 'MOT%d/test/MOT%d-%02d-%s/' % (year, year, seq, type)
        else:
            basic_dir = mot_dataset_dir + 'MOT%d/train/MOT%d-%02d-%s/' % (year, year, seq, type)
        print('     Testing on', basic_dir, 'Length:', self.seq_len)
        seqL = tail if tail != -1 else self.getSeqL(basic_dir + 'seqinfo.ini')

        det_dir = 'gt/gt_det.txt' if test_gt_det else 'det/det.txt'
        seq_dir = basic_dir + ('gt/gt.txt' if tag == 0 else det_dir)
        inStream = open(seq_dir, 'r')

        outStream = open(gt_seq, 'w')
        for line in inStream.readlines():
            line = line.strip()
            attrs = line.split(',')
            f_num = int(attrs[0])
            if f_num >= head and f_num <= seqL:
                outStream.write(line + '\n')
        outStream.close()

        inStream.close()
        return seqL

    def createTxt(self, out_file):
        f = open(out_file, 'w')
        f.close()

    def loadAModel(self):
        from mot_model import uphi, ephi, vphi
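        # The import brings the network classes into scope so torch.load can unpickle the saved models.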
        tail = 13
        self.AUphi = torch.load('../App2/' + model_dir + 'uphi_%02d.pth' % tail).to(self.device)
        self.AVphi = torch.load('../App2/' + model_dir + 'vphi_%02d.pth' % tail).to(self.device)
        self.AEphi1 = torch.load('../App2/' + model_dir + 'ephi1_%02d.pth' % tail).to(self.device)
        self.AEphi2 = torch.load('../App2/' + model_dir + 'ephi2_%02d.pth' % tail).to(self.device)
        self.Au = torch.load('../App2/' + model_dir + 'u_%02d.pth' % tail).to(self.device)

    def loadMModel(self):
        from m_mot_model import uphi, ephi
        tail = 13
        self.MUphi = torch.load('../Motion1/' + model_dir + 'uphi_%d.pth' % tail).to(self.device)
        self.MEphi = torch.load('../Motion1/' + model_dir + 'ephi_%d.pth' % tail).to(self.device)
        self.Mu = torch.load('../Motion1/' + model_dir + 'u_%d.pth' % tail).to(self.device)

    def swapFC(self):
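        # XOR swap of the two frame slots: equivalent to self.cur, self.nxt = self.nxt, self.cur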
        self.cur = self.cur ^ self.nxt
        self.nxt = self.cur ^ self.nxt
        self.cur = self.cur ^ self.nxt

    def linearModel(self, out, attr1, attr2):
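        # Recover detections lost during a short occlusion: linearly interpolate the box from
        # attr1 (last seen) to attr2 (re-detected) over the t-frame gap stored in attr1[-1].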
        # print 'I got you! *.*'
        t = attr1[-1]
        self.sideConnection += 1
        if t > f_gap:
            return
        frame = int(attr1[0])
        x1, y1, w1, h1 = float(attr1[2]), float(attr1[3]), float(attr1[4]), float(attr1[5])
        x2, y2, w2, h2 = float(attr2[2]), float(attr2[3]), float(attr2[4]), float(attr2[5])

        x_delta = (x2 - x1) / t
        y_delta = (y2 - y1) / t
        w_delta = (w2 - w1) / t
        h_delta = (h2 - h1) / t

        for i in range(1, t):
            frame += 1
            x1 += x_delta
            y1 += y_delta
            w1 += w_delta
            h1 += h_delta
            attr1[0] = str(frame)
            attr1[2] = str(x1)
            attr1[3] = str(y1)
            attr1[4] = str(w1)
            attr1[5] = str(h1)
            line = ''
            for attr in attr1[:-1]:
                line += attr + ','
            if show_recovering:
                line += '1'
            else:
                line = line[:-1]
            out.write(line + '\n')
            self.bbx_counter += 1
        self.missingCounter += t - 1

    def evaluation(self, head, tail, gtFile, outFile):
        '''
        Evaluation on the detections
        :param head: the head frame number
        :param tail: the tail frame number
        :param gtFile: the input detection file produced by copyLines
        :param outFile: the name of the output tracking file
        :return: None
        '''
        gtIn = open(gtFile, 'r')
        self.cur, self.nxt = 0, 1
        line_con = [[], []]
        id_con = [[], []]
        id_step = 1

        a_step = head + self.a_train_set.setBuffer(head)
        m_step = head + self.m_train_set.setBuffer(head)
        if a_step != m_step:
            print('Something is wrong!')
            print('a_step =', a_step, ', m_step =', m_step)
            input('Continue?')

        while a_step < tail:
            # print '*********************************'
            a_t_gap = self.a_train_set.loadNext()
            m_t_gap = self.m_train_set.loadNext()
            if a_t_gap != m_t_gap:
                print('Something is wrong!')
                print('a_t_gap =', a_t_gap, ', m_t_gap =', m_t_gap)
                input('Continue?')
            a_step += a_t_gap
            m_step += m_t_gap
            print(a_step, end=' ')
            if a_step % 100 == 0:
                print('')

            m_u_ = self.MUphi(self.m_train_set.E, self.m_train_set.V, self.Mu)
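            # m_u_ above is the refreshed global motion feature; it conditions the motion edge
            # updates below and replaces self.Mu at the end of the step when u_update is set.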

            # print 'Fo'
            a_m = self.a_train_set.m
            a_n = self.a_train_set.n
            m_m = self.m_train_set.m
            m_n = self.m_train_set.n

            if a_m != m_m or a_n != m_n:
                print('Something is wrong!')
                print('a_m = %d, m_m = %d' % (a_m, m_m), ', a_n = %d, m_n = %d' % (a_n, m_n))
                input('Continue?')
            # print 'm = %d, n = %d'%(m, n)
            if a_n == 0:
                print('There is no detection in the rest of sequence!')
                break

            if id_step == 1:
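                # First frame of the clip: keep detections above tau_conf_score and give each a fresh identity.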
                out = open(outFile, 'a')
                i = 0
                while i < a_m:
                    attrs = gtIn.readline().strip().split(',')
                    if float(attrs[6]) >= tau_conf_score:
                        attrs.append(1)
                        attrs[1] = str(id_step)
                        line = ''
                        for attr in attrs[:-1]:
                            line += attr + ','
                        if show_recovering:
                            line += '0'
                        else:
                            line = line[:-1]
                        out.write(line + '\n')
                        self.bbx_counter += 1
                        line_con[self.cur].append(attrs)
                        id_con[self.cur].append(id_step)
                        id_step += 1
                        i += 1
                out.close()

            i = 0
            while i < a_n:
                attrs = gtIn.readline().strip().split(',')
                if float(attrs[6]) >= tau_conf_score:
                    attrs.append(1)
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(-1)
                    i += 1

            # update the edges
            # print 'T',
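            # First pass over the appearance graph: AEphi1 refines every candidate edge, AVphi
            # refines the receiver node, and the aggregated E/V update the global feature u1.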
            candidates = []
            E_CON, V_CON = [], []
            for edge in self.a_train_set.candidates:
                e, vs_index, vr_index = edge
                e = e.view(1, -1).to(self.device)
                vs = self.a_train_set.getApp(1, vs_index)
                vr = self.a_train_set.getApp(0, vr_index)

                e1 = self.AEphi1(e, vs, vr, self.Au)
                vr1 = self.AVphi(e1, vs, vr, self.Au)
                candidates.append((e1, vs, vr1, vs_index, vr_index))
                E_CON.append(e1)
                V_CON.append(vs)
                V_CON.append(vr1)

            E = self.a_train_set.aggregate(E_CON).view(1, -1)
            V = self.a_train_set.aggregate(V_CON).view(1, -1)
            u1 = self.AUphi(E, V, self.Au)

            ret = self.a_train_set.getRet()
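            # ret is the a_m x a_n cost matrix; entries equal to tau_threshold are skipped as
            # forbidden pairs below, while zero entries count toward decay_tag for each track.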
            decay_tag = [0 for i in range(a_m)]
            for i in range(a_m):
                for j in range(a_n):
                    if ret[i][j] == 0:
                        decay_tag[i] += 1

            for i in range(len(self.a_train_set.candidates)):
                e1, vs, vr1, a_vs_index, a_vr_index = candidates[i]
                m_e, m_vs_index, m_vr_index = self.m_train_set.candidates[i]
                if a_vs_index != m_vs_index or a_vr_index != m_vr_index:
                    print('Something is wrong!')
                    print('a_vs_index = %d, m_vs_index = %d' % (a_vs_index, m_vs_index))
                    print('a_vr_index = %d, m_vr_index = %d' % (a_vr_index, m_vr_index))
                    input('Continue?')
                if ret[a_vs_index][a_vr_index] == tau_threshold:
                    continue

                e2 = self.AEphi2(e1, vs, vr1, u1)
                self.a_train_set.edges[a_vs_index][a_vr_index] = e1.data.view(-1)

                a_tmp = F.softmax(e2, dim=1)
                a_tmp = a_tmp.cpu().data.numpy()[0]

                m_e = m_e.to(self.device).view(1, -1)
                m_v1 = self.m_train_set.getMotion(1, m_vs_index)
                m_v2 = self.m_train_set.getMotion(0, m_vr_index, m_vs_index,
                                                  line_con[self.cur][m_vs_index][-1] + a_t_gap - 1)
                m_e_ = self.MEphi(m_e, m_v1, m_v2, m_u_)
                self.m_train_set.edges[m_vs_index][m_vr_index] = m_e_.data.view(-1)
                m_tmp = F.softmax(m_e_, dim=1)
                m_tmp = m_tmp.cpu().data.numpy()[0]

                t = line_con[self.cur][a_vs_index][-1]
                if decay_tag[a_vs_index] > 0:
                    A = min(float(a_tmp[0]) * pow(decay, t - 1), 1.0)
                    M = min(float(m_tmp[0]) * pow(decay, t - 1), 1.0)
                else:
                    A = float(a_tmp[0])
                    M = float(m_tmp[0])
                ret[a_vs_index][a_vr_index] = A * self.alpha + M * (1 - self.alpha)

            # for j in ret:
            #     print j
            results = self.hungarian.compute(ret)
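            # Minimum-cost assignment with the Hungarian (Munkres) algorithm; matches whose
            # cost is >= tau_threshold are rejected in the loop below.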

            out = open(outFile, 'a')
            look_up = set(j for j in range(a_n))
            nxt = self.a_train_set.nxt
            for (i, j) in results:
                # print (i,j)
                if ret[i][j] >= tau_threshold:
                    continue
                e1 = self.a_train_set.edges[i][j].view(1, -1).to(self.device)
                vs = self.a_train_set.getApp(1, i)
                vr = self.a_train_set.getApp(0, j)

                vr1 = self.AVphi(e1, vs, vr, self.Au)
                self.a_train_set.detections[nxt][j][0] = vr1.data

                look_up.remove(j)
                self.m_train_set.updateVelocity(i, j, line_con[self.cur][i][-1], False)

                id = id_con[self.cur][i]
                id_con[self.nxt][j] = id
                attr1 = line_con[self.cur][i]
                attr2 = line_con[self.nxt][j]
                # print attrs
                attr2[1] = str(id)
                if attr1[-1] + a_t_gap - 1 > 1:
                    # for the missing detections
                    self.linearModel(out, attr1, attr2)
                line = ''
                for attr in attr2[:-1]:
                    line += attr + ','
                if show_recovering:
                    line += '0'
                else:
                    line = line[:-1]
                out.write(line + '\n')
                self.bbx_counter += 1

            if u_update:
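                # Carry the refined global features over to the next frame pair.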
                self.Mu = m_u_.data
                self.Au = u1.data

            for j in look_up:
                self.m_train_set.updateVelocity(-1, j, tag=False)

            for i in range(a_n):
                if id_con[self.nxt][i] == -1:
                    id_con[self.nxt][i] = id_step
                    attrs = line_con[self.nxt][i]
                    attrs[1] = str(id_step)
                    line = ''
                    for attr in attrs[:-1]:
                        line += attr + ','
                    if show_recovering:
                        line += '0'
                    else:
                        line = line[:-1]
                    out.write(line + '\n')
                    self.bbx_counter += 1
                    id_step += 1
            out.close()

            # For missing & Occlusion
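            # Tracks left unmatched (or whose match was rejected) are kept alive for up to gap
            # frames: their attributes and appearance/motion features are carried to the next frame.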
            index = 0
            for (i, j) in results:
                while i != index:
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                    index += 1
                if ret[i][j] >= tau_threshold:
                    attrs = line_con[self.cur][index]
                    # print '*', attrs, '*'
                    if attrs[-1] + a_t_gap <= gap:
                        attrs[-1] += a_t_gap
                        line_con[self.nxt].append(attrs)
                        id_con[self.nxt].append(id_con[self.cur][index])
                        self.a_train_set.moveApp(index)
                        self.m_train_set.moveMotion(index)
                index += 1
            while index < a_m:
                attrs = line_con[self.cur][index]
                # print '*', attrs, '*'
                if attrs[-1] + a_t_gap <= gap:
                    attrs[-1] += a_t_gap
                    line_con[self.nxt].append(attrs)
                    id_con[self.nxt].append(id_con[self.cur][index])
                    self.a_train_set.moveApp(index)
                    self.m_train_set.moveMotion(index)
                index += 1

            # con = self.m_train_set.cleanEdge()
            # for i in range(len(con)-1, -1, -1):
            #     index = con[i]
            #     del line_con[self.nxt][index]
            #     del id_con[self.nxt][index]

            line_con[self.cur] = []
            id_con[self.cur] = []
            # print head+step, results
            self.a_train_set.swapFC()
            self.m_train_set.swapFC()
            self.swapFC()
        gtIn.close()
        print('     The results:', id_step, self.bbx_counter)
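
To make the occlusion recovery in linearModel easier to follow in isolation, here is a small standalone sketch of the same linear interpolation with made-up boxes; it reproduces only the arithmetic, not the file writing or the f_gap bookkeeping of the method above.

def interpolate_boxes(box_a, box_b, t):
    """Return the t-1 boxes that linearly bridge box_a -> box_b over a gap of t frames."""
    (x1, y1, w1, h1), (x2, y2, w2, h2) = box_a, box_b
    dx, dy, dw, dh = (x2 - x1) / t, (y2 - y1) / t, (w2 - w1) / t, (h2 - h1) / t
    return [(x1 + i * dx, y1 + i * dy, w1 + i * dw, h1 + i * dh) for i in range(1, t)]

# A track last seen at (x, y, w, h) = (100, 50, 40, 80) reappears 3 frames later at (130, 56, 40, 86):
for box in interpolate_boxes((100.0, 50.0, 40.0, 80.0), (130.0, 56.0, 40.0, 86.0), 3):
    print(box)   # the two interpolated boxes for the missed frames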