Example #1
    def handle_seq(self, x_seq, lookup_seq, PedsList_seq, angle):
        # add noise and rotate a trajectory
        vectorized_x_seq, first_values_dict = vectorize_seq(
            x_seq, PedsList_seq, lookup_seq)
        modified_x_seq = vectorized_x_seq.clone()
        mean = torch.FloatTensor([self.noise_mean, self.noise_mean])
        stddev = torch.FloatTensor([self.noise_std, self.noise_std])
        origin = (0, 0)

        for ind, frame in enumerate(vectorized_x_seq):
            for ped in PedsList_seq[ind]:
                selected_point = frame[lookup_seq[ped], :]
                # rotate a frame point
                rotated_point = rotate(origin, selected_point,
                                       math.radians(angle))
                noise = torch.normal(mean, stddev).clone()
                # add random noise
                modified_x_seq[ind, lookup_seq[ped],
                               0] = rotated_point[0] + noise[0]
                modified_x_seq[ind, lookup_seq[ped],
                               1] = rotated_point[1] + noise[1]
                # modified_x_seq[ind, lookup_seq[ped], :] = torch.cat(rotate(origin, first_values_dict[ped], math.radians(angle))) + modified_x_seq[ind, lookup_seq[ped], :]
                # rotate the first frame value as well and add it back to get absolute coordinates
                modified_x_seq[ind, lookup_seq[ped], 0] = (rotate(
                    origin, first_values_dict[ped], math.radians(
                        angle)))[0] + modified_x_seq[ind, lookup_seq[ped], 0]
                modified_x_seq[ind, lookup_seq[ped], 1] = (rotate(
                    origin, first_values_dict[ped], math.radians(
                        angle)))[1] + modified_x_seq[ind, lookup_seq[ped], 1]

        return modified_x_seq
Example #2
def rotate_traj(traj_x, traj_y, angle):
    # rotate a trajectory about the origin, modifying traj_x and traj_y in place
    origin = (0, 0)
    for p_index in range(len(traj_x)):
        rotated_points = rotate(origin, (traj_x[p_index], traj_y[p_index]), angle)
        traj_x[p_index] = rotated_points[0]
        traj_y[p_index] = rotated_points[1]
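
The rotate helper called in Examples #1 and #2 is not defined in these snippets; Example #1 converts degrees with math.radians before calling it, while Example #2 passes the angle straight through. A minimal sketch, assuming the helper rotates a single 2D point counter-clockwise about origin by an angle given in radians:

import math

def rotate(origin, point, angle):
    # Illustrative stand-in for the rotate helper the examples import:
    # rotate point counter-clockwise around origin by angle (radians).
    ox, oy = origin
    px, py = point
    qx = ox + math.cos(angle) * (px - ox) - math.sin(angle) * (py - oy)
    qy = oy + math.sin(angle) * (px - ox) + math.cos(angle) * (py - oy)
    return qx, qy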
Example #3
def test(model, optimizer, test_loader, use_cuda, device, epoch, layers=1):
    model.eval()
    test_loss = 0.
    correct = 0.
    with torch.no_grad():
        for data, target in test_loader:
            data = helper.rotate(data, cuda=use_cuda)
            data, target = data.to(device), target.to(device)
            if layers == 1:
                output, h_conv1, h_fc1 = model(data)
            elif layers == 2:
                output, h_conv1, h_conv2, h_fc1 = model(data)
            elif layers == 3:
                output, h_conv1, h_conv2, h_conv3, h_fc1 = model(data)
            elif layers == 4:
                output, h_conv1, h_conv2, h_conv3, h_conv4, h_fc1 = model(data)
            # sum up batch loss
            test_loss += F.nll_loss(output, target, reduction='sum').item()
            pred = output.max(1, keepdim=True)[1] # get the index of the max log-probability
            correct += pred.eq(target.view_as(pred)).sum().item()

    test_loss /= len(test_loader.dataset)
    print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        test_loss, correct, len(test_loader.dataset),
        100. * correct / len(test_loader.dataset)))
    return (test_loss, correct / len(test_loader.dataset))
Example #4
def train(model, optimizer, train_loader, use_cuda, device, epoch, layers=1):
    """
    Train the given n (between 1 and 4) layer CNN for 1 epoch and print performance
    """
    model.train()
    for batch_idx, (data, target) in enumerate(train_loader):
        data = helper.rotate(data, cuda=use_cuda)
        data, target = data.to(device), target.to(device)
        optimizer.zero_grad()
        if layers == 1:
            output, h_conv1, h_fc1 = model(data)
        elif layers == 2:
            output, h_conv1, h_conv2, h_fc1 = model(data)
        elif layers == 3:
            output, h_conv1, h_conv2, h_conv3, h_fc1 = model(data)
        elif layers == 4:
            output, h_conv1, h_conv2, h_conv3, h_conv4, h_fc1 = model(data)

        loss = F.nll_loss(output, target)
        loss.backward()
        optimizer.step()
        if batch_idx % 10 == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.item()))
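
The helper.rotate call used by both test and train is not shown here. A minimal sketch of one possible version, assuming it rotates every image in an NCHW batch by a random multiple of 90 degrees and that the cuda keyword only controls whether the result is moved to the GPU (both assumptions mirror nothing more than the call sites above):

import random

import torch

def rotate(data, cuda=False):
    # Illustrative stand-in for helper.rotate: rotate each image in an
    # NCHW batch by the same random multiple of 90 degrees.
    k = random.randint(0, 3)                     # number of quarter turns
    rotated = torch.rot90(data, k, dims=(2, 3))  # rotate over the H and W axes
    return rotated.cuda() if cuda else rotated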
Example #5
def processDatabase(dataset,
                    names,
                    deg=0,
                    scale=1.0,
                    min_score_thresh=0.7,
                    showImg=True):
    """run face detection for named dataset as names.
    dataset:
    names:
    deg: angle (anti-clockwise)
    """
    if dataset == "headPose":
        import readheadPose
        d = readheadPose.getTruePosition()

    log = open("log_%s_%d_%f.csv" % (dataset, deg, scale), "wt")
    log.write("name,num,truePositives,falsePositives,meanSize\n")

    tDetector = TensoflowFaceDector()
    category_index = getGategoryIndex()

    windowNotSet = True

    for p in names:
        dstDir = "result"
        dstname = os.path.join(dstDir, p)
        dirname = os.path.dirname(dstname)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)

        frame = cv.imread(p)
        if deg != 0:
            frame = helper.rotate(frame, deg)

        [h, w] = frame.shape[:2]
        scaledImg = helper.scaledImage(frame, scale)
        frame = scaledImg

        cols = frame.shape[1]
        rows = frame.shape[0]
        [h, w] = frame.shape[:2]
        imgCenter = [cols / 2, rows / 2]

        (boxes, scores, classes, num_detections) = tDetector.run(frame)

        trueDetection = {True: 0, False: 0}

        if dataset in ("lfw", ):
            center = imgCenter
            center = (int(scale * center[0]), int(scale * center[1]))

        elif dataset == "headPose":
            v = d[p]
            center = (v[0], v[1])
            center = readheadPose.getRotatedPoint(center, deg, imgCenter)
            # TODO: fix how the image points are handled after the scaling applied here
            center = (int(scale * center[0]), int(scale * center[1]))

            r = int(50 * scale)

            cv.circle(frame, center, r, (0, 255, 0))
        else:
            center = imgCenter
            center = (int(scale * center[0]), int(scale * center[1]))

        trueSizes = []
        boxes_shape = boxes.shape
        for i in range(boxes_shape[0]):
            ymin, xmin, ymax, xmax = (boxes[i, 0], boxes[i, 1],
                                      boxes[i, 2], boxes[i, 3])
            yLeftTop, xLeftTop = int(ymin * h), int(xmin * w)
            yRightBottom, xRightBottom = int(ymax * h), int(xmax * w)
            width = xRightBottom - xLeftTop

            if scores[i] <= min_score_thresh:
                continue

            isPositive = helper.isInside(center, (xLeftTop, yLeftTop),
                                         (xRightBottom, yRightBottom))

            trueDetection[isPositive] += 1
            trueSizes.append(width)

            cv.circle(frame, (xLeftTop, yLeftTop), 5, (0, 255, 0))
            cv.circle(frame, (xRightBottom, yRightBottom), 5, (0, 255, 0))

            color = {True: (0, 255, 0), False: (0, 0, 255)}[isPositive]
            cv.rectangle(frame, (xLeftTop, yLeftTop),
                         (xRightBottom, yRightBottom), color, 5)

        found = trueDetection[True] + trueDetection[False]
        log.write("%s, %d, %d, %d, %s\n" %
                  (p, found, trueDetection[True], trueDetection[False], `
                   np.mean(trueSizes) `))

        if windowNotSet is True:
            cv.namedWindow("tensorflow based (%d, %d)" % (w, h),
                           cv.WINDOW_NORMAL)
            windowNotSet = False

        if showImg:
            cv.imshow("tensorflow based (%d, %d)" % (w, h), frame)
            k = cv.waitKey(1) & 0xff
            if k == ord('q') or k == 27:
                break

    log.close()
    cv.destroyAllWindows()
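
A usage sketch for the sweep above; processDatabase is the function from Example #5, while the glob pattern and dataset layout are assumptions:

import glob

# Hypothetical run: evaluate the detector on LFW-style images rotated
# 30 degrees anti-clockwise and scaled to half size, with no preview window.
names = sorted(glob.glob("lfw/**/*.jpg", recursive=True))
processDatabase("lfw", names, deg=30, scale=0.5,
                min_score_thresh=0.7, showImg=False)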
Example #6
 def estimate(OO0OO0O0OO00000OO, O000000OO000000OO):  #line:108
     O0OOO00O000000O0O = O000O00OO0O0O0OOO(
         (OOO00O00000OO0O0O, OOO00O00000OO0O0O))  #line:110
     O0000OOO0OOO0O0O0 = OOO00O00000OO0O0O  #line:111
     O0000O00OO00O0OOO = OOO00O00000OO0O0O  #line:112
     if O000000OO000000OO.size > 1:  #line:115
         O0OOOOOOO0O0O00OO = O000000OO000000OO[:, 2].squeeze().astype(
             O00OO00OOO0OOO0OO.int)  #line:118
         if O0OOOOOOO0O0O00OO.size == 1:  #line:121
             O0OOOOOOO0O0O00OO = O0OOOOOOO0O0O00OO[None]  #line:122
         OO000O0O00OOOOO00 = O0OOOOOO000OO000O(O0OOOOOOO0O0O00OO)  #line:125
         O000000OO000000OO = O000000OO000000OO[OO000O0O00OOOOO00]  #line:126
         O0OOOOOOO0O0O00OO = O0OOOOOOO0O0O00OO[OO000O0O00OOOOO00]  #line:127
         OO0O0O0000000O000 = O000O00OO0O0O0OOO([
             OO0O0O0O0O00O0000
             for OO0O0O0O0O00O0000 in OO00O00OOOO0OOO0O.product(
                 range(O000000OO000000OO.shape[0]), repeat=2)
         ])  #line:130
         OO0O0O0000000O000 = OO0O0O0000000O000[
             OO0O0O0000000O000[:, 0] != OO0O0O0000000O000[:,
                                                          1], :]  #line:133
         OOOOO00O0O0000OO0 = OO0O0O0000000O000[:, 0]  #line:136
         OOOOO0O0OOOO00O0O = OO0O0O0000000O000[:, 1]  #line:137
         O0OO00000OO00O00O = O0OOOOOOO0O0O00OO[
             OOOOO00O0O0000OO0] - 1  #line:139
         O0000OO0OOO0OO00O = O0OOOOOOO0O0O00OO[
             OOOOO0O0OOOO00O0O] - 1  #line:140
         OOOO0OOO000O00O0O = O0OO00000OO00O00O == O0000OO0OOO0OO00O  #line:142
         if not all(OOOO0OOO000O00O0O):  #line:144
             OOOOO00O0O0000OO0 = OOOOO00O0O0000OO0[
                 ~OOOO0OOO000O00O0O]  #line:145
             OOOOO0O0OOOO00O0O = OOOOO0O0OOOO00O0O[
                 ~OOOO0OOO000O00O0O]  #line:146
             O0OO00000OO00O00O = O0OO00000OO00O00O[
                 ~OOOO0OOO000O00O0O]  #line:148
             O0000OO0OOO0OO00O = O0000OO0OOO0OO00O[
                 ~OOOO0OOO000O00O0O]  #line:149
             O0O0000O000OOO00O = O000000OO000000OO[OOOOO00O0O0000OO0, :
                                                   2]  #line:151
             O0O0O0OO00O00OO0O = O000000OO000000OO[OOOOO0O0OOOO00O0O, :
                                                   2]  #line:152
             O0O0O000OOO000O00 = O0000O0O0000O0O00.L2norm(
                 O0O0000O000OOO00O - O0O0O0OO00O00OO0O)  #line:156
             OO0O000OOO0OO0O0O = OO0OO0O0OO00000OO.squareform[
                 O0OO00000OO00O00O, O0000OO0OOO0OO00O]  #line:158
             O0OO0O0OO00O0OOO0 = O0O0O000OOO000O00 / OO0O000OOO0OO0O0O  #line:160
             OO0O000OO00OO0000 = O00OO00OOO0OOO0OO.empty(
                 (O0O0000O000OOO00O.shape[0]))  #line:163
             O0O000O0000000OO0 = O0O0O0OO00O00OO0O - O0O0000O000OOO00O  #line:165
             OO0O000OO00OO0000 = O00OO00OOO0OOO0OO.arctan2(
                 O0O000O0000000OO0[:, 1], O0O000O0000000OO0[:,
                                                            0])  #line:166
             O00O0OO0O00OOO00O = OO0OO0O0OO00000OO.angles[
                 O0OO00000OO00O00O, O0000OO0OOO0OO00O]  #line:168
             OO00O00OO00O0O0OO = OO0O000OO00OO0000 - O00O0OO0O00OOO00O  #line:170
             OOOOO0O00O00O0OOO = O00OO00OOO0OOO0OO.abs(
                 OO00O00OO00O0O0OO) > O00O00000O0OO0000.pi  #line:173
             OO00O00OO00O0O0OO[OOOOO0O00O00O0OOO] = OO00O00OO00O0O0OO[
                 OOOOO0O00O00O0OOO] - O00OO00OOO0OOO0OO.sign(
                     OO00O00OO00O0O0OO[OOOOO0O00O00O0OOO]
                 ) * 2 * O00O00000O0OO0000.pi  #line:175
             O0000OOO0OOO0O0O0 = OO0OOO00OOOOOOO0O(
                 O0OO0O0OO00O0OOO0)  #line:177
             if not OO0OO0O0OO00000OO.estimate_scale:  #line:178
                 O0000OOO0OOO0O0O0 = 1
                 #line:179
             O0000O00OO00O0OOO = OO0OOO00OOOOOOO0O(
                 OO00O00OO00O0O0OO)  #line:181
             if not OO0OO0O0OO00000OO.estimate_rotation:  #line:182
                 O0000O00OO00O0OOO = 0
                 #line:183
             OOO0OO0O0O0O00OOO = O000000OO000000OO[:, 2].astype(
                 O00OO00OOO0OOO0OO.int)  #line:185
             OOO000000O0OO000O = O000000OO000000OO[:, :2] - O0000OOO0OOO0O0O0 * (
                 O0000O0O0000O0O00.rotate(
                     OO0OO0O0OO00000OO.springs[OOO0OO0O0O0O00OOO - 1],
                     O0000O00OO00O0OOO))  #line:186
             OO0OO0O0OO00000OO.votes = OOO000000O0OO000O  #line:189
             O0OO0O0OO0OO0O00O = O0O000000000O000O.spatial.distance.pdist(
                 OOO000000O0OO000O)  #line:192
             OOOO0O0OO000O00OO = O0O000000000O000O.cluster.hierarchy.linkage(
                 O0OO0O0OO0OO0O00O)  #line:195
             O000OOOOOO00O000O = O0O000000000O000O.cluster.hierarchy.fcluster(
                 OOOO0O0OO000O00OO,
                 OO0OO0O0OO00000OO.THR_OUTLIER,
                 criterion='distance')  #line:198
             OO00000OO000O000O = O00OO00OOO0OOO0OO.bincount(
                 O000OOOOOO00O000O)  #line:201
             O00O0O00000OOOOO0 = O0O000O0O0O00O0O0(
                 OO00000OO000O000O)  #line:204
             OOOOOOOOO0O0O000O = O000OOOOOO00O000O == O00O0O00000OOOOO0  #line:207
             OO0OO0O0OO00000OO.outliers = O000000OO000000OO[
                 ~OOOOOOOOO0O0O000O, :]  #line:211
             O000000OO000000OO = O000000OO000000OO[
                 OOOOOOOOO0O0O000O, :]  #line:214
             OOO000000O0OO000O = OOO000000O0OO000O[
                 OOOOOOOOO0O0O000O, :]  #line:217
             O0OOO00O000000O0O = O00OO00OOO0OOO0OO.mean(OOO000000O0OO000O,
                                                        axis=0)  #line:220
     return (O0OOO00O000000O0O, O0000OOO0OOO0O0O0, O0000O00OO00O0OOO,
             O000000OO000000OO)  #line:222
Example #7
 def process_frame(O0000O0000O00OOO0, O0O0O00000OOO000O):  #line:224
     OOO00O0OO0O00000O, _O0OOO0O000O000000 = O0000O0O0000O0O00.track(
         O0000O0000O00OOO0.im_prev, O0O0O00000OOO000O,
         O0000O0000O00OOO0.active_keypoints)  #line:226
     (O00OOO0O000000000, OOO0O0O00OO0O0OO0,
      O00OO0OO0O0O0O0OO, OOO00O0OO0O00000O) = O0000O0000O00OOO0.estimate(
          OOO00O0OO0O00000O)  #line:227
     OOO0O000OO0O000OO = O0000O0000O00OOO0.detector.detect(
         O0O0O00000OOO000O)  #line:230
     OOO0O000OO0O000OO, OO000OOOOOOO0O00O = O0000O0000O00OOO0.descriptor.compute(
         O0O0O00000OOO000O, OOO0O000OO0O000OO)  #line:231
     OO000O0000OOO0O0O = OO000O0OOO00OOOOO((0, 3))  #line:234
     OOO0OO0OO00O0OO00 = O0000O0000O00OOO0.matcher.knnMatch(
         OO000OOOOOOO0O00O, O0000O0000O00OOO0.features_database,
         2)  #line:237
     if not any(OO00O0OO0O000OOOO(O00OOO0O000000000)):  #line:239
         OO0O00OOOOOOO0OO0 = O0000O0000O00OOO0.matcher.knnMatch(
             OO000OOOOOOO0O00O, O0000O0000O00OOO0.selected_features,
             len(O0000O0000O00OOO0.selected_features))  #line:240
     if len(OOO0O000OO0O000OO) > 0:  #line:244
         OOO0O0OOOOOOO0O00 = OOO0O0O00OO0O0OO0 * O0000O0O0000O0O00.rotate(
             O0000O0000O00OOO0.springs, -O00OO0OO0O0O0O0OO)  #line:245
         for O0O00O0OO00OOOO0O in range(len(OOO0O000OO0O000OO)):  #line:246
             OO0O0000O00O00OO0 = O00OO00OOO0OOO0OO.array(
                 OOO0O000OO0O000OO[O0O00O0OO00OOOO0O].pt)  #line:249
             O000OOOOO0OOOO00O = OOO0OO0OO00O0OO00[
                 O0O00O0OO00OOOO0O]  #line:253
             OOOO000O00O000000 = O00OO00OOO0OOO0OO.array([
                 OOO0OOOO00000O0O0.distance
                 for OOO0OOOO00000O0O0 in O000OOOOO0OOOO00O
             ])  #line:254
             OO00OOO00OOOO0000 = 1 - OOOO000O00O000000 / O0000O0000O00OOO0.DESC_LENGTH  #line:257
             O00OO000OOO0O00O0 = O0000O0000O00OOO0.database_classes  #line:259
             O00OO0O0O00O0O000 = O000OOOOO0OOOO00O[0].trainIdx  #line:262
             O0O0O0000OOO0O000 = O000OOOOO0OOOO00O[1].trainIdx  #line:263
             O0000O000000O0000 = (1 - OO00OOO00OOOO0000[0]) / (
                 1 - OO00OOO00OOOO0000[1])  #line:266
             O0OO0OO00OOO0000O = O00OO000OOO0O00O0[
                 O00OO0O0O00O0O000]  #line:269
             if O0000O000000O0000 < O0000O0000O00OOO0.THR_RATIO and OO00OOO00OOOO0000[
                     0] > O0000O0000O00OOO0.THR_CONF and O0OO0OO00OOO0000O != 0:  #line:272
                 O0OOO00OO000O00O0 = O0OOO0OOO0O0OO00O(
                     OO0O0000O00O00OO0, O0OO0OO00OOO0000O)  #line:275
                 OO000O0000OOO0O0O = O0OOO0OOO0O0OO00O(
                     OO000O0000OOO0O0O,
                     O000O00OO0O0O0OOO([O0OOO00OO000O00O0]),
                     axis=0)  #line:276
             if not any(OO00O0OO0O000OOOO(O00OOO0O000000000)):  #line:280
                 O000OOOOO0OOOO00O = OO0O00OOOOOOO0OO0[
                     O0O00O0OO00OOOO0O]  #line:283
                 OOOO000O00O000000 = O00OO00OOO0OOO0OO.array([
                     O0OO0000O000O0000.distance
                     for O0OO0000O000O0000 in O000OOOOO0OOOO00O
                 ])  #line:284
                 OO0O0000OO0000000 = O00OO00OOO0OOO0OO.argsort(
                     O00OO00OOO0OOO0OO.array([
                         OO00O0OO0OOOOO00O.trainIdx
                         for OO00O0OO0OOOOO00O in O000OOOOO0OOOO00O
                     ]))  #line:286
                 OOOO000O00O000000 = OOOO000O00O000000[
                     OO0O0000OO0000000]  #line:287
                 O0O000O0000OOO0OO = 1 - OOOO000O00O000000 / O0000O0000O00OOO0.DESC_LENGTH  #line:290
                 O00O00OO00000000O = OO0O0000O00O00OO0 - O00OOO0O000000000  #line:293
                 OO0OOOO00O0OOOOO0 = O0000O0O0000O0O00.L2norm(
                     OOO0O0OOOOOOO0O00 - O00O00OO00000000O)  #line:296
                 OOO00OO00O0O000O0 = OO0OOOO00O0OOOOO0 < O0000O0000O00OOO0.THR_OUTLIER  #line:299
                 OO00OOO00OOOO0000 = OOO00OO00O0O000O0 * O0O000O0000OOO0OO  #line:301
                 O00OO000OOO0O00O0 = O0000O0000O00OOO0.selected_classes  #line:303
                 O000O00O000OOO0O0 = O0OOOOOO000OO000O(
                     OO00OOO00OOOO0000)[::-1]  #line:306
                 O00OO0O0O00O0O000 = O000O00O000OOO0O0[0]  #line:309
                 O0O0O0000OOO0O000 = O000O00O000OOO0O0[1]  #line:310
                 O0000O000000O0000 = (
                     1 - OO00OOO00OOOO0000[O00OO0O0O00O0O000]) / (
                         1 - OO00OOO00OOOO0000[O0O0O0000OOO0O000]
                     )  #line:313
                 O0OO0OO00OOO0000O = O00OO000OOO0O00O0[
                     O00OO0O0O00O0O000]  #line:316
                 if O0000O000000O0000 < O0000O0000O00OOO0.THR_RATIO and OO00OOO00OOOO0000[
                         O00OO0O0O00O0O000] > O0000O0000O00OOO0.THR_CONF and O0OO0OO00OOO0000O != 0:  #line:319
                     O0OOO00OO000O00O0 = O0OOO0OOO0O0OO00O(
                         OO0O0000O00O00OO0, O0OO0OO00OOO0000O)  #line:322
                     if OO000O0000OOO0O0O.size > 0:  #line:325
                         OO0OO00OO0OOOO00O = O00OO00OOO0OOO0OO.nonzero(
                             OO000O0000OOO0O0O[:, 2] ==
                             O0OO0OO00OOO0000O)  #line:326
                         OO000O0000OOO0O0O = O00OO00OOO0OOO0OO.delete(
                             OO000O0000OOO0O0O, OO0OO00OO0OOOO00O,
                             axis=0)  #line:327
                     OO000O0000OOO0O0O = O0OOO0OOO0O0OO00O(
                         OO000O0000OOO0O0O,
                         O000O00OO0O0O0OOO([O0OOO00OO000O00O0]),
                         axis=0)  #line:329
     if OOO00O0OO0O00000O.size > 0:  #line:332
         O00OO00OO000O00OO = OOO00O0OO0O00000O[:, 2]  #line:335
         if OO000O0000OOO0O0O.size > 0:  #line:338
             OOO00O0O0OOOO0OO0 = OO000O0000OOO0O0O[:, 2]  #line:341
             O0O0O0OO00O0OOOOO = ~O00OO00OOO0OOO0OO.in1d(
                 O00OO00OO000O00OO, OOO00O0O0OOOO0OO0)  #line:342
             OO000O0000OOO0O0O = O0OOO0OOO0O0OO00O(
                 OO000O0000OOO0O0O,
                 OOO00O0OO0O00000O[O0O0O0OO00O0OOOOO, :],
                 axis=0)  #line:343
         else:  #line:346
             OO000O0000OOO0O0O = OOO00O0OO0O00000O  #line:347
     _O0OOO0O000O000000 = OO000O0000OOO0O0O  #line:350
     O0000O0000O00OOO0.center = O00OOO0O000000000  #line:351
     O0000O0000O00OOO0.scale_estimate = OOO0O0O00OO0O0OO0  #line:352
     O0000O0000O00OOO0.rotation_estimate = O00OO0OO0O0O0O0OO  #line:353
     O0000O0000O00OOO0.tracked_keypoints = OOO00O0OO0O00000O  #line:354
     O0000O0000O00OOO0.active_keypoints = OO000O0000OOO0O0O  #line:355
     O0000O0000O00OOO0.im_prev = O0O0O00000OOO000O  #line:356
     O0000O0000O00OOO0.keypoints_cv = OOO0O000OO0O000OO  #line:357
     _O0OOO0O000O000000 = O00O000OO0O000OO0.time()  #line:358
     O0000O0000O00OOO0.tl = (OOO00O00000OO0O0O, OOO00O00000OO0O0O
                             )  #line:360
     O0000O0000O00OOO0.tr = (OOO00O00000OO0O0O, OOO00O00000OO0O0O
                             )  #line:361
     O0000O0000O00OOO0.br = (OOO00O00000OO0O0O, OOO00O00000OO0O0O
                             )  #line:362
     O0000O0000O00OOO0.bl = (OOO00O00000OO0O0O, OOO00O00000OO0O0O
                             )  #line:363
     O0000O0000O00OOO0.bb = O000O00OO0O0O0OOO([
         OOO00O00000OO0O0O, OOO00O00000OO0O0O, OOO00O00000OO0O0O,
         OOO00O00000OO0O0O
     ])  #line:365
     O0000O0000O00OOO0.has_result = False  #line:367
     if not any(
             OO00O0OO0O000OOOO(O0000O0000O00OOO0.center)
     ) and O0000O0000O00OOO0.active_keypoints.shape[
             0] > O0000O0000O00OOO0.num_initial_keypoints / 10:  #line:368
         O0000O0000O00OOO0.has_result = True  #line:369
         O0O000O0000O0O0OO = O0000O0O0000O0O00.array_to_int_tuple(
             O00OOO0O000000000 +
             OOO0O0O00OO0O0OO0 * O0000O0O0000O0O00.rotate(
                 O0000O0000O00OOO0.center_to_tl[None, :],
                 O00OO0OO0O0O0O0OO).squeeze())  #line:371
         OOO0O00O0OOOO00O0 = O0000O0O0000O0O00.array_to_int_tuple(
             O00OOO0O000000000 +
             OOO0O0O00OO0O0OO0 * O0000O0O0000O0O00.rotate(
                 O0000O0000O00OOO0.center_to_tr[None, :],
                 O00OO0OO0O0O0O0OO).squeeze())  #line:372
         OO0OOOOOOOO0O00OO = O0000O0O0000O0O00.array_to_int_tuple(
             O00OOO0O000000000 +
             OOO0O0O00OO0O0OO0 * O0000O0O0000O0O00.rotate(
                 O0000O0000O00OOO0.center_to_br[None, :],
                 O00OO0OO0O0O0O0OO).squeeze())  #line:373
         O00OO0000O000O0O0 = O0000O0O0000O0O00.array_to_int_tuple(
             O00OOO0O000000000 +
             OOO0O0O00OO0O0OO0 * O0000O0O0000O0O00.rotate(
                 O0000O0000O00OOO0.center_to_bl[None, :],
                 O00OO0OO0O0O0O0OO).squeeze())  #line:374
         O0OOO0O00OOO0OO0O = min(
             (O0O000O0000O0O0OO[0], OOO0O00O0OOOO00O0[0],
              OO0OOOOOOOO0O00OO[0], O00OO0000O000O0O0[0]))  #line:376
         O0O00OOOO00000O00 = min(
             (O0O000O0000O0O0OO[1], OOO0O00O0OOOO00O0[1],
              OO0OOOOOOOO0O00OO[1], O00OO0000O000O0O0[1]))  #line:377
         OOO0O00000O0O0OOO = max(
             (O0O000O0000O0O0OO[0], OOO0O00O0OOOO00O0[0],
              OO0OOOOOOOO0O00OO[0], O00OO0000O000O0O0[0]))  #line:378
         OOOOO00O0O0000O00 = max(
             (O0O000O0000O0O0OO[1], OOO0O00O0OOOO00O0[1],
              OO0OOOOOOOO0O00OO[1], O00OO0000O000O0O0[1]))  #line:379
         O0000O0000O00OOO0.tl = O0O000O0000O0O0OO  #line:381
         O0000O0000O00OOO0.tr = OOO0O00O0OOOO00O0  #line:382
         O0000O0000O00OOO0.bl = O00OO0000O000O0O0  #line:383
         O0000O0000O00OOO0.br = OO0OOOOOOOO0O00OO  #line:384
         O0000O0000O00OOO0.bb = O00OO00OOO0OOO0OO.array([
             O0OOO0O00OOO0OO0O, O0O00OOOO00000O00,
             OOO0O00000O0O0OOO - O0OOO0O00OOO0OO0O,
             OOOOO00O0O0000O00 - O0O00OOOO00000O00
         ])
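
Examples #6 and #7 are an obfuscated keypoint tracker that repeatedly calls a rotate utility on N x 2 arrays of 2D offsets (its springs and center_to_* attributes) together with a scalar angle. A minimal NumPy sketch of such an array rotation, assuming radians and a counter-clockwise convention (the name and signature are illustrative):

import numpy as np

def rotate(vectors, angle):
    # Illustrative stand-in: rotate each row of an (N, 2) array of 2D
    # vectors counter-clockwise by angle (radians).
    c, s = np.cos(angle), np.sin(angle)
    rotation = np.array([[c, -s],
                         [s, c]])
    return np.atleast_2d(vectors).dot(rotation.T)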
Example #8
def processDatabase(dataset,
                    names,
                    deg=0,
                    scale=1.0,
                    confThreshold=0.5,
                    showImg=True):
    """run face detection for named dataset as names.
    dataset:
    names:
    deg: angle (anti-clockwise)
    """
    if dataset == "headPose":
        import readheadPose
        d = readheadPose.getTruePosition()

    log = open("log_%s_%d_%f.csv" % (dataset, deg, scale), "wt")
    log.write("name,num,truePositives,falsePositives,meanSize\n")

    detector = resnetFaceDetector.ResnetFaceDetector()

    for p in names:
        dstDir = "result"
        dstname = os.path.join(dstDir, p)
        dirname = os.path.dirname(dstname)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)

        frame = cv.imread(p)
        if deg != 0:
            frame = helper.rotate(frame, deg)

        [h, w] = frame.shape[:2]
        scaledImg = helper.scaledImage(frame, scale)
        frame = scaledImg

        cols = frame.shape[1]
        rows = frame.shape[0]
        [h, w] = frame.shape[:2]
        imgCenter = [cols / 2, rows / 2]

        dets, confidences, perf_stats = detector.run(frame, confThreshold)

        trueDetection = {True: 0, False: 0}

        if dataset in ("lfw", ):
            center = imgCenter
            center = (int(scale * center[0]), int(scale * center[1]))

        elif dataset == "headPose":
            v = d[p]
            center = (v[0], v[1])
            center = readheadPose.getRotatedPoint(center, deg, imgCenter)
            # TODO: fix how the image points are handled after the scaling applied here
            center = (int(scale * center[0]), int(scale * center[1]))

            r = int(50 * scale)

            cv.circle(frame, center, r, (0, 255, 0))
        else:
            center = imgCenter
            center = (int(scale * center[0]), int(scale * center[1]))

        trueSizes = []

        for i, det in enumerate(dets):
            confidence = confidences[i]
            xLeftTop, yLeftTop, width, height = det
            xRightBottom = xLeftTop + width
            yRightBottom = yLeftTop + height

            width = xRightBottom - xLeftTop

            isPositive = helper.isInside(center, (xLeftTop, yLeftTop),
                                         (xRightBottom, yRightBottom))
            trueDetection[isPositive] += 1
            trueSizes.append(width)

            cv.circle(frame, (xLeftTop, yLeftTop), 5, (0, 255, 0))
            cv.circle(frame, (xRightBottom, yRightBottom), 5, (0, 255, 0))

            color = {True: (0, 255, 0), False: (0, 0, 128)}[isPositive]
            cv.rectangle(frame, (xLeftTop, yLeftTop),
                         (xRightBottom, yRightBottom), color)

            label = "face: %.4f" % confidence
            labelSize, baseLine = cv.getTextSize(label,
                                                 cv.FONT_HERSHEY_SIMPLEX, 0.5,
                                                 1)

            cv.rectangle(frame, (xLeftTop, yLeftTop - labelSize[1]),
                         (xLeftTop + labelSize[0], yLeftTop + baseLine),
                         (255, 255, 255), cv.FILLED)
            cv.putText(frame, label, (xLeftTop, yLeftTop),
                       cv.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0))

        found = trueDetection[True] + trueDetection[False]
        log.write("%s, %d, %d, %d, %s\n" %
                  (p, found, trueDetection[True], trueDetection[False], `
                   np.mean(trueSizes) `))
        cv.imwrite(dstname, frame)

        if showImg:
            cv.imshow("resnet based (%d, %d)" % (w, h), frame)
            k = cv.waitKey(1) & 0xff
            if k == ord('q') or k == 27:
                break
    log.close()
    cv.destroyAllWindows()
Example #9
sprites["cookie"].set_colorkey((255, 255, 255))
sprites["cookie"] = pygame.transform.scale(sprites["cookie"], (200, 200))
sprites["background"] = pygame.transform.scale2x(sprites["background"])

objects["cookie"].sprite = sprites["cookie"]
objects["cookie"].x = int(WIN_WIDTH / 2)
objects["cookie"].y = int(WIN_HEIGHT / 2)

objects["background"].sprite = sprites["background"]
objects["background"].x = int(WIN_WIDTH / 2)
objects["background"].y = int(WIN_HEIGHT / 2)
objects["background"].center_x = int(WIN_WIDTH / 2)
objects["background"].center_y = int(WIN_HEIGHT / 2)

objects["background"].actions_push(
    lambda: helper.rotate(objects["background"], 0.5))

objects["cookie"].events(pygame.MOUSEBUTTONDOWN,
                         lambda: helper.add_score(objects["cookie"], 6))

END_FLAG = True

white = pygame.Surface((WIN_WIDTH, WIN_HEIGHT))
white.fill((255, 255, 255))

while END_FLAG:
    clear_display()

    EVENTS = pygame.event.get()

    for event in EVENTS: