Example No. 1
def validation(model, val_loader, p3d_mean, p3d_std):
    """Evaluate the model on val_loader and return mean MPJPE and PA-MPJPE in the unnormalized pose space."""
    joint_error = []
    PA_MPJPE = []
    with torch.no_grad():
        model.eval()
        for _, (_, pose3d, _, pose2d, _) in enumerate(val_loader):
            p2d = pose2d.to(DEVICE).float()

            p3d_pred = model(p2d)

            # unnormalize groundtruth pose3d, after unnorm, pelvis = [0,0,0]
            p3d_gt_unnorm = pose3d.cpu().detach().numpy().reshape([BATCHSIZE, -1, 3])
            p3d_pelvis = p3d_gt_unnorm[:, 0, :]
            p3d_pelvis = np.expand_dims(p3d_pelvis, axis=1)
            p3d_pelvis = np.repeat(p3d_pelvis, 17, axis=1)
            p3d_gt_unnorm = p3d_gt_unnorm - p3d_pelvis

            # unnormalize predicted pose3d
            p3d_pred_np = p3d_pred.cpu().detach().numpy().reshape([BATCHSIZE, -1, 3])
            p3d_pred_unnorm = util.unnormalize(pose3d_norm=p3d_pred_np,
                                                mean=p3d_mean,
                                                std=p3d_std,
                                                num_joints=17)

            MPJPE = util.get_error(pose3d_pred=p3d_pred_unnorm, pose3d_gt=p3d_gt_unnorm)
            joint_error.append(MPJPE[0])
            PA_MPJPE.append(MPJPE[1])
    joint_error_mean = np.array(joint_error).mean()
    PA_MPJPE_mean = np.array(PA_MPJPE).mean()
    return joint_error_mean, PA_MPJPE_mean
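
The util.unnormalize helper is not part of this example. A minimal sketch of what it could look like, assuming the 3D poses were z-score normalized per joint coordinate (the mean/std shapes and the reshape are assumptions, not the project's actual implementation):

import numpy as np

def unnormalize(pose3d_norm, mean, std, num_joints):
    """Invert per-coordinate z-score normalization of 3D poses (a sketch).

    Assumes pose3d_norm has shape [batch, num_joints, 3] and that mean/std
    can be reshaped to [1, num_joints, 3]; the real util module is not shown
    in these examples, so treat this as illustrative only.
    """
    mean = np.asarray(mean).reshape(1, num_joints, 3)
    std = np.asarray(std).reshape(1, num_joints, 3)
    return pose3d_norm * std + mean

Re-centering the result on the pelvis (as done for the ground truth above) is left to the caller in this sketch; whether the real helper does it internally is not visible from the snippet.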
Example No. 2
def update_event():
    """Update an existing catchup from the JSON request body; only the catchup owner may edit it."""
    data = request.data
    dataDict = json.loads(data)
    user_email = dataDict['user_email']
    session_token = dataDict['session_token']
    user_valid = util.validate_user(user_email, session_token)
    if not user_valid[0]:
        return jsonify(user_valid[1]) 
    catchup = dataDict['catchup']
    catchup_id = catchup['_id']['$oid']
    catchup_obj = Catchup.objects.get(id=catchup_id)
    if user_email != catchup_obj.catchup_owner:
        return jsonify(util.get_error('Invalid permissions'))
    catchup_obj.catchup_title = catchup['catchup_title']
    catchup_obj.compare_accepted_users(catchup['accepted_users'])
    catchup_obj.accepted_users = catchup['accepted_users']
    catchup_obj.new_invite_users(catchup['invited_users'])
    catchup_obj.compare_invited_users(catchup['invited_users'])
    catchup_obj.invited_users = catchup['invited_users']
    catchup_obj.frequency = catchup['frequency']
    if 'current_event' in catchup:
        current_event = catchup['current_event']
        catchup_obj.current_event.event_name = current_event['event_name']
        catchup_obj.current_event.event_start_time = current_event['event_start_time']
        catchup_obj.current_event.event_end_time = current_event['event_end_time']
        catchup_obj.current_event.event_location = current_event['event_location']
        catchup_obj.current_event.event_duration = current_event['event_duration']
    catchup_obj.save()
    print(catchup_obj)
    return jsonify({'success': 'updated catchup!'})
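
For reference, the JSON body this handler expects would look roughly like the dictionary below; the key names come from the code above, while every value is an illustrative assumption:

# Illustrative request body for update_event; keys follow the handler above, values are made up.
payload = {
    'user_email': 'owner@example.com',
    'session_token': 'abc123',
    'catchup': {
        '_id': {'$oid': '5f1d7e2c9b1e8a0012345678'},   # MongoDB ObjectId in extended-JSON form
        'catchup_title': 'Monthly catchup',
        'accepted_users': ['friend1@example.com'],
        'invited_users': ['friend2@example.com'],
        'frequency': 'monthly',
        'current_event': {                              # optional; only applied if present
            'event_name': 'Coffee',
            'event_start_time': '2021-01-01T10:00:00',
            'event_end_time': '2021-01-01T11:00:00',
            'event_location': 'Cafe',
            'event_duration': 60,
        },
    },
}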
Example No. 3
def train(model, train_loader, val_loader, optimizer, p3d_mean, p3d_std,
          traincounter):
    """Run one pass over train_loader, saving and validating periodically; returns the updated traincounter."""
    model.train()
    val_joint_error_mean = 1000
    val_PA_MPJPE_mean = 1000
    loader = tqdm(train_loader)
    for _, (_, pose3d, pose3d_norm, pose2d, _) in enumerate(loader):
        p3d_gt_norm, p2d = pose3d_norm.to(DEVICE), pose2d.to(
            DEVICE).float().view([-1, 17, 2])

        # forward
        p3d_pred = model(p2d).view([-1, 51])
        loss = F.mse_loss(p3d_gt_norm, p3d_pred, reduction='mean')

        # backward
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # unnormalize groundtruth pose3d
        p3d_gt_unnorm = pose3d.cpu().detach().numpy().reshape(
            [BATCHSIZE, -1, 3])
        p3d_pelvis = p3d_gt_unnorm[:, 0, :]
        p3d_pelvis = np.expand_dims(p3d_pelvis, axis=1)
        p3d_pelvis = np.repeat(p3d_pelvis, 17, axis=1)
        p3d_gt_unnorm = p3d_gt_unnorm - p3d_pelvis

        # unnormalize predicted pose3d
        p3d_pred_np = p3d_pred.cpu().detach().numpy().reshape(
            [BATCHSIZE, -1, 3])
        p3d_pred_unnorm = util.unnormalize(pose3d_norm=p3d_pred_np,
                                           mean=p3d_mean,
                                           std=p3d_std,
                                           num_joints=17)

        traincounter += 1

        if not traincounter % 1000:
            torch.save(model.state_dict(), MODEL_PATH + SEMGCN)
        if not traincounter % 5000:
            val_joint_error_mean, val_PA_MPJPE_mean = validation(
                model=model,
                val_loader=val_loader,
                p3d_mean=p3d_mean,
                p3d_std=p3d_std)
            print('val_joint_error_mean: {}, val_PA_MPJPE_mean: {}'.format(
                val_joint_error_mean, val_PA_MPJPE_mean))
        MPJPE = util.get_error(pose3d_pred=p3d_pred_unnorm,
                               pose3d_gt=p3d_gt_unnorm)
        loader.set_description("joint error:{:.4f}, MPJPE: {:.4f}".format(
            MPJPE[0], MPJPE[1]))
        loader.refresh()

    return traincounter
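
A minimal driver that wires train() and validation() together could look like the sketch below. The build_model factory, the dataset objects, and EPOCHS are assumptions; DEVICE, BATCHSIZE, MODEL_PATH and SEMGCN mirror the globals already used above. drop_last=True matters here because both functions reshape tensors with the fixed BATCHSIZE.

# Sketch of a training driver; model/dataset construction and EPOCHS are assumptions.
import torch
from torch.utils.data import DataLoader

def main():
    model = build_model().to(DEVICE)            # assumed factory for the 2D-to-3D lifting network
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

    train_loader = DataLoader(train_dataset, batch_size=BATCHSIZE,
                              shuffle=True, drop_last=True)
    val_loader = DataLoader(val_dataset, batch_size=BATCHSIZE,
                            shuffle=False, drop_last=True)

    # p3d_mean / p3d_std: per-joint statistics used for (un)normalization,
    # assumed to be provided by the dataset or a preprocessing step.
    traincounter = 0
    for _ in range(EPOCHS):
        traincounter = train(model, train_loader, val_loader, optimizer,
                             p3d_mean, p3d_std, traincounter)
    torch.save(model.state_dict(), MODEL_PATH + SEMGCN)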
Example No. 4
def delete_catchup():
    """Delete a catchup and remove it from the owner's and every accepted/invited user's lists; owner only."""
    data = request.data
    dataDict = json.loads(data)
    user_email = dataDict['user_email']
    session_token = dataDict['session_token']
    user_valid = util.validate_user(user_email, session_token)
    if not user_valid[0]:
        return jsonify(user_valid[1]) 
    catchup = dataDict['catchup'] 
    catchup_obj = Catchup.objects.get(id=catchup['_id']['$oid'])
    if user_email != catchup_obj.catchup_owner:
        return jsonify(util.get_error('Invalid permissions'))
    owner_obj = User.objects.get(email=catchup_obj.catchup_owner)
    owner_obj.remove_catchup(catchup_obj.id)
    for user in catchup_obj.accepted_users:
        user_obj = User.objects.get(email=user)
        user_obj.remove_catchup(catchup_obj.id)
    for user in catchup_obj.invited_users:
        user_obj = User.objects.get(email=user)
        user_obj.remove_catchup(catchup_obj.id)
    catchup_obj.delete()
    return jsonify({'success': 'deleted catchup'})
Example No. 5
    else:
        print('{} is an invalid model. Pick from (ff), (linear), (svr), (gru).'.format(FLAGS.model))
        sys.exit()

    if FLAGS.train:
        model.train((X[train], Y[train]))

    t0 = time.perf_counter()
    prediction = model.predict((X[test], Y[test]))
    print('{}: prediction took {:.3f} seconds'.format(FLAGS.model, time.perf_counter() - t0))

    if FLAGS.model in ('ff', 'gru'):
        model.sess.close()
        tf.reset_default_graph()

    error = get_error(Y[test], prediction)
    print """=================
nrsmd per hour: {}
nrsmd mean: {}""".format(error, np.mean(error))

    errors.append(error)

    if FLAGS.plot:
        plt.plot(Y[test][-15:-1].flatten())
        plt.plot(prediction[-15:-1].flatten())
        plt.show()

    # elif FLAGS.model == 'lstm':
    #     recurrent = Recurrent()
    #     x=np.reshape(x, (x.shape[0], x.shape[1], 1))
    #     recurrent.train(training)
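
get_error is not defined in this snippet. Given the "nrmsd per hour" output above, a plausible sketch is a root-mean-square deviation computed per column (one column per hour) and normalized by the ground-truth range; the normalization choice is an assumption, since other conventions divide by the mean or the standard deviation instead:

import numpy as np

def get_error(y_true, y_pred):
    """Per-column NRMSD (a sketch; normalizing by the range is an assumption)."""
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    rmsd = np.sqrt(np.mean((y_pred - y_true) ** 2, axis=0))
    spread = y_true.max(axis=0) - y_true.min(axis=0)
    return rmsd / np.where(spread == 0, 1.0, spread)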
Example No. 6
                downloaded_file = episode.download_to(temp_download_location)
                if actual_run and downloaded_file and move(
                        downloaded_file, podcast_file, args.over_write):
                    logging.info(
                        F"Fetch completed for {podcast_file.relative_to(podcast_store_location)}"
                    )
                    # store result to avoid repetition
                    record.store(episode)
                elif downloaded_file:
                    downloaded_file.unlink()
                    logging.info(
                        F"Dry-run fetch completed for {podcast_file.relative_to(podcast_store_location)}"
                    )
                else:
                    logging.error(F"Episode {episode} not downloaded")
            episode = None

        if actual_run:
            record.sort()
            logging.info(F"Download record updated")
    except Exception:
        logging.error(
            F"Fatal error with {episode or podcast or 'no info'} ({traceback.format_exc()})"
        )
        set_error(1)

    logging.getLogger().setLevel(logging.INFO)
    logging.info(F"Done updating\n{'='*43}")

    sys.exit(get_error())
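
set_error and get_error are not defined in this snippet either; a minimal sketch of a module-level exit-code holder that matches how they are called above (the private variable name is an assumption):

_exit_code = 0  # assumed module-level holder; 0 means success

def set_error(code):
    """Record a non-zero exit code once a fatal error has been logged."""
    global _exit_code
    _exit_code = code

def get_error():
    """Return the exit code to pass to sys.exit()."""
    return _exit_code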