# filter out users with fewer than min_num_exs training examples
print('\nTotal number of users ' + str(user_emb.shape[0]))
new_user_inds = np.where(cnt_users >= min_num_exs)[0]
user_emb = user_emb[new_user_inds, :]
train_users = train_users[new_user_inds]
user_class_affinity = user_class_affinity[new_user_inds, :]
print('Total number of users after filtering ' + str(user_emb.shape[0]))

print('Performing TSNE on users with min ' + str(min_num_exs) + ' examples')
user_emb_low = TSNE(n_components=2, random_state=seed).fit_transform(user_emb)

# load ocean mask
mask = np.load(get_paths('mask_dir') + 'ocean_mask.npy')
# trace coastlines: cells where the land/ocean mask has a nonzero gradient
grad_y, grad_x = np.gradient(mask)
mask_lines = grad_y**2 + grad_x**2
mask_lines[mask_lines > 0.0] = 1.0
gp = grid.GridPredictor(mask, params, mask_only_pred=True)
loc_emb = gp.dense_prediction_masked_feats(model, 0.5).data.cpu().numpy()

# aggregate user affinity per land cell: for each location, sum the sigmoid of
# the dot product between its embedding and every user embedding
# (sig is presumably an elementwise sigmoid defined earlier in this script)
mask_inds = np.where(mask.ravel() == 1)[0]
user_loc = np.zeros((mask.shape[0] * mask.shape[1]))
user_loc[mask_inds] = sig(np.dot(loc_emb, user_emb.T)).sum(1)
user_loc = user_loc.reshape((mask.shape[0], mask.shape[1]))
user_loc = np.log(1 + user_loc)  # compress the dynamic range
user_loc[mask_lines == 1] = user_loc.max()  # draw coastlines at full intensity

plt.close('all')
cmap_r = cm.afmhot.reversed()

# plot user affinity for each location
plt.figure(1)
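# The original snippet breaks off after plt.figure(1); below is a minimal,
# hypothetical continuation (a sketch, not the author's plotting code) that
# renders the per-location user affinity map with the reversed afmhot colormap
# defined above.
plt.imshow(user_loc, cmap=cmap_r)
plt.axis('off')
plt.title('aggregate user affinity per location')
plt.show()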

# Example 2
def main(args):

    download_model(args.model_url, args.model_path)
    print("Loading model: " + args.model_path)
    net_params = torch.load(args.model_path, map_location="cpu")
    params = net_params["params"]
    device = params["device"] if torch.cuda.is_available() else "cpu"
    model = models.FCNet(
        num_inputs=params["num_feats"],
        num_classes=params["num_classes"],
        num_filts=params["num_filts"],
        num_users=params["num_users"],
    ).to(device)
    model.load_state_dict(net_params["state_dict"])
    model.eval()

    # load class names
    with open(args.class_names_path) as da:
        class_data = json.load(da)

    if args.demo_type == "location":
        # convert coords to torch
        coords = np.array([args.longitude, args.latitude])[np.newaxis, ...]
        # keep the input tensors on the same device as the model
        obs_coords = utils.convert_loc_to_tensor(coords, device)
        obs_time = (torch.ones(coords.shape[0], device=device) *
                    args.time_of_year * 2 - 1.0)
        loc_time_feats = utils.encode_loc_time(obs_coords,
                                               obs_time,
                                               concat_dim=1,
                                               params=params)

        print("Making prediction ...")
        with torch.no_grad():
            pred = model(loc_time_feats)[0, :]
        pred = pred.cpu().numpy()

        num_categories = 25
        print("\nTop {} likely categories for location {:.4f}, {:.4f}:".format(
            num_categories, coords[0, 0], coords[0, 1]))
        most_likely = np.argsort(pred)[::-1]
        for ii, cls_id in enumerate(most_likely[:num_categories]):
            print("{}\t{}\t{:.3f}".format(ii, cls_id, np.round(
                pred[cls_id], 3)) + "\t" + class_data[cls_id]["our_name"] +
                  " - " + class_data[cls_id]["preferred_common_name"])

    elif args.demo_type == "map":
        # grid predictor - for making dense predictions for each lon/lat location
        gp = grid.GridPredictor(np.load("data/ocean_mask.npy"),
                                params,
                                mask_only_pred=True)

        if args.class_of_interest == -1:
            args.class_of_interest = np.random.randint(len(class_data))
        print("Selected category: " +
              class_data[args.class_of_interest]["our_name"] + " - " +
              class_data[args.class_of_interest]["preferred_common_name"])

        print("Making prediction ...")
        grid_pred = gp.dense_prediction(model,
                                        args.class_of_interest,
                                        time_step=args.time_of_year)

        op_file_name = (class_data[args.class_of_interest]["our_name"].lower()
                        .replace(" ", "_") + ".png")
        print("Saving prediction to: " + op_file_name)
        plt.imsave(op_file_name,
                   1.0 - grid_pred,
                   cmap="afmhot",
                   vmin=0,
                   vmax=1)
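
# A hypothetical driver for main() above. The flag names are inferred from the
# args attributes used in the function; the original repository's argument
# parser may differ, and the defaults below are placeholders.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="geo prior demo (sketch)")
    parser.add_argument("--model_url", type=str, required=True)
    parser.add_argument("--model_path", type=str, required=True)
    parser.add_argument("--class_names_path", type=str, required=True)
    parser.add_argument("--demo_type", type=str, default="location",
                        choices=["location", "map"])
    parser.add_argument("--longitude", type=float, default=0.0)
    parser.add_argument("--latitude", type=float, default=0.0)
    parser.add_argument("--time_of_year", type=float, default=0.5)  # presumably in [0, 1]
    parser.add_argument("--class_of_interest", type=int, default=-1)  # -1 = pick a random class
    main(parser.parse_args())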

# Example 3
def main(args):

    download_model(args.model_url, args.model_path)
    print('Loading model: ' + args.model_path)
    net_params = torch.load(args.model_path, map_location='cpu')
    params = net_params['params']
    # fall back to CPU if CUDA is not available
    device = params['device'] if torch.cuda.is_available() else 'cpu'
    model = models.FCNet(num_inputs=params['num_feats'],
                         num_classes=params['num_classes'],
                         num_filts=params['num_filts'],
                         num_users=params['num_users']).to(device)
    model.load_state_dict(net_params['state_dict'])
    model.eval()

    # load class names
    with open(args.class_names_path) as da:
        class_data = json.load(da)

    if args.demo_type == 'location':
        # convert coords to torch
        coords = np.array([args.longitude, args.latitude])[np.newaxis, ...]
        # keep the input tensors on the same device as the model
        obs_coords = utils.convert_loc_to_tensor(coords, device)
        obs_time = (torch.ones(coords.shape[0], device=device) *
                    args.time_of_year * 2 - 1.0)
        loc_time_feats = utils.encode_loc_time(obs_coords,
                                               obs_time,
                                               concat_dim=1,
                                               params=params)

        print('Making prediction ...')
        with torch.no_grad():
            pred = model(loc_time_feats)[0, :]
        pred = pred.cpu().numpy()

        num_categories = 25
        print('\nTop {} likely categories for location {:.4f}, {:.4f}:'.format(
            num_categories, coords[0, 0], coords[0, 1]))
        most_likely = np.argsort(pred)[::-1]
        for ii, cls_id in enumerate(most_likely[:num_categories]):
            print('{}\t{}\t{:.3f}'.format(ii, cls_id, np.round(pred[cls_id], 3)) +
                  '\t' + class_data[cls_id]['our_name'] + ' - ' +
                  class_data[cls_id]['preferred_common_name'])

    elif args.demo_type == 'map':
        # grid predictor - for making dense predictions for each lon/lat location
        gp = grid.GridPredictor(np.load('data/ocean_mask.npy'),
                                params,
                                mask_only_pred=True)

        if args.class_of_interest == -1:
            args.class_of_interest = np.random.randint(len(class_data))
        print('Selected category: ' + class_data[args.class_of_interest]['our_name'] +
              ' - ' + class_data[args.class_of_interest]['preferred_common_name'])

        print('Making prediction ...')
        grid_pred = gp.dense_prediction(model,
                                        args.class_of_interest,
                                        time_step=args.time_of_year)

        op_file_name = (class_data[args.class_of_interest]['our_name'].lower()
                        .replace(' ', '_') + '.png')
        print('Saving prediction to: ' + op_file_name)
        plt.imsave(op_file_name,
                   1.0 - grid_pred,
                   cmap='afmhot',
                   vmin=0,
                   vmax=1)
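        # note: imsave pushes (1.0 - grid_pred) through the 'afmhot' colormap, so
        # locations with higher predicted presence appear darker in the saved image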