Example #1
def main(args):

    download_model(args.model_url, args.model_path)
    print("Loading model: " + args.model_path)
    net_params = torch.load(args.model_path, map_location="cpu")
    params = net_params["params"]
    device = params["device"] if torch.cuda.is_available() else "cpu"
    params["device"] = device  # keep the feature tensors below on the same device as the model
    model = models.FCNet(
        num_inputs=params["num_feats"],
        num_classes=params["num_classes"],
        num_filts=params["num_filts"],
        num_users=params["num_users"],
    ).to(device)
    model.load_state_dict(net_params["state_dict"])
    model.eval()

    # load class names
    with open(args.class_names_path) as da:
        class_data = json.load(da)

    if args.demo_type == "location":
        # convert coords to torch
        coords = np.array([args.longitude, args.latitude])[np.newaxis, ...]
        obs_coords = utils.convert_loc_to_tensor(coords, params["device"])
        obs_time = (torch.ones(coords.shape[0], device=params["device"]) *
                    args.time_of_year * 2 - 1.0)
        loc_time_feats = utils.encode_loc_time(obs_coords,
                                               obs_time,
                                               concat_dim=1,
                                               params=params)

        print("Making prediction ...")
        with torch.no_grad():
            pred = model(loc_time_feats)[0, :]
        pred = pred.cpu().numpy()

        num_categories = 25
        print("\nTop {} likely categories for location {:.4f}, {:.4f}:".format(
            num_categories, coords[0, 0], coords[0, 1]))
        most_likely = np.argsort(pred)[::-1]
        for ii, cls_id in enumerate(most_likely[:num_categories]):
            print("{}\t{}\t{:.3f}".format(ii, cls_id, np.round(
                pred[cls_id], 3)) + "\t" + class_data[cls_id]["our_name"] +
                  " - " + class_data[cls_id]["preferred_common_name"])

    elif args.demo_type == "map":
        # grid predictor - for making dense predictions for each lon/lat location
        gp = grid.GridPredictor(np.load("data/ocean_mask.npy"),
                                params,
                                mask_only_pred=True)

        if args.class_of_interest == -1:
            args.class_of_interest = np.random.randint(len(class_data))
        print("Selected category: " +
              class_data[args.class_of_interest]["our_name"] + " - " +
              class_data[args.class_of_interest]["preferred_common_name"])

        print("Making prediction ...")
        grid_pred = gp.dense_prediction(model,
                                        args.class_of_interest,
                                        time_step=args.time_of_year)

        op_file_name = (
            class_data[args.class_of_interest]["our_name"].lower().replace(
                " ", "_") + ".png")
        print("Saving prediction to: " + op_file_name)
        plt.imsave(op_file_name,
                   1.0 - grid_pred,
                   cmap="afmhot",
                   vmin=0,
                   vmax=1)
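For completeness, a minimal command-line driver for the `main` function above. This is only a sketch: the argument names mirror the attributes the code reads from `args`, and the defaults are illustrative rather than the project's actual values.

import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="geo prior demo")
    parser.add_argument("--model_url", type=str, required=True)
    parser.add_argument("--model_path", type=str, required=True)
    parser.add_argument("--class_names_path", type=str, required=True)
    parser.add_argument("--demo_type", type=str, default="location",
                        choices=["location", "map"])
    parser.add_argument("--longitude", type=float, default=0.0)
    parser.add_argument("--latitude", type=float, default=0.0)
    parser.add_argument("--time_of_year", type=float, default=0.5,
                        help="fraction of the year in [0, 1]")
    parser.add_argument("--class_of_interest", type=int, default=-1,
                        help="-1 picks a random category in map mode")
    main(parser.parse_args())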
Example #2
mask = np.load(config.MASK_PATH).astype(int)  # np.int was removed in recent NumPy releases
grad_y, grad_x = np.gradient(mask)
mask_lines = grad_y**2 + grad_x**2  # nonzero only along the mask boundary
mask_lines[mask_lines > 0.0] = 1.0
mask_lines = 1.0 - mask_lines
mask = mask.astype(np.uint8)

# create placeholder image that will be displayed
blank_im = np.zeros((mask.shape[0], mask.shape[1], 3), dtype=np.uint8)
for cc in range(3):
    blank_im[:,:,cc] = (255*mask_lines).astype(np.uint8)
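
# Self-contained sanity check of the outline trick above (not part of the
# original script): np.gradient of a binary mask is nonzero only in cells
# adjacent to a 0/1 transition, so thresholding the squared gradient
# magnitude marks the boundary and leaves the interior untouched.
_toy = np.zeros((5, 5))
_toy[1:4, 1:4] = 1
_gy, _gx = np.gradient(_toy)
_edges = (_gy**2 + _gx**2) > 0
assert _edges[1, 1] and not _edges[2, 2]  # border cell marked, interior cell clean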

# load model
net_params = torch.load(config.MODEL_PATH, map_location='cpu')
params = net_params['params']
params['device'] = 'cpu'
model = models.FCNet(params['num_feats'], params['num_classes'], params['num_filts'], params['num_users']).to(params['device'])
model.load_state_dict(net_params['state_dict'])
model.eval()

# grid predictor - for making dense predictions for each lon/lat location
gp = grid.GridPredictor(mask, params, mask_only_pred=True)

# generate features
print('generating location features')
feats = []
for time_step in np.linspace(0,1,config.NUM_TIME_STEPS+1)[:-1]:
    feats.append(gp.dense_prediction_masked_feats(model, time_step))
print('location features generated')

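# Sketch only: one way the cached per-time-step features might be consumed.
# This assumes each feats[t] is a tensor of encoded location features for the
# cells the mask keeps, and that model(x) returns per-class probabilities as
# in Example #1; how GridPredictor orders those cells is not shown here.
example_class = 0  # placeholder class index
with torch.no_grad():
    preds_over_time = [model(f)[:, example_class].cpu().numpy() for f in feats]
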
def create_images(index_of_interest):
    images = []
Example #3
supercat_names = [cc['supercategory'] for cc in cls_data]
supercat_un, supercat_ids = np.unique(supercat_names, return_inverse=True)

# load user info
train_locs, train_classes, train_users, train_dates, _ = dt.load_inat_data(
    data_dir, 'train2018_locations.json', 'train2018.json', True)
assert (train_users == -1).sum() == 0
un_users, train_users, cnt_users = np.unique(train_users,
                                             return_inverse=True,
                                             return_counts=True)

# load model
net_params = torch.load(model_path, map_location='cpu')
params = net_params['params']
params['device'] = 'cpu'  # keep everything on the CPU for this embedding analysis
model = models.FCNet(num_inputs=params['num_feats'],
                     num_classes=params['num_classes'],
                     num_filts=params['num_filts'],
                     num_users=params['num_users']).to(params['device'])

model.load_state_dict(net_params['state_dict'])
model.eval()

# load params
user_emb = net_params['state_dict']['user_emb.weight'].numpy()
class_emb = net_params['state_dict']['class_emb.weight'].numpy()
# currently this will not work if there is a bias term in the model
assert model.inc_bias is False


def sig(x):
    return 1.0 / (1.0 + np.exp(-x))
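
The assertion that the model has no bias term suggests a user's affinity for a class reduces to a dot product of the two embeddings. A minimal sketch under that assumption (the user index is a placeholder, and this is not necessarily the exact scoring used in the full script):

user_id = 0  # placeholder user index
affinities = sig(user_emb[user_id] @ class_emb.T)   # shape: (num_classes,)
top_classes = np.argsort(affinities)[::-1][:10]     # ten highest-affinity classes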
Example #4
def main(args):

    download_model(args.model_url, args.model_path)
    print('Loading model: ' + args.model_path)
    net_params = torch.load(args.model_path, map_location='cpu')
    params = net_params['params']
    if not torch.cuda.is_available():
        params['device'] = 'cpu'  # fall back to CPU if the saved device is unavailable
    model = models.FCNet(num_inputs=params['num_feats'],
                         num_classes=params['num_classes'],
                         num_filts=params['num_filts'],
                         num_users=params['num_users']).to(params['device'])
    model.load_state_dict(net_params['state_dict'])
    model.eval()

    # load class names
    with open(args.class_names_path) as da:
        class_data = json.load(da)

    if args.demo_type == 'location':
        # convert coords to torch
        coords = np.array([args.longitude, args.latitude])[np.newaxis, ...]
        obs_coords = utils.convert_loc_to_tensor(coords, params['device'])
        obs_time = (torch.ones(coords.shape[0], device=params['device']) *
                    args.time_of_year * 2 - 1.0)
        loc_time_feats = utils.encode_loc_time(obs_coords,
                                               obs_time,
                                               concat_dim=1,
                                               params=params)

        print('Making prediction ...')
        with torch.no_grad():
            pred = model(loc_time_feats)[0, :]
        pred = pred.cpu().numpy()

        num_categories = 25
        print('\nTop {} likely categories for location {:.4f}, {:.4f}:'.format(
            num_categories, coords[0, 0], coords[0, 1]))
        most_likely = np.argsort(pred)[::-1]
        for ii, cls_id in enumerate(most_likely[:num_categories]):
            print('{}\t{}\t{:.3f}'.format(ii, cls_id, np.round(pred[cls_id], 3)) +
                  '\t' + class_data[cls_id]['our_name'] + ' - ' +
                  class_data[cls_id]['preferred_common_name'])

    elif args.demo_type == 'map':
        # grid predictor - for making dense predictions for each lon/lat location
        gp = grid.GridPredictor(np.load('data/ocean_mask.npy'),
                                params,
                                mask_only_pred=True)

        if args.class_of_interest == -1:
            args.class_of_interest = np.random.randint(len(class_data))
        print('Selected category: ' +
              class_data[args.class_of_interest]['our_name'] + ' - ' +
              class_data[args.class_of_interest]['preferred_common_name'])

        print('Making prediction ...')
        grid_pred = gp.dense_prediction(model,
                                        args.class_of_interest,
                                        time_step=args.time_of_year)

        op_file_name = (class_data[args.class_of_interest]['our_name'].lower()
                        .replace(' ', '_') + '.png')
        print('Saving prediction to: ' + op_file_name)
        plt.imsave(op_file_name,
                   1.0 - grid_pred,
                   cmap='afmhot',
                   vmin=0,
                   vmax=1)