Example #1
def test_boptim(acqf, result):
    Z_sparse = initial_seed()
    X_full = gprutils.get_full_grid(Z_sparse)
    X_sparse = gprutils.get_sparse_grid(Z_sparse)
    expected_result = np.load(result)
    boptim = boptimizer(X_sparse,
                        Z_sparse,
                        X_full,
                        trial_func,
                        acquisition_function=acqf,
                        exploration_steps=20,
                        use_gpu=False,
                        verbose=1)
    boptim.run()
    assert_allclose(boptim.target_func_vals[-1], expected_result)
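The test above relies on two helpers, initial_seed and trial_func, that are defined elsewhere in the test module, while acqf and result come from the test parametrization. A minimal sketch of what such helpers could look like (the grid size, the seeding strategy, and the toy objective are assumptions, not GPim code):

import numpy as np

def initial_seed():
    # Hypothetical helper: a 2D grid with a handful of "measured" points
    # and NaNs everywhere else, as expected by the sparse-grid utilities
    rng = np.random.default_rng(0)
    Z = np.full((50, 50), np.nan)
    rows = rng.integers(0, 50, size=10)
    cols = rng.integers(0, 50, size=10)
    Z[rows, cols] = rng.random(10)
    return Z

def trial_func(idx):
    # Hypothetical black-box objective evaluated at integer grid indices
    return np.sin(idx[0] / 10.) * np.cos(idx[1] / 10.)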
Example #2
def test_skgpr_2d(kernel):  # sanity check only, due to computational cost
    R = get_dummy_data()
    X = gprutils.get_sparse_grid(R)
    X_true = gprutils.get_full_grid(R)
    mean, sd, _ = skgpr.skreconstructor(X,
                                        R,
                                        X_true,
                                        kernel=kernel,
                                        learning_rate=0.1,
                                        iterations=2,
                                        use_gpu=False,
                                        verbose=False).run()
    assert_(mean.shape == sd.shape == R.shape)
    assert_(not np.isnan(mean).any())
    assert_(not np.isnan(sd).any())
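get_dummy_data is likewise not part of this excerpt; one possible implementation, assuming a small 2D grid where roughly half of the points are left "unmeasured" as NaNs:

import numpy as np

def get_dummy_data():
    # Hypothetical helper: a 16 x 16 grid with ~50% of the points set to NaN,
    # matching what get_sparse_grid/get_full_grid expect as input
    rng = np.random.default_rng(0)
    R = rng.random((16, 16))
    R[rng.random((16, 16)) < 0.5] = np.nan
    return R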
Example #3
def test_gpr_3d(kernel):  # sanity check only, due to computational cost
    R = np.load(test_data3d)
    X = gprutils.get_sparse_grid(R)
    X_true = gprutils.get_full_grid(R)
    mean, sd, _ = gpr.reconstructor(X,
                                    R,
                                    X_true,
                                    kernel=kernel,
                                    lengthscale=None,
                                    indpoints=50,
                                    learning_rate=0.1,
                                    iterations=2,
                                    use_gpu=False,
                                    verbose=True).run()
    assert_(mean.shape == sd.shape == R.flatten().shape)
    assert_(not np.isnan(mean).any())
    assert_(not np.isnan(sd).any())
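Note that the 3D reconstructor returns flattened arrays here (hence the comparison against R.flatten().shape); reshaping them back onto the grid is a one-liner. A self-contained sketch with toy stand-ins for the outputs:

import numpy as np

# Toy stand-ins for the input grid and the flattened reconstructor outputs
R = np.random.rand(8, 8, 16)
mean, sd = np.random.rand(R.size), np.random.rand(R.size)

# Reshape the flattened mean/sd back onto the original 3D grid
mean_grid = mean.reshape(R.shape)
sd_grid = sd.reshape(R.shape)
assert mean_grid.shape == sd_grid.shape == R.shape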
Example #4
def test_gpr_2d(kernel):
    R = np.load(test_data2d)
    R_ = np.load(test2d_expected_result)
    X = gprutils.get_sparse_grid(R)
    X_true = gprutils.get_full_grid(R)
    mean, _, _ = gpr.reconstructor(X,
                                   R,
                                   X_true,
                                   kernel=kernel,
                                   lengthscale=[[1., 1.], [4., 4.]],
                                   indpoints=250,
                                   learning_rate=0.1,
                                   iterations=200,
                                   use_gpu=False,
                                   verbose=False).run()
    assert_(ssim(mean, R_) > 0.95)
    assert_(np.linalg.norm(mean - R_) < 3)
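In all of these tests the kernel argument is supplied by the test parametrization; a minimal sketch of the decorator, assuming string kernel names such as "RBF" and "Matern52" (the names actually accepted by gpr.reconstructor are not shown in this excerpt):

import pytest

@pytest.mark.parametrize("kernel", ["RBF", "Matern52"])
def test_gpr_2d(kernel):
    ...  # body as in the example above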
Example #5
def test_skgpr_3d(kernel):  # sanity check only, due to computational cost
    R = np.load(test_data3d)
    X = gprutils.get_sparse_grid(R)
    X_true = gprutils.get_full_grid(R)
    (mean, sd), _ = skgpr.skreconstructor(X,
                                          R,
                                          X_true,
                                          kernel=kernel,
                                          lengthscale=None,
                                          grid_points_ratio=.25,
                                          learning_rate=0.1,
                                          iterations=2,
                                          num_batches=100,
                                          calculate_sd=True,
                                          use_gpu=False,
                                          verbose=True).run()
    assert_(mean.shape == sd.shape == R.flatten().shape)
    assert_(not np.isnan(mean).any())
    assert_(not np.isnan(sd).any())
Example #6
def test_skgpr_2d(kernel):
    R = np.load(test_data)
    R_ = np.load(test_expected_result)
    X = gprutils.get_sparse_grid(R)
    X_true = gprutils.get_full_grid(R)
    mean, _ = skgpr.skreconstructor(X,
                                    R,
                                    X_true,
                                    kernel=kernel,
                                    lengthscale=[[1., 1.], [4., 4.]],
                                    grid_points_ratio=1.,
                                    learning_rate=0.1,
                                    iterations=20,
                                    calculate_sd=False,
                                    num_batches=1,
                                    use_gpu=False,
                                    verbose=False).run()
    assert_(ssim(mean, R_) > 0.98)
    assert_(np.linalg.norm(mean - R_) < 1)
Example #7
                    type=int,
                    help="1 for using GPU, 0 for running on CPU")
parser.add_argument("--SAVEDIR",
                    nargs="?",
                    default="Output",
                    type=str,
                    help="directory to save outputs")

args = parser.parse_args()

# Load data (e.g. N x M image or N x M x L spectroscopic grid)
R_true = np.load(args.FILEPATH)
if args.NORMALIZE and not np.isnan(R_true).any():
    R_true = (R_true - np.amin(R_true)) / np.ptp(R_true)
# Get "ground truth" grid indices
X_true = gprutils.get_full_grid(R_true, dense_x=1.)
# Construct lengthscale constraints for all dimensions
LENGTH_CONSTR = [[float(args.LENGTH_CONSTR_MIN)] * np.ndim(R_true),
                 [float(args.LENGTH_CONSTR_MAX)] * np.ndim(R_true)]
# Corrupt data (if args.PROB > 0)
X, R = gprutils.corrupt_data_xy(X_true, R_true, args.PROB)
# Directory to save results
if not os.path.exists(args.SAVEDIR):
    os.makedirs(args.SAVEDIR)
# Reconstruct the corrupted data. Initialize our "reconstructor" first.
reconstr = gpr.reconstructor(X,
                             R,
                             X_true,
                             args.KERNEL,
                             LENGTH_CONSTR,
Example #8
                    help="Directory to save outputs")
args = parser.parse_args()

# Load "ground truth" data (N x M x L spectroscopic grid)
# (in real experiment we will just get an empty array)
R_true = np.load(args.FILEPATH)
if args.NORMALIZE and not np.isnan(R_true).any():
    R_true = (R_true - np.amin(R_true)) / np.ptp(R_true)
# Make initial set of measurements for exploration analysis.
# Let's start with "opening" several points along each edge
R = np.full_like(R_true, np.nan)
R = gprutils.open_edge_points(R, R_true)
# Get sparse and full grid indices
X = gprutils.get_sparse_grid(R)
X_true = gprutils.get_full_grid(R)
dist_edge = [0, 0]  # set to non-zero vals when edge points are not "opened"
# Construct lengthscale constraints for all 3 dimensions
LENGTH_CONSTR = [[float(args.LENGTH_CONSTR_MIN) for i in range(3)],
                 [float(args.LENGTH_CONSTR_MAX) for i in range(3)]]
# Run exploratory analysis
uncert_idx_all, uncert_val_all, mean_all, sd_all, R_all = [], [], [], [], []
if not os.path.exists(args.SAVEDIR):
    os.makedirs(args.SAVEDIR)
indpts_r = args.INDUCING_POINTS_RATIO
for i in range(args.ESTEPS):
    print('Exploration step {}/{}'.format(i + 1, args.ESTEPS))
    # Make the number of inducing points dependent on the number of datapoints
    indpoints = len(gprutils.prepare_training_data(X, R)[0]) // indpts_r
    # clip to make sure it fits into GPU memory
    indpoints = min(indpoints, 2000)
    # Initialize explorer