Example 1
def test_seed():
    set_global_seed(10)
    out1 = uniform(size=[10, 10])
    out2 = uniform(size=[10, 10])
    assert not (out1.numpy() == out2.numpy()).all()

    set_global_seed(10)
    out3 = uniform(size=[10, 10])
    np.testing.assert_allclose(out1.numpy(), out3.numpy(), atol=1e-6)

    set_global_seed(11)
    out4 = uniform(size=[10, 10])
    assert not (out1.numpy() == out4.numpy()).all()
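A minimal reproducibility sketch along the same lines, using the documented megengine.random API; the test above is assumed to pull set_global_seed and uniform from its surrounding module, so seed here merely stands in for set_global_seed.

import numpy as np
from megengine.random import seed, uniform

# Re-seeding with the same value should reproduce the same draws.
seed(10)
a = uniform(size=[4, 4])
seed(10)
b = uniform(size=[4, 4])
np.testing.assert_allclose(a.numpy(), b.numpy(), atol=1e-6)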
Example 2
def fn(shape):
    o1 = random.uniform(0, 1, shape)
    o2 = random.normal(0, 1, shape)
    o3 = random.gamma(2, 1, shape)
    o4 = random.beta(2, 1, shape)
    o5 = random.poisson(2, shape)
    return o1, o2, o3, o4, o5
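A minimal usage sketch for the helper above, assuming random refers to megengine.random (whose uniform/normal/gamma/beta/poisson accept these positional arguments).

import megengine.random as random

outs = fn((2, 3))
print([o.shape for o in outs])  # expected: five (2, 3) tensors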
Example 3
def test_subtensor_when_shape_invalid():
    @jit.trace(symbolic=True, capture_as_const=True)
    def fun(inp):
        shape = inp.shape
        H = shape[-1]
        NH = H * 8 + 4
        arr = F.arange(4, NH, 8)
        arr_shape = arr.shape
        return arr_shape[0]

    inp = rand.uniform(size=[1, 3, 224, 224])
    fun(inp)

    with NamedTemporaryFile() as f:
        fun.dump(f.name, arg_names=["data"], optimize_for_inference=True)
        inp = rand.uniform(size=[1, 3, 512, 512])
        net = cgtools.GraphInference(f.name)
        net.run(inp_dict={"data": inp})
Example 4
def _bernoulli_sample_masks(masks, num_samples, sample_value):
    """ Using the bernoulli sampling method"""
    sample_mask = F.equal(masks, sample_value)
    num_mask = sample_mask.sum()
    num_final_samples = F.minimum(num_mask, num_samples)
    # here, we use the bernoulli probability to sample the anchors
    sample_prob = num_final_samples / num_mask
    uniform_rng = rand.uniform(sample_mask.shapeof()[0])
    after_sampled_mask = (uniform_rng <= sample_prob) * sample_mask
    return after_sampled_mask
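An illustrative NumPy restatement of the Bernoulli-thinning idea above (not the MegEngine code path): each position selected by the mask is kept independently with probability num_final_samples / num_mask, so the expected number of kept positions is num_final_samples.

import numpy as np

def bernoulli_sample_masks_np(masks, num_samples, sample_value, rng=None):
    rng = np.random.default_rng() if rng is None else rng
    sample_mask = masks == sample_value
    num_mask = int(sample_mask.sum())
    # Keep each selected position with probability min(num_mask, num_samples) / num_mask.
    sample_prob = min(num_mask, num_samples) / max(num_mask, 1)
    keep = rng.uniform(size=masks.shape[0]) <= sample_prob
    return keep & sample_mask

print(bernoulli_sample_masks_np(np.array([1, 0, 1, 1, 1]), num_samples=2, sample_value=1))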
Example 5
    def _bernoulli_sample_labels(
        self, labels, num_samples, sample_value, ignore_label=-1
    ):
        """ Using the bernoulli sampling method"""
        sample_label_mask = (labels == sample_value)
        num_mask = sample_label_mask.sum()
        num_final_samples = F.minimum(num_mask, num_samples)
        # here, we use the bernoulli probability to sample the anchors
        sample_prob = num_final_samples / num_mask
        uniform_rng = rand.uniform(sample_label_mask.shapeof(0))
        to_ignore_mask = (uniform_rng >= sample_prob) * sample_label_mask
        labels = labels * (1 - to_ignore_mask) + to_ignore_mask * ignore_label

        return labels
Example 6
def _bernoulli_sample_labels(labels,
                             num_samples,
                             sample_value,
                             ignore_label=-1):
    """ Using the bernoulli sampling method"""
    sample_label_mask = F.equal(labels, sample_value)
    num_mask = sample_label_mask.sum()
    num_final_samples = F.minimum(num_mask, num_samples)
    # here, we use the bernoulli probability to sample the anchors
    sample_prob = num_final_samples / num_mask
    uniform_rng = rand.uniform(sample_label_mask.shapeof()[0])
    disable_mask = (uniform_rng >= sample_prob) * sample_label_mask
    # TODO: check CUDA error: illegal memory access was encountered
    labels = labels * (1 - disable_mask) + disable_mask * ignore_label

    return labels
Example 7
def sample_mask_from_labels(labels, num_sample, sample_value):
    """generate mask for labels using sampling method.

    Args:
        labels (Tensor):
        num_sample (int):
        sample_value (int):

    Returns:
        sample_mask (Tensor)
    """
    assert labels.ndim == 1, "Only tensor of dim 1 is supported."
    # TODO: support bool mask
    sample_mask = (labels == sample_value).astype("float32")
    num_mask = sample_mask.sum().astype("int32")
    if num_mask <= num_sample:
        return sample_mask

    random_tensor = sample_mask * uniform(size=labels.shape)
    _, sampled_idx = F.topk(random_tensor, k=num_sample - num_mask)
    sample_mask[sampled_idx] = F.zeros(sampled_idx.shape)

    return sample_mask
Example 8
def sample_labels(labels, num_samples, label_value, ignore_label=-1):
    """sample N labels with label value = sample_labels

    Args:
        labels(Tensor): shape of label is (N,)
        num_samples(int):
        label_value(int):

    Returns:
        label(Tensor): label after sampling
    """
    assert labels.ndim == 1, "Only tensor of dim 1 is supported."
    mask = (labels == label_value)
    num_valid = mask.sum()
    if num_valid <= num_samples:
        return labels

    random_tensor = F.zeros_like(labels).astype("float32")
    random_tensor[mask] = uniform(size=num_valid)
    _, invalid_inds = F.topk(random_tensor, k=num_samples - num_valid)

    labels[invalid_inds] = ignore_label
    return labels
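An illustrative NumPy restatement of the subsampling intent shared by the snippets above (not the MegEngine code path): among positions whose label equals label_value, keep num_samples at random and mark the rest with ignore_label.

import numpy as np

def sample_labels_np(labels, num_samples, label_value, ignore_label=-1, rng=None):
    rng = np.random.default_rng() if rng is None else rng
    labels = labels.copy()
    idx = np.flatnonzero(labels == label_value)
    if idx.size > num_samples:
        # Randomly pick the surplus positions and overwrite them with ignore_label.
        drop = rng.choice(idx, size=idx.size - num_samples, replace=False)
        labels[drop] = ignore_label
    return labels

print(sample_labels_np(np.array([1, 1, 0, 1, 1]), num_samples=2, label_value=1))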
Example 9
def graph_b():
    return R.uniform(5) + R.gaussian(5)
Example 10
def test_range_uniform_dynamic_same_result():
    R.manual_seed(0)
    a = R.uniform(5, low=-2, high=2)
    R.manual_seed(0)
    b = R.uniform(5, low=-2, high=2)
    assert np.all(a.numpy() == b.numpy())
Example 11
def test_range_uniform_dynamic_diff_result():
    a = R.uniform(5, low=-2, high=2)
    b = R.uniform(5, low=-2, high=2)
    assert np.any(a.numpy() != b.numpy())
Example 12
def graph_b():
    R.manual_seed(731)
    return R.uniform(5, low=-2, high=2)
Example 13
def graph_b():
    return R.uniform(5, low=-2, high=2)
Example 14
def test_random_dynamic_same_result():
    R.manual_seed(0)
    a = R.uniform(5) + R.gaussian(5)
    R.manual_seed(0)
    b = R.uniform(5) + R.gaussian(5)
    assert np.all(a.numpy() == b.numpy())
Example 15
def test_random_dynamic_diff_result():
    a = R.uniform(5) + R.gaussian(5)
    b = R.uniform(5) + R.gaussian(5)
    assert np.any(a.numpy() != b.numpy())
Example 16
def graph_b():
    R.manual_seed(731)
    return R.uniform(5) + R.gaussian(5)
Example 17
def fwd():
    x = rand.uniform(size=(2, 2))
    y = rand.normal(size=(1, 3, 3, 3))
    return x, y