Example #1
    def __call__(self, x: jnp.ndarray):
        # Hidden layer: linear projection, batch normalization, ReLU activation
        x = eg.Linear(self.dmid)(x)
        x = eg.BatchNorm()(x)
        x = jax.nn.relu(x)

        # Output projection to dout units
        x = eg.Linear(self.dout)(x)
        return x
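
For intuition, here is a rough pure-JAX sketch of what this forward pass computes at inference time; the parameter names (w1, b1, gamma, beta, mean, var, w2, b2) are hypothetical placeholders for the state Elegy manages internally:

import jax
import jax.numpy as jnp


def mlp_forward(params, x):
    # Linear(dmid): affine projection to the hidden width
    h = x @ params["w1"] + params["b1"]
    # BatchNorm at inference: normalize with running statistics, then scale and shift
    h = params["gamma"] * (h - params["mean"]) / jnp.sqrt(params["var"] + 1e-5) + params["beta"]
    h = jax.nn.relu(h)
    # Linear(dout): final affine projection
    return h @ params["w2"] + params["b2"]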
Example #2
    def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
        # Normalize the input
        x = x.astype(jnp.float32) / 255.0

        # Block 1
        x = eg.Conv(32, [3, 3], strides=[2, 2])(x)
        x = eg.Dropout(0.05)(x)
        x = jax.nn.relu(x)

        # Block 2
        x = eg.Conv(64, [3, 3], strides=[2, 2])(x)
        x = eg.BatchNorm()(x)
        x = eg.Dropout(0.1)(x)
        x = jax.nn.relu(x)

        # Block 3
        x = eg.Conv(128, [3, 3], strides=[2, 2])(x)

        # Global average pooling
        x = x.mean(axis=(1, 2))

        # Classification layer
        x = eg.Linear(10)(x)

        return x
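
The three stride-2 convolutions act as progressive downsampling. Assuming 28x28 inputs (e.g. MNIST, not stated in the example) and "SAME"-style padding, each block roughly halves the spatial resolution, and the mean over axes (1, 2) then collapses what remains into a 128-dimensional feature vector per image. A small sketch of that arithmetic:

import math

size = 28  # assumed spatial side of the input images (e.g. MNIST)
for block in range(1, 4):
    size = math.ceil(size / 2)  # effect of a stride-2 conv with "SAME"-style padding
    print(f"after block {block}: {size}x{size}")
# after block 1: 14x14
# after block 2: 7x7
# after block 3: 4x4
# Global average pooling then maps (batch, 4, 4, 128) to (batch, 128),
# and Linear(10) produces the per-class logits.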
Example #3
from typing import Tuple

import jax
import elegy as eg


def ConvBlock(
    x,
    units: int,
    kernel: Tuple[int, int],
    stride: int = 1,
):
    # Convolution -> batch normalization -> dropout -> ReLU
    x = eg.Conv(
        units,
        kernel,
        strides=[stride, stride],
        padding="same",
    )(x)
    x = eg.BatchNorm()(x)
    x = eg.Dropout(0.2)(x)
    return jax.nn.relu(x)
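
Because ConvBlock is a plain function, it composes naturally inside a module's __call__ written in the same inline style as the examples above. A hypothetical backbone built from it (unit counts are illustrative, not from the original):

def backbone(x):
    # Three downsampling stages built from the helper above
    x = ConvBlock(x, 32, (3, 3), stride=2)
    x = ConvBlock(x, 64, (3, 3), stride=2)
    x = ConvBlock(x, 128, (3, 3), stride=2)
    # Global average pooling over the spatial dimensions
    return x.mean(axis=(1, 2))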