Example #1
import numpy as np
import torch


def to_array(obj, copy_array=False):
    # Pass 1-D (or single-column) tensors through unchanged, optionally cloning them.
    if isinstance(obj, torch.Tensor):
        if len(obj.shape) == 1 or obj.shape[1] == 1:
            if copy_array:
                return torch.clone(obj)
            return obj
    # Anything else is converted to a flat float32 tensor.
    return torch.ravel(torch.FloatTensor(np.array([v for v in obj])))
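A minimal usage sketch with hypothetical inputs, assuming numpy and torch are imported as above:

points = [np.array([1.0, 2.0]), np.array([3.0, 4.0])]  # list of arrays
print(to_array(points))                   # tensor([1., 2., 3., 4.])

t = torch.arange(3.0)
print(to_array(t, copy_array=True) is t)  # False: a clone is returned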
Example #2
    def validation_epoch_end(self, outputs: Dict[str, Any]):
        prediction = torch.cat([tmp["prediction"] for tmp in outputs])
        target = torch.cat([tmp["target"] for tmp in outputs])

        # --- masked cm, flatten all tensors, subset by forest pixels
        lu = torch.ravel(torch.cat([tmp["lu"] for tmp in outputs]))
        prediction_masked = torch.ravel(prediction)[lu == 1]
        target_masked = torch.ravel(target)[lu == 1]

        confusion_matrix = torchmetrics.functional.confusion_matrix(
            prediction, target, normalize="true", num_classes=len(self.classes)
        )

        confusion_matrix_masked = torchmetrics.functional.confusion_matrix(
            prediction_masked,
            target_masked,
            normalize="true",
            num_classes=len(self.classes),
        )

        dfs = {}
        for label, cm in zip(
            ["cm_norm", "cm_norm_masked"], [confusion_matrix, confusion_matrix_masked]
        ):
            dfs[label] = pd.DataFrame(
                cm.detach().cpu().numpy(),
                index=self.classes,
                columns=self.classes,
            )

        cm_chart = show_cm(dfs["cm_norm"], dfs["cm_norm_masked"], dpi=72, display=False)

        for logger in self.logger:
            if isinstance(logger, pl.loggers.wandb.WandbLogger):
                import wandb

                logger.experiment.log(
                    {
                        "Confusion matrix (Val)": wandb.Image(
                            cm_chart,
                            caption=f"CM-Val-Norm-{self.trainer.global_step}",
                        )
                    },
                    commit=False,
                )
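The masking step in isolation, sketched with small hypothetical tensors (the Lightning module and loggers above are not needed):

import torch

prediction = torch.tensor([[0, 1], [2, 1]])
target = torch.tensor([[0, 1], [1, 1]])
lu = torch.ravel(torch.tensor([[1, 0], [1, 1]]))  # 1 marks forest pixels

prediction_masked = torch.ravel(prediction)[lu == 1]
target_masked = torch.ravel(target)[lu == 1]
print(prediction_masked)  # tensor([0, 2, 1])
print(target_masked)      # tensor([0, 1, 1])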
Example #3
    def __init__(self, src='Hand', s=1E-5):
        P = np.load(src + '.npz')
        self.data = []
        for i, (k, v) in enumerate(P.items()):
            self.data += [(i, p.astype('float32')) for p in v]
        # x_transform: flip + scale, keypoint jitter with magnitude s/2
        # self.x_transform = lambda x: torch.ravel(torch.randn(2)*(x+(torch.rand(x.shape)-0.5)*s))
        self.x_transform = lambda x: torch.ravel(torch.randn(2) * (1 + (torch.rand(x.shape) - 0.5) * s) * x)
        self.y_transform = None  # lambda x: torch.tensor(x)
        self.cls = [k for k in P]
        print(self.cls)
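A sketch of what x_transform does to a single sample, using a hypothetical 21 x 2 keypoint tensor:

import torch

s = 1e-5
x = torch.rand(21, 2)  # hypothetical (x, y) keypoints
x_transform = lambda x: torch.ravel(torch.randn(2) * (1 + (torch.rand(x.shape) - 0.5) * s) * x)
print(x_transform(x).shape)  # torch.Size([42]): flattened, randomly flipped/scaled per axis, jittered by s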
Example #4
def generate_coordinates(n: int) -> torch.Tensor:
    '''
    Generates a grid of 2D coordinates covering [0, n) x [0, n).

    params
    -----------
    n: int
        Number of points per axis (the grid has n**2 points in total)

    returns:
    -----------
    coords_abs: torch.Tensor
        image coordinates of shape (n**2) x 2, in (i, j) order
    '''
    # torch.meshgrid uses ij indexing by default, so use it directly
    # rather than using np.meshgrid and switching the indexing to ij
    with torch.no_grad():
        rows, cols = torch.meshgrid(torch.arange(n), torch.arange(n))
        # i , j format coordinates
        coords_abs = torch.stack(
            [torch.ravel(rows), torch.ravel(cols)], axis=-1)

    return coords_abs
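A quick check of the output, assuming torch is imported:

coords = generate_coordinates(3)
print(coords.shape)  # torch.Size([9, 2])
print(coords[:4])    # first rows: (0, 0), (0, 1), (0, 2), (1, 0) in (i, j) order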
Example #5
    def ravel(self, tensor):
        """
        Return a flattened view of the tensor, not a copy.

        Example:

            >>> import pyhf
            >>> pyhf.set_backend("pytorch")
            >>> tensor = pyhf.tensorlib.astensor([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
            >>> pyhf.tensorlib.ravel(tensor)
            tensor([1., 2., 3., 4., 5., 6.])

        Args:
            tensor (Tensor): Tensor object

        Returns:
            `torch.Tensor`: A flattened array.
        """
        return torch.ravel(tensor)
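A small sketch checking the view-vs-copy behaviour the docstring describes, using torch directly:

import torch

t = torch.arange(6.0).reshape(2, 3)
print(torch.ravel(t).data_ptr() == t.data_ptr())        # True: contiguous input shares storage

t_nc = t.t()                                            # non-contiguous transpose
print(torch.ravel(t_nc).data_ptr() == t_nc.data_ptr())  # False: a copy is made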
Example #6
    def other_ops(self):
        a = torch.randn(4)
        b = torch.randn(4)
        c = torch.randint(0, 8, (5, ), dtype=torch.int64)
        e = torch.randn(4, 3)
        f = torch.randn(4, 4, 4)
        size = [0, 1]
        dims = [0, 1]
        return (
            torch.atleast_1d(a),
            torch.atleast_2d(a),
            torch.atleast_3d(a),
            torch.bincount(c),
            torch.block_diag(a),
            torch.broadcast_tensors(a),
            torch.broadcast_to(a, (4)),
            # torch.broadcast_shapes(a),
            torch.bucketize(a, b),
            torch.cartesian_prod(a),
            torch.cdist(e, e),
            torch.clone(a),
            torch.combinations(a),
            torch.corrcoef(a),
            # torch.cov(a),
            torch.cross(e, e),
            torch.cummax(a, 0),
            torch.cummin(a, 0),
            torch.cumprod(a, 0),
            torch.cumsum(a, 0),
            torch.diag(a),
            torch.diag_embed(a),
            torch.diagflat(a),
            torch.diagonal(e),
            torch.diff(a),
            torch.einsum("iii", f),
            torch.flatten(a),
            torch.flip(e, dims),
            torch.fliplr(e),
            torch.flipud(e),
            torch.kron(a, b),
            torch.rot90(e),
            torch.gcd(c, c),
            torch.histc(a),
            torch.histogram(a),
            torch.meshgrid(a),
            torch.lcm(c, c),
            torch.logcumsumexp(a, 0),
            torch.ravel(a),
            torch.renorm(e, 1, 0, 5),
            torch.repeat_interleave(c),
            torch.roll(a, 1, 0),
            torch.searchsorted(a, b),
            torch.tensordot(e, e),
            torch.trace(e),
            torch.tril(e),
            torch.tril_indices(3, 3),
            torch.triu(e),
            torch.triu_indices(3, 3),
            torch.vander(a),
            torch.view_as_real(torch.randn(4, dtype=torch.cfloat)),
            torch.view_as_complex(torch.randn(4, 2)),
            torch.resolve_conj(a),
            torch.resolve_neg(a),
        )
Example #7
def degrees(M):
    # Multiply M^T by a ones vector: the result is the column sums of M
    # (node degrees for a symmetric adjacency matrix), flattened to 1-D.
    signal = torch.reshape(torch.ones(M.shape[0]), (-1, 1))
    index, values = torch_sparse.transpose(M.index, M.values, M.shape[0],
                                           M.shape[1])
    return torch.ravel(
        torch_sparse.spmm(index, values, M.shape[1], M.shape[0], signal))
Example #8
def conv(signal, M):
    # Sparse matrix-vector product M @ signal, returned as a flat 1-D tensor.
    signal = torch.reshape(signal, (-1, 1))
    return torch.ravel(
        torch_sparse.spmm(M.index, M.values, M.shape[0], M.shape[1], signal))
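M in the two functions above looks like a small wrapper exposing index, values and shape. A minimal sketch with a hypothetical 3-node path graph, assuming torch_sparse is installed and degrees/conv from Examples #7 and #8 are defined:

from collections import namedtuple
import torch
import torch_sparse  # used inside degrees/conv

SparseM = namedtuple("SparseM", ["index", "values", "shape"])  # hypothetical wrapper

# undirected path 0-1-2 stored as COO edges
index = torch.tensor([[0, 1, 1, 2], [1, 0, 2, 1]])
values = torch.ones(4)
M = SparseM(index=index, values=values, shape=(3, 3))

print(degrees(M))              # tensor([1., 2., 1.])
print(conv(torch.ones(3), M))  # row sums: tensor([1., 2., 1.])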
Example #9
    def test_epoch_end(self, outputs: Dict[str, Any]):

        # --- original cm
        prediction = torch.cat([tmp["prediction"] for tmp in outputs])
        target = torch.cat([tmp["target"] for tmp in outputs])

        # --- masked cm, flatten all tensors, subset by forest pixels
        lu = torch.ravel(torch.cat([tmp["lu"] for tmp in outputs]))
        prediction_masked = torch.ravel(prediction)[lu == 1]
        target_masked = torch.ravel(target)[lu == 1]

        confusion_matrix = torchmetrics.functional.confusion_matrix(
            prediction, target, normalize="true", num_classes=len(self.classes)
        )

        confusion_matrix_px = torchmetrics.functional.confusion_matrix(
            prediction, target, num_classes=len(self.classes)
        )

        confusion_matrix_masked = torchmetrics.functional.confusion_matrix(
            prediction_masked,
            target_masked,
            normalize="true",
            num_classes=len(self.classes),
        )

        confusion_matrix_masked_px = torchmetrics.functional.confusion_matrix(
            prediction_masked, target_masked, num_classes=len(self.classes)
        )

        dfs = {}
        for label, cm in zip(
            ["cm_norm", "cm_px", "cm_norm_masked", "cm_px_masked"],
            [
                confusion_matrix,
                confusion_matrix_px,
                confusion_matrix_masked,
                confusion_matrix_masked_px,
            ],
        ):
            dfs[label] = pd.DataFrame(
                cm.detach().cpu().numpy(),
                index=self.classes,
                columns=self.classes,
            )

        cm_chart = show_cm(dfs["cm_norm"], dfs["cm_norm_masked"], dpi=72, display=False)
        cm_chart_px = show_cm(dfs["cm_px"], dfs["cm_px_masked"], dpi=72, display=False)

        for logger in self.logger:
            if isinstance(logger, pl.loggers.wandb.WandbLogger):
                import wandb

                logger.experiment.log(
                    {
                        "Confusion Matrix (Test) - Normalized": wandb.Image(
                            cm_chart,
                            caption=f"CM-Test-Norm-{self.trainer.global_step}",
                        ),
                        "Confusion Matrix (Test) - Pixel": wandb.Image(
                            cm_chart_px,
                            caption=f"CM-Test-Px-{self.trainer.global_step}",
                        ),
                    },
                    commit=False,
                )

        log.info(f"CM - DEFAULT - NORMALIZED: {dfs['cm_norm'].to_string()}")
        log.info(f"CM - FORESTONLY - NORMALIZED: {dfs['cm_norm_masked'].to_string()}")
        log.info(f"CM - DEFAULT - PIXEL: {dfs['cm_px'].to_string()}")
        log.info(f"CM - FORESTONLY - PIXEL: {dfs['cm_px_masked'].to_string()}")
Example #10
import numpy as np
import torch
import matplotlib.pyplot as plt

x_img = torch.rand(64)
print(x_img.shape)

x_image_reshaped = x_img.reshape(8, 8)
print(x_image_reshaped.shape)

plt.imshow(x_image_reshaped, cmap='gray')
plt.show()
# Make a 3-channel image
channels = 3
image_size = 1728

x_rgb_img = torch.randint(0, 255, (image_size, 1))
print(x_rgb_img.shape)

x_rgb_img = torch.ravel(x_rgb_img).reshape(channels, 24, 24)
print(x_rgb_img.shape)
x_rgb_img = x_rgb_img.permute(2, 1, 0)
print(x_rgb_img.shape)
# matplotlib cannot display channel-first images,
# so we permute the tensor to channel-last (H, W, C) before plotting

plt.imshow(x_rgb_img)
plt.show()

# using numpy
x = np.random.random((10, 10, 3))
print(f'numpy array x shape: {x.shape}')
print(x)
plt.imshow(x)
plt.show()