Example #1
    def soft_dc(self, x, input):
        # Soft data consistency: penalize the difference between the current
        # estimate and the measured k-space, but only where the sampling mask
        # is set; unsampled locations are left at zero.
        if self.space == 'img-space':
            x = T.fft2(sens_expand(x, input['sens_maps']))
        x = torch.where(input['mask'], x - input['kspace'], self.zero)
        if self.space == 'img-space':
            x = sens_reduce(T.ifft2(x), input['sens_maps'])
        return self.lambda_ * x
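A minimal standalone sketch of the k-space branch, assuming a boolean sampling mask; the function and argument names (soft_dc_kspace, kspace_est, kspace_meas, lam) are illustrative, not taken from the original class:

import torch

def soft_dc_kspace(kspace_est, kspace_meas, mask, lam):
    # Keep the residual only on sampled k-space locations; elsewhere the
    # data-consistency term (and hence its gradient) is zero.
    zero = torch.zeros(1, dtype=kspace_est.dtype, device=kspace_est.device)
    return lam * torch.where(mask, kspace_est - kspace_meas, zero)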
Example #2
    def forward(self, x):
        # Centered inverse 2-D FFT: map k-space back to the image domain.
        return T.ifft2(x)
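All of these snippets go through T.ifft2. Below is a minimal sketch of what such a helper typically computes, assuming the fastMRI-style convention visible in Example #4 (a real tensor whose last dimension holds the real and imaginary parts, with a centered, orthonormal transform), written against the modern torch.fft module; this is an assumption about T.ifft2, not its actual source:

import torch

def ifft2(x):
    # x: real tensor of shape (..., H, W, 2), last dim = (real, imag).
    assert x.shape[-1] == 2
    x = torch.view_as_complex(x.contiguous())
    x = torch.fft.ifftshift(x, dim=(-2, -1))       # undo centering
    x = torch.fft.ifft2(x, dim=(-2, -1), norm='ortho')
    x = torch.fft.fftshift(x, dim=(-2, -1))        # re-center the image
    return torch.view_as_real(x)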
Example #3
    def net_forward(self, x, input):
        if self.space == 'k-space':
            # Move to image space before running the CNN.
            x = sens_reduce(T.ifft2(x), input['sens_maps'])

        # Stack adjacent slices along a spatial dimension, add a channel
        # dimension for the network, then split the slices back apart.
        x = merge_multi_slice(x, cat_dim=-2).unsqueeze(1).contiguous()
        x = self.net(x)
        x = unmerge_multi_slice(x, 2).contiguous()

        if self.space == 'k-space':
            # Return to k-space for the subsequent data-consistency step.
            x = T.fft2(sens_expand(x, input['sens_maps']))
        return x
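The merge/unmerge helpers are project-specific; a plausible round-trip sketch, assuming the slice dimension is dim 0 (both the implementations and that layout are assumptions, not taken from the original code):

import torch

def merge_multi_slice(x, cat_dim=-2):
    # Fold the slice dimension into a spatial one so a 2-D network
    # sees all slices side by side.
    return torch.cat(list(x.unbind(dim=0)), dim=cat_dim)

def unmerge_multi_slice(x, num_slices):
    # Inverse: split the concatenated spatial dim and restack as slices.
    return torch.stack(x.chunk(num_slices, dim=-2), dim=0)

x = torch.randn(2, 8, 8)
assert torch.equal(unmerge_multi_slice(merge_multi_slice(x), 2), x)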
Example #4
import numpy as np

def test_ifft2(shape):
    # create_input, transforms, and utils are helpers from the test module.
    shape = shape + [2]  # trailing dim of size 2 holds (real, imag)
    input = create_input(shape)
    out_torch = transforms.ifft2(input).numpy()
    out_torch = out_torch[..., 0] + 1j * out_torch[..., 1]

    # Reference: numpy's inverse FFT with the same centering and norm.
    input_numpy = utils.tensor_to_complex_np(input)
    input_numpy = np.fft.ifftshift(input_numpy, (-2, -1))
    out_numpy = np.fft.ifft2(input_numpy, norm='ortho')
    out_numpy = np.fft.fftshift(out_numpy, (-2, -1))
    assert np.allclose(out_torch, out_numpy)
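The shape argument is supplied by the test harness; under pytest this would typically come from a parametrize decorator (the shapes below are illustrative, not from the original test module):

import pytest

@pytest.mark.parametrize('shape', [[3, 3], [4, 6], [10, 8, 4]])
def test_ifft2(shape):
    ...  # body as in Example #4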
Example #5
    def __call__(self, target_ksp, target_im, attrs, fname, slice):
        # Build one training sample: mask the target k-space, optionally
        # apply coil compression, GRAPPA, and sensitivity-map estimation,
        # then normalize everything by a common scale.
        kspace_np = target_ksp
        target_im = transforms.to_tensor(target_im)
        target_ksp = transforms.to_tensor(target_ksp)

        if self.args.coil_compress_coils:
            target_ksp = transforms.coil_compress(target_ksp, self.args.coil_compress_coils)

        if self.args.calculate_offsets_directly:
            krow = kspace_np.sum(axis=(0,1)) # flatten to a single row
            width = len(krow)
            offset = (krow != 0).argmax()
            acq_start = offset
            acq_end = width - (krow[::-1] != 0).argmax()  # exclusive end
        else:
            offset = None # Mask will pick randomly
            if self.partition == 'val' and 'mask_offset' in attrs:
                offset = attrs['mask_offset']

            acq_start = attrs['padding_left']
            acq_end = attrs['padding_right']

        seed = None if not self.use_seed else tuple(map(ord, fname))
        input_ksp, mask, num_lf = transforms.apply_mask(
            target_ksp, self.mask_func, 
            seed, offset,
            (acq_start, acq_end))

        sens_map = torch.Tensor(0)
        if self.args.compute_sensitivities:
            start_of_center_mask = (kspace_np.shape[-1] - num_lf + 1) // 2
            end_of_center_mask = start_of_center_mask + num_lf
            sens_map = est_sens_maps(kspace_np, start_of_center_mask, end_of_center_mask)
            sens_map = transforms.to_tensor(sens_map)

        if self.args.grappa_input:
            with h5py.File(self.args.grappa_input_path / self.partition / fname, 'r') as hf:
                kernel = transforms.to_tensor(hf['kernel'][slice])
                input_ksp = transforms.apply_grappa(input_ksp, kernel, target_ksp, mask)

        grappa_kernel = torch.Tensor(0)
        if self.args.grappa_path is not None:
            with h5py.File(self.args.grappa_path / self.partition / fname, 'r') as hf:
                grappa_kernel = transforms.to_tensor(hf['kernel'][slice])

        if self.args.grappa_target:
            with h5py.File(self.args.grappa_target_path / self.partition / fname, 'r') as hf:
                kernel = transforms.to_tensor(hf['kernel'][slice])
                target_ksp = transforms.apply_grappa(target_ksp.clone(), kernel, target_ksp, mask, sample_accel=2)
                target_im = transforms.root_sum_of_squares(transforms.complex_abs(transforms.ifft2(target_ksp)))

        input_im = transforms.ifft2(input_ksp)
        if not self.args.scale_inputs:
            scale = torch.Tensor([1.])
        else:
            abs_input = transforms.complex_abs(input_im)
            if self.args.scale_type == 'max':
                scale = torch.max(abs_input)
            else:
                scale = torch.mean(abs_input)

            input_ksp /= scale
            target_ksp /= scale
            target_im /= scale

        scale = scale.view([1, 1, 1])
        attrs_dict = dict(**attrs)

        return OrderedDict(
            input = input_ksp,
            target = target_ksp,
            target_im = target_im,
            mask = mask,
            grappa_kernel = grappa_kernel,
            scale = scale,
            attrs_dict = attrs_dict,
            fname = fname,
            slice = slice,
            num_lf = num_lf,
            sens_map = sens_map,
        )
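The direct-offset branch is worth isolating: it infers the acquired phase-encode range from where the k-space is nonzero. A self-contained sketch with a toy check; the function name and toy array are illustrative:

import numpy as np

def acquisition_bounds(kspace):
    # Collapse all axes except phase-encode; a column is nonzero iff it was
    # acquired, so the first and last nonzero columns bound the acquisition
    # (end index exclusive).
    krow = kspace.sum(axis=(0, 1))
    width = len(krow)
    acq_start = int((krow != 0).argmax())
    acq_end = int(width - (krow[::-1] != 0).argmax())
    return acq_start, acq_end

k = np.zeros((4, 8, 10), dtype=np.complex64)
k[..., 2:9] = 1.0  # columns 2..8 "acquired"
assert acquisition_bounds(k) == (2, 9)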