def _is_bval_type_a(grouped_dicoms):
    """
    Check if the bvals are stored in the first of 2 currently known ways for single frame dti.

    Returns True as soon as one group's first dataset carries all four Philips
    private tags (bvec x/y/z and bval) with float-parseable values and a
    non-zero bval; otherwise False.
    """
    bval_tag = Tag(0x2001, 0x1003)
    # Philips private direction tags, checked in x, y, z order.
    direction_tags = (Tag(0x2005, 0x10b0),
                      Tag(0x2005, 0x10b1),
                      Tag(0x2005, 0x10b2))

    def _tag_is_float(dataset, tag):
        # Tag must be present AND its value must parse as a float.
        return tag in dataset and _is_float(common.get_fl_value(dataset[tag]))

    for group in grouped_dicoms:
        header = group[0]
        if all(_tag_is_float(header, tag) for tag in direction_tags) \
                and _tag_is_float(header, bval_tag) \
                and common.get_fl_value(header[bval_tag]) != 0:
            return True
    return False
def _create_singleframe_bvals_bvecs(grouped_dicoms, bval_file, bvec_file, nifti, nifti_file): """ Write the bvals from the sorted dicom files to a bval file """ # create the empty arrays bvals = numpy.zeros([len(grouped_dicoms)], dtype=numpy.int32) bvecs = numpy.zeros([len(grouped_dicoms), 3]) # loop over all timepoints and create a list with all bvals and bvecs if _is_bval_type_a(grouped_dicoms): bval_tag = Tag(0x2001, 0x1003) bvec_x_tag = Tag(0x2005, 0x10b0) bvec_y_tag = Tag(0x2005, 0x10b1) bvec_z_tag = Tag(0x2005, 0x10b2) for stack_index in range(0, len(grouped_dicoms)): bvals[stack_index] = common.get_fl_value( grouped_dicoms[stack_index][0][bval_tag]) bvecs[stack_index, :] = [ common.get_fl_value( grouped_dicoms[stack_index][0][bvec_x_tag]), common.get_fl_value( grouped_dicoms[stack_index][0][bvec_y_tag]), common.get_fl_value(grouped_dicoms[stack_index][0][bvec_z_tag]) ] elif _is_bval_type_b(grouped_dicoms): bval_tag = Tag(0x0018, 0x9087) bvec_tag = Tag(0x0018, 0x9089) for stack_index in range(0, len(grouped_dicoms)): bvals[stack_index] = common.get_fd_value( grouped_dicoms[stack_index][0][bval_tag]) bvecs[stack_index, :] = common.get_fd_array_value( grouped_dicoms[stack_index][0][bvec_tag], 3) # truncate nifti if needed nifti, bvals, bvecs = _fix_diffusion_images(bvals, bvecs, nifti, nifti_file) # save the found bvecs to the file if numpy.count_nonzero(bvals) > 0 or numpy.count_nonzero(bvecs) > 0: common.write_bval_file(bvals, bval_file) common.write_bvec_file(bvecs, bvec_file) else: bval_file = None bvec_file = None return bval_file, bvec_file
def _create_singleframe_bvals_bvecs(grouped_dicoms, bval_file, bvec_file, nifti, nifti_file):
    """
    Write the bvals from the sorted dicom files to a bval file

    One bval and one 3-vector bvec is read per timepoint (one per group in
    grouped_dicoms), using whichever of the two known Philips single-frame
    storage schemes applies. The nifti is passed through
    _fix_diffusion_images for possible truncation. When every bval and bvec
    is zero, nothing is written and all four diffusion outputs are None.

    Returns (nifti, bvals, bvecs, bval_file, bvec_file).
    """
    # Pre-allocate one bval and one bvec row per timepoint.
    number_of_timepoints = len(grouped_dicoms)
    bvals = numpy.zeros([number_of_timepoints], dtype=numpy.int32)
    bvecs = numpy.zeros([number_of_timepoints, 3])

    if _is_bval_type_a(grouped_dicoms):
        # Scheme A: Philips private tags, separate x/y/z bvec components.
        bval_tag = Tag(0x2001, 0x1003)
        bvec_tags = (Tag(0x2005, 0x10b0),
                     Tag(0x2005, 0x10b1),
                     Tag(0x2005, 0x10b2))
        for timepoint, group in enumerate(grouped_dicoms):
            header = group[0]
            bvals[timepoint] = common.get_fl_value(header[bval_tag])
            bvecs[timepoint, :] = [common.get_fl_value(header[tag]) for tag in bvec_tags]
    elif _is_bval_type_b(grouped_dicoms):
        # Scheme B: standard DICOM diffusion tags, bvec stored as a 3-array.
        bval_tag = Tag(0x0018, 0x9087)
        bvec_tag = Tag(0x0018, 0x9089)
        for timepoint, group in enumerate(grouped_dicoms):
            header = group[0]
            bvals[timepoint] = common.get_fd_value(header[bval_tag])
            bvecs[timepoint, :] = common.get_fd_array_value(header[bvec_tag], 3)

    # truncate nifti if needed
    nifti, bvals, bvecs = _fix_diffusion_images(bvals, bvecs, nifti, nifti_file)

    # Only persist the files when there is at least one non-zero value;
    # otherwise report the series as carrying no diffusion information.
    if numpy.count_nonzero(bvals) > 0 or numpy.count_nonzero(bvecs) > 0:
        common.write_bval_file(bvals, bval_file)
        common.write_bvec_file(bvecs, bvec_file)
    else:
        bval_file = None
        bvec_file = None
        bvals = None
        bvecs = None

    return nifti, bvals, bvecs, bval_file, bvec_file
def _multiframe_to_block(multiframe_dicom):
    """
    Generate a full datablock containing all stacks

    Reads every frame of a multiframe dicom, applies per-frame rescale (and,
    when present, Philips private scale) factors, places each frame at its
    (z, t) position, and returns a 4D numpy array ordered (x, y, z, t).
    """
    # Calculate the amount of stacks and slices in the stack
    # NOTE(review): (0x2001,0x105f)/(0x2001,0x102d) are private tags —
    # presumably Philips stack-sequence / slices-per-stack; confirm against
    # the vendor conformance statement.
    number_of_stack_slices = int(common.get_ss_value(multiframe_dicom[Tag(0x2001, 0x105f)][0][Tag(0x2001, 0x102d)]))
    number_of_stacks = int(int(multiframe_dicom.NumberOfFrames) / number_of_stack_slices)

    # We create a numpy array
    # pixel_array is indexed [frame, row, col], so shape[2] is x and shape[1] is y.
    size_x = multiframe_dicom.pixel_array.shape[2]
    size_y = multiframe_dicom.pixel_array.shape[1]
    size_z = number_of_stack_slices
    size_t = number_of_stacks
    # get the format
    format_string = common.get_numpy_type(multiframe_dicom)

    # get header info needed for ordering
    # (0x5200,0x9230) is the Per-Frame Functional Groups Sequence: one item per frame.
    frame_info = multiframe_dicom[0x5200, 0x9230]

    data_4d = numpy.zeros((size_z, size_y, size_x, size_t), dtype=format_string)

    # loop over each slice and insert in datablock
    t_location_index = _get_t_position_index(multiframe_dicom)
    for slice_index in range(0, size_t * size_z):

        # InStackPositionNumber and the temporal indices are 1-based; convert to 0-based.
        z_location = frame_info[slice_index].FrameContentSequence[0].InStackPositionNumber - 1
        if t_location_index is None:
            # Fall back to the explicit temporal position when no dimension index applies.
            t_location = frame_info[slice_index].FrameContentSequence[0].TemporalPositionIndex - 1
        else:
            t_location = frame_info[slice_index].FrameContentSequence[0].DimensionIndexValues[t_location_index] - 1

        block_data = multiframe_dicom.pixel_array[slice_index, :, :]
        # apply scaling
        rescale_intercept = frame_info[slice_index].PixelValueTransformationSequence[0].RescaleIntercept
        rescale_slope = frame_info[slice_index].PixelValueTransformationSequence[0].RescaleSlope
        # Identity defaults used when the private scaling tags are absent.
        private_scale_slope = 1.0
        private_scale_intercept = 0.0
        # NOTE(review): (0x2005,0x140f/0x100d/0x100e) are private tags —
        # presumably Philips per-frame sequence with scale intercept/slope.
        private_sequence_tag = Tag(0x2005, 0x140f)
        private_scale_intercept_tag = Tag(0x2005, 0x100d)
        private_scale_slope_tag = Tag(0x2005, 0x100e)
        if private_sequence_tag in frame_info[slice_index]:
            if private_scale_intercept_tag in frame_info[slice_index][private_sequence_tag][0]:
                private_scale_intercept = common.get_fl_value(frame_info[slice_index][private_sequence_tag][0][private_scale_intercept_tag])
            if private_scale_slope_tag in frame_info[slice_index][private_sequence_tag][0]:
                private_scale_slope = common.get_fl_value(frame_info[slice_index][private_sequence_tag][0][private_scale_slope_tag])
        block_data = common.do_scaling(block_data, rescale_slope, rescale_intercept, private_scale_slope, private_scale_intercept)
        # switch to float if needed
        # Scaling can promote the dtype; promote the accumulator to match so
        # already-stored frames keep their values.
        if block_data.dtype != data_4d.dtype:
            data_4d = data_4d.astype(block_data.dtype)
        data_4d[z_location, :, :, t_location] = block_data

    full_block = numpy.zeros((size_x, size_y, size_z, size_t), dtype=data_4d.dtype)

    # loop over each stack and reorganize the data
    for t_index in range(0, size_t):
        # transpose the block so the directions are correct: (z, y, x) -> (x, y, z)
        data_3d = numpy.transpose(data_4d[:, :, :, t_index], (2, 1, 0))
        # add the block to the full data
        full_block[:, :, :, t_index] = data_3d

    return full_block