Example #1
def write_dicom(pixel_array, filename):
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
    file_meta.MediaStorageSOPInstanceUID = '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385720736873780'
    file_meta.ImplementationClassUID = '1.3.6.1.4.1.9590.100.1.0.100.4.0'
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.Modality = 'WSD'
    ds.ContentDate = str(datetime.date.today()).replace('-', '')
    ds.ContentTime = str(time.time())  # seconds since the epoch
    ds.StudyInstanceUID = '1.3.6.1.4.1.9590.100.1.1.124313977412360175234271287472804872093'
    ds.SeriesInstanceUID = '1.3.6.1.4.1.9590.100.1.1.369231118011061003403421859172643143649'
    ds.SOPInstanceUID = '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385720736873780'
    ds.SOPClassUID = 'Secondary Capture Image Storage'
    ds.SecondaryCaptureDeviceManufacturer = 'Python 2.7.3'

    ## These are the necessary imaging components of the FileDataset object.
    ds.SamplesPerPixel = 1
    ds.PhotometricInterpretation = "MONOCHROME2"
    ds.PixelRepresentation = 0
    ds.HighBit = 15
    ds.BitsStored = 16
    ds.BitsAllocated = 16
    ds.SmallestImagePixelValue = '\\x00\\x00'
    ds.LargestImagePixelValue = '\\xff\\xff'
    ds.Columns = pixel_array.shape[0]
    ds.Rows = pixel_array.shape[1]
    if pixel_array.dtype != np.uint16:
        pixel_array = pixel_array.astype(np.uint16)
    ds.PixelData = pixel_array.tostring()

    ds.save_as(filename)
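A minimal usage sketch, assuming numpy is imported as np and that the imports write_dicom itself relies on (dicom.dataset, datetime, time) are in scope; note the snippet maps shape[0] to Columns and shape[1] to Rows, so pass the array in the orientation you want written.
frame = (np.random.rand(512, 512) * 65535).astype(np.uint16)  # synthetic 16-bit test image
write_dicom(frame, 'secondary_capture.dcm')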
Example #2
 def OnReceiveStore(self, SOPClass, DS):
     # sleep(0.1)
     rl.info('Received C-STORE')
     # do something with dataset. For instance, store it on disk.
     file_meta = Dataset()
     file_meta.MediaStorageSOPClassUID = DS.SOPClassUID
     file_meta.MediaStorageSOPInstanceUID = UID.generate_uid() #DS.SOPInstanceUID  # !! Need valid UID here
     file_meta.ImplementationClassUID = UID.pydicom_root_UID #"1.2.3.4"  # !!! Need valid UIDs here
     file_path = directory.joinPath([self.StorePath, str(DS.SeriesDate), str(DS.SeriesDescription)])
     if fmriQA.is_dicom_dict_QA(DS):
         directory.createPath(file_path)
         filename = directory.joinPath([file_path, "I%05d" % DS.get('InstanceNumber') + '.'+config.DATA_EXT])
         ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
         ds.update(DS)
         ds.is_little_endian = True
         ds.is_implicit_VR = True
         # print ds - prints all DICOM tags contained in the file prior its saving
         ds.save_as(filename)
         if directory.isFile(filename):
             rl.info('File %s written' % filename)
             # must return appropriate status
             return SOPClass.Success
         else:
             rl.error('File %s failed to write' % filename)
             return SOPClass.UnableToProcess
     else:
         # `filename` is not defined in this branch, so log the target path instead
         rl.warning('The sent file was not recognised as QA file (%s)' % file_path)
         return SOPClass.IdentifierDoesNotMatchSOPClass
Example #3
    def create_dicom_contours(self):
        """ Creates and returns a list of Dicom CONTOUR objects from self.
        """

        # in order to get DICOM readable by Eclipse we need to connect each contour with CT slice
        # CT slices are identified by SOPInstanceUID
        # first we assume some default value if we cannot figure out CT slice info (i.e. CT cube is not loaded)
        ref_sop_instance_uid = '1.2.3'

        # then we check if CT cube is loaded
        if self.cube is not None:

            # if CT cube is loaded we extract DICOM representation of the cube (1 dicom per slice)
            # and select DICOM object for current slice based on slice position
            # it is time consuming as for each call of this method we generate full DICOM representation (improve!)
            candidates = [dcm for dcm in self.cube.create_dicom() if dcm.SliceLocation == self.get_position()]
            if len(candidates) > 0:
                # finally we extract CT slice SOP Instance UID
                ref_sop_instance_uid = candidates[0].SOPInstanceUID

        contour_list = []
        for item in self.contour:
            con = Dataset()
            contour = []
            for p in item.contour:
                contour.extend([p[0], p[1], p[2]])
            con.ContourData = contour
            con.ContourGeometricType = 'CLOSED_PLANAR'
            con.NumberofContourPoints = item.number_of_points()
            cont_image_item = Dataset()
            cont_image_item.ReferencedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'  # CT Image Storage SOP Class
            cont_image_item.ReferencedSOPInstanceUID = ref_sop_instance_uid  # CT slice Instance UID
            con.ContourImageSequence = Sequence([cont_image_item])
            contour_list.append(con)
        return contour_list
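A hedged sketch of how the returned contour items might be attached to an ROI contour dataset, mirroring create_dicom_contour_data() shown later on this page; slice_obj is a placeholder instance of this class, and Dataset/Sequence come from dicom.dataset and dicom.sequence.
roi_contours = Dataset()
roi_contours.Contours = Sequence(slice_obj.create_dicom_contours())
roi_contours.ROIDisplayColor = [255, 0, 0]  # example display colour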
Example #4
 def testUID(self):
     """DataElement: setting or changing UID results in UID type........."""
     ds = Dataset()
     ds.TransferSyntaxUID = "1.2.3"
     self.assert_(type(ds.TransferSyntaxUID) is UID, "Assignment to UID did not create UID class")
     ds.TransferSyntaxUID += ".4.5.6"
     self.assert_(type(ds.TransferSyntaxUID) is UID, "+= to UID did not keep as UID class")
Example #5
def create_dicom(private_tag,payload,filename):
    """ Function creates minimal dicom file from scratch with required tags
        and stores payload (string) in the specified private tag.
    """

    # create empty dicomfile
    file_meta = Dataset()

    # Raw Data Storage
    file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.66'

    # unique UIDs
    file_meta.MediaStorageSOPInstanceUID = dicom.UID.generate_uid()
    file_meta.ImplementationClassUID = dicom.UID.generate_uid()

    ds = FileDataset(filename, {},file_meta = file_meta,preamble="\0"*128)

    ds.SOPClassUID = '1.2.840.10008.5.1.4.1.1.7' # secondary capture SOP UID
    ds.SOPInstanceUID = file_meta.MediaStorageSOPInstanceUID
    ds.StudyInstanceUID = dicom.UID.generate_uid()
    ds.SeriesInstanceUID = dicom.UID.generate_uid()

    ds.PatientID = PatientID
    ds.PatientName = PatientName
    ds.StudyDescription = StudyDescription
    ds.SeriesDescription = SeriesDescription
    ds.Modality = 'OT'
    ds.StudyDate = time.strftime('%Y%m%d')
    ds.SeriesDate = ds.StudyDate
    ds.ContentDate = ds.StudyDate
    ds.StudyTime = ds.SeriesTime = ds.ContentTime = time.strftime('%H%M%S')

    ds.add_new(private_tag,'OB', payload)

    ds.save_as(filename)
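A hedged usage sketch: create_dicom also reads module-level PatientID, PatientName, StudyDescription and SeriesDescription variables, so those values (and the private tag below) are placeholders.
PatientID, PatientName = 'ANON001', 'ANON^PATIENT'
StudyDescription, SeriesDescription = 'payload study', 'payload series'
create_dicom((0x00f1, 0x1001), 'example payload string', 'payload.dcm')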
Example #6
    def queryRetrievePatient(self,
                             patientId,
                             patientName,
                             remoteAe,
                             thread=None):
        """
        """
        queryLvl = "PATIENT"
        association = self._ae.RequestAssociation(remoteAe)
        result = self.find([association, queryLvl, patientName, patientId])

        for entity in result:
            if not entity[1]: continue

            d = Dataset()
            try:
                d.PatientID = entity[1].PatientID
            except Exception, err:
                self._logger.error(str(err))
                continue

            subAssociation = self._ae.RequestAssociation(remoteAe)
            generator = subAssociation.PatientRootMoveSOPClass.SCU(
                d, self._ae.name, 1)

            for subentity in generator:
                self._logger.info(subentity)

            subAssociation.Release(0)
Example #7
 def OnReceiveStore(self, SOPClass, ds):
     # do something with dataset. For instance, store it.
     self.logger.debug(
         "Received C-STORE SeriesInstanceUID:'%s', SOPInstanceUID:'%s''" %
         (ds.SeriesInstanceUID, ds.SOPInstanceUID))
     file_meta = Dataset()
     file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
     # !! Need valid UID here
     file_meta.MediaStorageSOPInstanceUID = "1.2.3"
     # !!! Need valid UIDs here
     file_meta.ImplementationClassUID = "1.2.3.4"
     folder = os.path.join(self.import_folder, ds.StudyID)
     if not os.path.isdir(folder):
         os.makedirs(folder)
     filename = '%s/%s.dcm' % (folder, ds.SOPInstanceUID)
     fileds = FileDataset(filename, {},
                          file_meta=file_meta,
                          preamble="\0" * 128)
     fileds.update(ds)
     fileds.save_as(filename)
     self.logger.info("file %s written" % filename)
     if self.onDicomSaved:
         self.logger.info("calling callback")
         self.onDicomSaved(filename)
     # must return appropriate status
     return SOPClass.Success
Example #8
 def testSetNewDataElementByName(self):
     """Dataset: set new data_element by name............................."""
     ds = Dataset()
     ds.TreatmentMachineName = "unit #1"
     data_element = ds[0x300a, 0x00b2]
     self.assertEqual(data_element.value, "unit #1", "Unable to set data_element by name")
     self.assertEqual(data_element.VR, "SH", "data_element not the expected VR")
Example #9
 def testUID(self):
     """DataElement: setting or changing UID results in UID type........."""
     ds = Dataset()
     ds.TransferSyntaxUID = "1.2.3"
     self.assert_(type(ds.TransferSyntaxUID) is UID, "Assignment to UID did not create UID class")
     ds.TransferSyntaxUID += ".4.5.6"
     self.assert_(type(ds.TransferSyntaxUID) is UID, "+= to UID did not keep as UID class")
Example #10
def WriteDCMFile(pixel_array,filename):
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'RT Image Storage'
    file_meta.MediaStorageSOPInstanceUID = '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385720736873780'
    file_meta.ImplementationClassUID = '1.3.6.1.4.1.9590.100.1.0.100.4.0'
    ds = FileDataset(filename, {},file_meta = file_meta,preamble="\0"*128)
    ds.Modality = 'RTIMAGE'
    ds.ContentDate = str(datetime.date.today()).replace('-','')
    ds.ContentTime = str(time.time())  # seconds since the epoch
    ds.StudyInstanceUID =  '1.3.6.1.4.1.9590.100.1.1.124313977412360175234271287472804872093'
    ds.SeriesInstanceUID = '1.3.6.1.4.1.9590.100.1.1.369231118011061003403421859172643143649'
    ds.SOPInstanceUID =    '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385720736873780'
    ds.SOPClassUID = 'RT Image Storage'
    ds.SecondaryCaptureDeviceManufacturer = 'Varian Medical Systems'

    ## These are the necessary imaging components of the FileDataset object.
    ds.SamplesPerPixel = 1
    ds.ImagePlanePixelSpacing = [0.392, 0.392]
    ds.PhotometricInterpretation = "MONOCHROME2"
    ds.PixelRepresentation = 0
    ds.HighBit = 15
    ds.BitsStored = 16
    ds.BitsAllocated = 16
    # ds.SmallestImagePixelValue = '\\x00\\x00'
    # ds.LargestImagePixelValue = '\\xff\\xff'
    ds.Columns = 1024  # pixel_array.shape[0]
    ds.Rows = 764  # pixel_array.shape[1]
    ds.RescaleSlope = 1.0
    ds.RescaleIntercept = 1.0
    # if type(pixel_array) != np.uint16:
    #     pixel_array =np.uint16(pixel_array)
    ds.PixelData = pixel_array
    ds.save_as(filename,write_like_original=True)
    return
Example #11
def write_dicom(pixel_array, filename):
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
    file_meta.MediaStorageSOPInstanceUID = '1.3.6.1.4.1.9590.100.1.1.111165684'
    file_meta.MediaStorageSOPInstanceUID += '411017669021768385720736873780'
    file_meta.ImplementationClassUID = '1.3.6.1.4.1.9590.100.1.0.100.4.0'
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble=b"\0"*128)
    ds.Modality = 'MR'
    ds.ContentDate = str(date.today()).replace('-', '')
    ds.ContentTime = str(time())
    ds.StudyInstanceUID = '1.3.6.1.4.1.9590.100.1.1.1243139774123601752342712'
    ds.StudyInstanceUID += '87472804872093'
    ds.SeriesInstanceUID = '1.3.6.1.4.1.9590.100.1.1.369231118011061003403421'
    ds.SeriesInstanceUID += '859172643143649'
    ds.SOPInstanceUID = '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385'
    ds.SOPInstanceUID += '720736873780'
    ds.SOPClassUID = 'Secondary Capture Image Storage'
    ds.SecondaryCaptureDeviceManufacturer = 'Python 3.3.5'
    # Options
    ds.InstitutionName = "Imperial College London"
    ds.RepetitionTime = 300
    # These are the necessary imaging components of the FileDataset object.
    ds.SamplesPerPixel = 1
    ds.PhotometricInterpretation = "MONOCHROME2"
    ds.PixelRepresentation = 0
    ds.HighBit = 15
    ds.BitsStored = 16
    ds.BitsAllocated = 16
    ds.Columns = pixel_array.shape[1]
    ds.Rows = pixel_array.shape[0]
    ds.PixelData = pixel_array.tostring()
    ds.save_as(filename)

    return 0
Example #12
def read_partial(fileobj, stop_when=None, defer_size=None, force=False):
    """Parse a DICOM file until a condition is met

    ``read_partial`` is normally not called directly. Use ``read_file``
        instead, unless you need to stop on some condition
        other than reaching pixel data.

    :arg fileobj: a file-like object. This function does not close it.
    :arg stop_when: a callable which takes tag, VR, length,
                    and returns True or False.
                    If stop_when returns True,
                        read_data_element will raise StopIteration.
                    If None (default), then the whole file is read.
    :returns: a FileDataset instance
    """
    # Read preamble -- raise an exception if missing and force=False
    preamble = read_preamble(fileobj, force)
    file_meta_dataset = Dataset()
    # Assume a transfer syntax, correct it as necessary
    is_implicit_VR = True
    is_little_endian = True
    if preamble:
        file_meta_dataset = _read_file_meta_info(fileobj)
        transfer_syntax = file_meta_dataset.TransferSyntaxUID
        if transfer_syntax == dicom.UID.ImplicitVRLittleEndian:
            pass
        elif transfer_syntax == dicom.UID.ExplicitVRLittleEndian:
            is_implicit_VR = False
        elif transfer_syntax == dicom.UID.ExplicitVRBigEndian:
            is_implicit_VR = False
            is_little_endian = False
        elif transfer_syntax == dicom.UID.DeflatedExplicitVRLittleEndian:
            # See PS3.6-2008 A.5 (p 71)
            # when written, the entire dataset following
            #     the file metadata was prepared the normal way,
            #     then "deflate" compression applied.
            #  All that is needed here is to decompress and then
            #     use as normal in a file-like object
            zipped = fileobj.read()
            # -MAX_WBITS part is from comp.lang.python answer:
            # groups.google.com/group/comp.lang.python/msg/e95b3b38a71e6799
            unzipped = zlib.decompress(zipped, -zlib.MAX_WBITS)
            fileobj = BytesIO(unzipped)  # a file-like object
            is_implicit_VR = False
        else:
            # Any other syntax should be Explicit VR Little Endian,
            #   e.g. all Encapsulated (JPEG etc) are ExplVR-LE
            #        by Standard PS 3.5-2008 A.4 (p63)
            is_implicit_VR = False
    else:  # no header -- use the is_little_endian, implicit assumptions
        file_meta_dataset.TransferSyntaxUID = dicom.UID.ImplicitVRLittleEndian

    try:
        dataset = read_dataset(fileobj, is_implicit_VR, is_little_endian,
                            stop_when=stop_when, defer_size=defer_size)
    except EOFError as e:
        pass  # error already logged in read_dataset
    return FileDataset(fileobj, dataset, preamble, file_meta_dataset,
                                is_implicit_VR, is_little_endian)
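A hedged usage sketch of the stop_when hook described in the docstring: a callable taking (tag, VR, length) that returns True to stop parsing, here once the Pixel Data element (7FE0,0010) is reached; the file name is a placeholder.
def _before_pixel_data(tag, VR, length):
    return tag == 0x7FE00010  # stop at Pixel Data

with open('image.dcm', 'rb') as fileobj:
    partial_ds = read_partial(fileobj, stop_when=_before_pixel_data)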
Example #13
 def testSetNonDicom(self):
     """Dataset: can set class instance property (non-dicom).............."""
     ds = Dataset()
     ds.SomeVariableName = 42
     has_it = hasattr(ds, 'SomeVariableName')
     self.assert_(has_it, "Variable did not get created")
     if has_it:
         self.assertEqual(ds.SomeVariableName, 42, "There, but wrong value")
Example #14
    def create_dicom_structure_roi(self):
        """ Based on self.name, an empty Dicom ROI is generated.

        :returns: a Dicom ROI.
        """
        roi = Dataset()
        roi.ROIName = self.name
        return roi
Example #15
 def testSetNonDicom(self):
     """Dataset: can set class instance property (non-dicom)............."""
     ds = Dataset()
     ds.SomeVariableName = 42
     has_it = hasattr(ds, 'SomeVariableName')
     self.assertTrue(has_it, "Variable did not get created")
     if has_it:
         self.assertEqual(ds.SomeVariableName, 42, "There, but wrong value")
Example #16
def write_dicom(pixel_array, filename, ds_copy, ds_ori, volume_number,
                series_number, sop_id):
    """Write data in dicom file and copy the header from different dicoms.

    :param pixel_array: data to write in a dicom
    :param filename: file name for the dicom
    :param ds_copy: pydicom object of the dicom to copy info from
    :param ds_ori: pydicom object of the dicom where the array comes from
    :param volume_number: number of the volume being processed
    :param series_number: number of the series being written
    :param sop_id: SOPID for the dicom
    :return: None
    """
    # Set to zero negatives values in the image:
    pixel_array[pixel_array < 0] = 0

    # Set the DICOM dataset
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
    file_meta.MediaStorageSOPInstanceUID = ds_ori.SOPInstanceUID
    file_meta.ImplementationClassUID = ds_ori.SOPClassUID
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0"*128)

    # Copy the tag from the original DICOM
    for tag, value in ds_ori.items():
        if tag != ds_ori.data_element("PixelData").tag:
            ds[tag] = value

    # Other tags to set
    ds.SeriesNumber = series_number
    ds.SeriesDescription = ds_ori.SeriesDescription + ' reg_f3d'
    sop_uid = sop_id + str(datetime.datetime.now()).replace('-', '')\
                                                   .replace(':', '')\
                                                   .replace('.', '')\
                                                   .replace(' ', '')
    ds.SOPInstanceUID = sop_uid[:-1]
    ds.ProtocolName = ds_ori.ProtocolName
    ds.InstanceNumber = volume_number+1

    # Copy from T2 the orientation tags:
    ds.PatientPosition = ds_copy.PatientPosition
    ds[0x18, 0x50] = ds_copy[0x18, 0x50]  # Slice Thickness
    ds[0x18, 0x88] = ds_copy[0x18, 0x88]  # Spacing Between Slices
    ds[0x18, 0x1312] = ds_copy[0x18, 0x1312]  # In-plane Phase Encoding
    ds[0x20, 0x32] = ds_copy[0x20, 0x32]  # Image Position
    ds[0x20, 0x37] = ds_copy[0x20, 0x37]  # Image Orientation
    ds[0x20, 0x1041] = ds_copy[0x20, 0x1041]  # Slice Location
    ds[0x28, 0x10] = ds_copy[0x28, 0x10]  # rows
    ds[0x28, 0x11] = ds_copy[0x28, 0x11]  # columns
    ds[0x28, 0x30] = ds_copy[0x28, 0x30]  # Pixel spacing

    # Set the Image pixel array
    if pixel_array.dtype != np.uint16:
        pixel_array = pixel_array.astype(np.uint16)
    ds.PixelData = pixel_array.tostring()

    # Save the image
    ds.save_as(filename)
Example #17
    def create_dicom_label(self):
        """ Based on self.name and self.type, a Dicom ROI_LABEL is generated.

        :returns: a Dicom ROI_LABEL
        """
        roi_label = Dataset()
        roi_label.ROIObservationLabel = self.name
        roi_label.RTROIInterpretedType = self.get_roi_type_name(self.type)
        return roi_label
Example #18
    def create_dicom(self):
        """ Creates a Dicom RT-Dose object from self.

        This function can be used to convert a TRiP98 Dose file to Dicom format.

        :returns: a Dicom RT-Dose object.
        """

        if not _dicom_loaded:
            raise ModuleNotLoadedError("Dicom")
        if not self.header_set:
            raise InputError("Header not loaded")

        ds = self.create_dicom_base()
        ds.Modality = 'RTDOSE'
        ds.SamplesperPixel = 1
        ds.BitsAllocated = self.num_bytes * 8
        ds.BitsStored = self.num_bytes * 8
        ds.AccessionNumber = ''
        ds.SeriesDescription = 'RT Dose'
        ds.DoseUnits = 'GY'
        ds.DoseType = 'PHYSICAL'
        ds.DoseGridScaling = self.target_dose / 10**5
        ds.DoseSummationType = 'PLAN'
        ds.SliceThickness = ''
        ds.InstanceCreationDate = '19010101'
        ds.InstanceCreationTime = '000000'
        ds.NumberOfFrames = len(self.cube)
        ds.PixelRepresentation = 0
        ds.StudyID = '1'
        ds.SeriesNumber = 14
        ds.GridFrameOffsetVector = [
            x * self.slice_distance for x in range(self.dimz)
        ]
        ds.InstanceNumber = ''
        ds.NumberofFrames = len(self.cube)
        ds.PositionReferenceIndicator = "RF"
        ds.TissueHeterogeneityCorrection = ['IMAGE', 'ROI_OVERRIDE']
        ds.ImagePositionPatient = [
            "%.3f" % (self.xoffset * self.pixel_size),
            "%.3f" % (self.yoffset * self.pixel_size),
            "%.3f" % (self.slice_pos[0])
        ]
        ds.SOPClassUID = '1.2.840.10008.5.1.4.1.1.481.2'
        ds.SOPInstanceUID = '1.2.246.352.71.7.320687012.47206.20090603085223'
        ds.SeriesInstanceUID = '1.2.246.352.71.2.320687012.28240.20090603082420'

        # Bind to rtplan
        rt_set = Dataset()
        rt_set.RefdSOPClassUID = '1.2.840.10008.5.1.4.1.1.481.5'
        rt_set.RefdSOPInstanceUID = '1.2.3'
        ds.ReferencedRTPlans = Sequence([rt_set])
        pixel_array = np.zeros((len(self.cube), ds.Rows, ds.Columns),
                               dtype=self.pydata_type)
        pixel_array[:][:][:] = self.cube[:][:][:]
        ds.PixelData = pixel_array.tostring()
        return ds
Example #19
 def testSetNewDataElementByName(self):
     """Dataset: set new data_element by name............................."""
     ds = Dataset()
     ds.TreatmentMachineName = "unit #1"
     data_element = ds[0x300a, 0x00b2]
     self.assertEqual(data_element.value, "unit #1",
                      "Unable to set data_element by name")
     self.assertEqual(data_element.VR, "SH",
                      "data_element not the expected VR")
Example #20
    def WriteDICOM_slice(self, pixel_array,filename, itemnumber=0, PhotometricInterpretation="MONOCHROME2"):
        from dicom.dataset import Dataset, FileDataset
        import numpy as np
        import datetime, time
        """
        INPUTS:
        pixel_array: 2D numpy ndarray.  If pixel_array is larger than 2D, errors.
        filename: string name for the output file.
        """
        ## This code block was taken from the output of a MATLAB secondary
        ## capture.  I do not know what the long dotted UIDs mean, but
        ## this code works.
        file_meta = Dataset()
        file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
        file_meta.MediaStorageSOPInstanceUID = '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385720736873780'
        file_meta.ImplementationClassUID = '1.3.6.1.4.1.9590.100.1.0.100.4.0'
        ds = FileDataset(filename, {},file_meta = file_meta,preamble="\0"*128)
        ds.Modality = 'WSD'
        ds.ContentDate = str(datetime.date.today()).replace('-','')
        ds.ContentTime = str(time.time())  # seconds since the epoch
        ds.StudyInstanceUID =  '1.3.6.1.4.1.9590.100.1.1.124313977412360175234271287472804872093'
        ds.SeriesInstanceUID = '1.3.6.1.4.1.9590.100.1.1.369231118011061003403421859172643143649'
        ds.SOPInstanceUID =    '1.3.6.1.4.1.9590.100.1.1.111165684411017669021768385720736873780'
        ds.SOPClassUID = 'Secondary Capture Image Storage'
        ds.SecondaryCaptureDeviceManufacturer = 'Python 2.7.3'
        ## These are the necessary imaging components of the FileDataset object.
        ds.SamplesPerPixel = 1
        if PhotometricInterpretation=="MONOCHROME2":
            ds.PhotometricInterpretation = "MONOCHROME2"
            ds.PixelRepresentation = 0
            ds.HighBit = 15
            ds.BitsStored = 16
            ds.BitsAllocated = 16
            ds.SmallestImagePixelValue = '\\x00\\x00'
            ds.LargestImagePixelValue = '\\xff\\xff'
        elif PhotometricInterpretation=="RGB":
            ds.PhotometricInterpretation = "MONOCHROME2"
            ds.PixelRepresentation = 0
            ds.HighBit = 15
            ds.BitsStored = 16
            ds.BitsAllocated = 16
            ds.SmallestImagePixelValue = '\\x00\\x00'
            ds.LargestImagePixelValue = '\\xff\\xff'    
            pixel_array = pixel_array[0]
            print pixel_array.shape
        ds.Columns = pixel_array.shape[0]
        ds.ItemNumber = str(itemnumber)
        ds.InstanceNumber = str(itemnumber)
        ds.SliceLocation = str(itemnumber)
        ds.Rows = pixel_array.shape[1]
        if pixel_array.dtype != np.uint16:
            pixel_array = pixel_array.astype(np.uint16)
        ds.PixelData = pixel_array.tostring()
        ds.save_as(filename)

        return filename
Example #21
    def testTagExceptionPrint(self):
        # When printing datasets, a tag number should appear in error
        # messages
        ds = Dataset()
        ds.PatientID = "123456"  # Valid value
        ds.SmallestImagePixelValue = 0  # Invalid value

        expected_msg = "Invalid tag (0028, 0106): object of type 'int' has no len()"

        self.failUnlessExceptionArgs(expected_msg, TypeError, lambda: str(ds))
Example #22
    def testTagExceptionPrint(self):
        # When printing datasets, a tag number should appear in error
        # messages
        ds = Dataset()
        ds.PatientID = "123456" # Valid value
        ds.SmallestImagePixelValue = 0 # Invalid value

        expected_msg = "Invalid tag (0028, 0106): object of type 'int' has no len()"

        self.failUnlessExceptionArgs(expected_msg, TypeError, lambda: str(ds))
Example #23
    def testValidAssignment(self):
        """Sequence: ensure ability to assign a Dataset to a Sequence item"""
        ds = Dataset()
        ds.add_new((1,1), 'IS', 1)

        # Create a single element Sequence first
        seq = Sequence([Dataset(),])
        seq[0] = ds

        self.assertEqual(seq[0], ds, "Dataset modified during assignment")
Example #24
    def testValidAssignment(self):
        """Sequence: ensure ability to assign a Dataset to a Sequence item"""
        ds = Dataset()
        ds.add_new((1, 1), 'IS', 1)

        # Create a single element Sequence first
        seq = Sequence([Dataset(), ])
        seq[0] = ds

        self.assertEqual(seq[0], ds, "Dataset modified during assignment")
Example #25
 def testAttributeErrorInProperty(self):
     """Dataset: AttributeError in property raises actual error message..."""
     # This comes from bug fix for issue 42
     # First, fake enough to try the pixel_array property
     ds = Dataset()
     ds.file_meta = Dataset()
     ds.PixelData = 'xyzlmnop'
     attribute_error_msg = "AttributeError in pixel_array property: " + \
                        "Dataset does not have attribute 'TransferSyntaxUID'"
     self.failUnlessExceptionArgs(attribute_error_msg,
                     PropertyError, ds._get_pixel_array)
Example #26
 def testAttributeErrorInProperty(self):
     """Dataset: AttributeError in property raises actual error message..."""
     # This comes from bug fix for issue 42
     # First, fake enough to try the pixel_array property
     ds = Dataset()
     ds.file_meta = Dataset()
     ds.PixelData = 'xyzlmnop'
     attribute_error_msg = "AttributeError in pixel_array property: " + \
                        "Dataset does not have attribute 'TransferSyntaxUID'"
     self.failUnlessExceptionArgs(attribute_error_msg, PropertyError,
                                  ds._get_pixel_array)
Example #27
    def __init__(self):
        self.CommandSet = None
        self.EncodedDataSet = None
        self.DataSet = None
        self.encoded_command_set = ''
        self.ID = id

        self.ts = ImplicitVRLittleEndian  # imposed by standard.
        if self.__class__ != DIMSEMessage:
            self.CommandSet = Dataset()
            for ii in self.CommandFields:
                self.CommandSet.add_new(ii[1], ii[2], '')
Example #28
    def __init__(self, fp, stop_when=None, force=False):
        """Read the preamble and meta info, prepare iterator for remainder

        fp -- an open DicomFileLike object, at start of file

        Adds flags to fp: Big/Little-endian and Implicit/Explicit VR
        """
        self.fp = fp
        self.stop_when = stop_when
        self.preamble = preamble = read_preamble(fp, force)
        self.has_header = has_header = (preamble is not None)
        self.file_meta_info = Dataset()
        if has_header:
            self.file_meta_info = file_meta_info = _read_file_meta_info(fp)
            transfer_syntax = file_meta_info.TransferSyntaxUID
            if transfer_syntax == dicom.UID.ExplicitVRLittleEndian:
                self._is_implicit_VR = False
                self._is_little_endian = True
            elif transfer_syntax == dicom.UID.ImplicitVRLittleEndian:
                self._is_implicit_VR = True
                self._is_little_endian = True
            elif transfer_syntax == dicom.UID.ExplicitVRBigEndian:
                self._is_implicit_VR = False
                self._is_little_endian = False
            elif transfer_syntax == dicom.UID.DeflatedExplicitVRLittleEndian:
                # See PS3.6-2008 A.5 (p 71) -- when written, the entire dataset
                #   following the file metadata was prepared the normal way,
                #   then "deflate" compression applied.
                #  All that is needed here is to decompress and then
                #      use as normal in a file-like object
                zipped = fp.read()
                # -MAX_WBITS part is from comp.lang.python answer:
                # groups.google.com/group/comp.lang.python/msg/e95b3b38a71e6799
                unzipped = zlib.decompress(zipped, -zlib.MAX_WBITS)
                fp = BytesIO(unzipped)  # a file-like object
                self.fp = fp  # point to new object
                self._is_implicit_VR = False
                self._is_little_endian = True
            else:
                # Any other syntax should be Explicit VR Little Endian,
                #   e.g. all Encapsulated (JPEG etc) are ExplVR-LE
                #        by Standard PS 3.5-2008 A.4 (p63)
                self._is_implicit_VR = False
                self._is_little_endian = True
        else:  # no header -- make assumptions
            fp.TransferSyntaxUID = dicom.UID.ImplicitVRLittleEndian
            self._is_little_endian = True
            self._is_implicit_VR = True

        impl_expl = ("Explicit", "Implicit")[self._is_implicit_VR]
        big_little = ("Big", "Little")[self._is_little_endian]
        logger.debug("Using {0:s} VR, {1:s} Endian transfer syntax".format(
                            impl_expl, big_little))
Example #29
    def testValidInitialization(self):
        """Sequence: Ensure valid creation of Sequences using Dataset inputs"""
        inputs = {'PatientPosition': 'HFS',
                  'PatientSetupNumber': '1',
                  'SetupTechniqueDescription': ''}
        patientSetups = Dataset()
        patientSetups.update(inputs)

        # Construct the sequence
        seq = Sequence((patientSetups,))
        self.assertTrue(isinstance(seq[0], Dataset),
                        "Dataset modified during Sequence creation")
Example #30
    def testValidInitialization(self):
        """Sequence: Ensure valid creation of Sequences using Dataset inputs"""
        inputs = {  'PatientPosition':'HFS',
                    'PatientSetupNumber':'1',
                    'SetupTechniqueDescription':''}
        patientSetups = Dataset()
        patientSetups.update(inputs)

        # Construct the sequence
        seq = Sequence((patientSetups,))
        self.assert_(isinstance(seq[0], Dataset), 
                      "Dataset modified during Sequence creation")
Example #31
def saveDicomFile(filename,
                  patientName,
                  patientId,
                  gender,
                  birthday,
                  imageArray,
                  transpose=False):

    meta = Dataset()
    SOPClassUID = "1.2.840.10008.5.1.4.1.1.2"  # sop class UID dla obrazow CT
    meta.MediaStorageSOPClassUID = SOPClassUID  # Wygenerowany unikalny UID
    date = datetime.datetime.now().strftime('%Y%m%d')  # Obecny czas
    time = datetime.datetime.now().strftime('%H%M%S.%f')  # Obecny czas
    randomUId = SOPClassUID + "." + date + time  # Wygenerowany unikalny UID
    meta.MediaStorageSOPInstanceUID = randomUId  # Wygenerowany unikalny UID
    meta.ImplementationClassUID = randomUId + "." + "1"  # Wygenerowany unikalny UID

    dataSet = FileDataset(filename, {}, file_meta=meta,
                          preamble=b"\0" * 128)  # Utworzenie obiektu DICOM
    dataSet.PatientName = patientName  # Imie pacjenta
    dataSet.PatientID = patientId  # Id pacjenta
    dataSet.PatientBirthDate = birthday  # Data urodzenia pacjenta
    dataSet.PatientSex = gender  # Plec pacjenta
    dataSet.is_little_endian = True
    dataSet.is_implicit_VR = True
    dataSet.ContentDate = date  # Czas utworzenia pliku (YYYY:MM:DD)
    dataSet.StudyDate = date  # Czas ostatniego otworzenia obrazu (YYYY-MM-DD)
    dataSet.StudyTime = time  # Czas ostatniego otworzenia obrazu (HH:MM:SS)
    dataSet.ContentTime = time  # Czas utworzenia pliku (HH:MM:SS)
    dataSet.StudyInstanceUID = randomUId + "." + "2"  # Wygenerowany unikalny UID
    dataSet.SeriesInstanceUID = randomUId + "." + "3"  # Wygenerowany unikalny UID
    dataSet.SOPInstanceUID = randomUId + "." + "4"  # Wygenerowany unikalny UID
    dataSet.SOPClassUID = "CT." + date + time  # Wygenerowany unikalny UID

    dataSet.SamplesPerPixel = 1  # Liczba kanałów. 1 - dla skali szarosci
    dataSet.PhotometricInterpretation = "MONOCHROME2"  # MONOCHROE - obraz jest w skali szarości, 2 - maksymalna wartosc wskazuje kolor bialy
    dataSet.PixelRepresentation = 0  # 0 - wartosci sa tylko dodatnie (unsigned) 1 - wartosci sa tez ujemne
    dataSet.HighBit = 15  # Najważniejszy bit w pliku z obrazem
    dataSet.BitsStored = 16  # Liczba bitow na jedna wartosc w obrazie
    dataSet.BitsAllocated = 16  # Liczba bitow na jedna wartosc ktora jest zaalokowana dla obrazu
    dataSet.SmallestImagePixelValue = b'\\x00\\x00'  # Wskazanie minimalnej wartosci dla kanalu
    dataSet.LargestImagePixelValue = b'\\xff\\xff'  # Wskazanie maksymalnej wartosci dla kanalu
    dataSet.Rows = imageArray.shape[1]  # Liczba wierszy
    dataSet.Columns = imageArray.shape[0]  # Liczba kolumn
    if imageArray.dtype != np.uint16:  # Sprawdzenie czy wartosci sa w przedziale [0,255]
        imageArray = skimage.img_as_uint(
            imageArray)  # Zamiana na wartosci w przedziale [0,255]
        if transpose == True:  # Zamiana wierszy i kolumn (opcjonalne)
            dataSet.Rows = imageArray.shape[0]
            dataSet.Columns = imageArray.shape[1]
    dataSet.PixelData = imageArray.tostring()  # Zapisanie obrazu
    dataSet.save_as(filename)  # Zapisanie pliku na dysku
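A minimal usage sketch with placeholder patient data; it assumes numpy is imported as np (skimage is only needed when the array is not already uint16).
image = np.zeros((256, 256), dtype=np.uint16)  # blank 16-bit test image
saveDicomFile('patient.dcm', 'DOE^JANE', 'PID0001', 'F', '19800101', image)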
Example #32
    def create_dicom_contour_data(self, i):
        """ Based on self.slices, Dicom conours are generated for the Dicom ROI.

        :returns: Dicom ROI_CONTOURS
        """
        roi_contours = Dataset()
        contours = []
        for slice in self.slices.values():
            contours.extend(slice.create_dicom_contours())
        roi_contours.Contours = Sequence(contours)
        roi_contours.ROIDisplayColor = self.get_color(i)

        return roi_contours
Example #33
    def testTagExceptionWalk(self):
        # When recursing through dataset, a tag number should appear in
        # error messages
        ds = Dataset()
        ds.PatientID = "123456" # Valid value
        ds.SmallestImagePixelValue = 0 # Invalid value

        expected_msg = "Invalid tag (0028, 0106): object of type 'int' has no len()"

        callback = lambda dataset, data_element: str(data_element)
        func = lambda: ds.walk(callback)

        self.failUnlessExceptionArgs(expected_msg, TypeError, func)
Example #34
    def testTagExceptionWalk(self):
        # When recursing through dataset, a tag number should appear in
        # error messages
        ds = Dataset()
        ds.PatientID = "123456"  # Valid value
        ds.SmallestImagePixelValue = 0  # Invalid value

        expected_msg = "Invalid tag (0028, 0106): object of type 'int' has no len()"

        callback = lambda dataset, data_element: str(data_element)
        func = lambda: ds.walk(callback)

        self.failUnlessExceptionArgs(expected_msg, TypeError, func)
Example #35
 def testGetFromRaw(self):
     """Dataset: get(tag) returns same object as ds[tag] for raw element.."""
     # This came from issue 88, where get(tag#) returned a RawDataElement, 
     #     while get(name) converted to a true DataElement
     test_tag = 0x100010
     test_elem = RawDataElement(Tag(test_tag), 'PN', 4, 'test', 0, True, True) 
     ds = Dataset({Tag(test_tag): test_elem})
     by_get = ds.get(test_tag)
     by_item = ds[test_tag]
     
     # self.assertEqual(type(elem_get), type(name_get), "Dataset.get() returned different type for name vs tag access")
     msg = "Dataset.get() returned different objects for ds.get(tag) and ds[tag]:\nBy get():%r\nBy ds[tag]:%r\n"
     self.assertEqual(by_get, by_item, msg % (by_get, by_item))
Example #36
def write_dicom(pixel_array, filename, ds_copy, ds_ori, volume_number,
                series_number, sop_id):
    """Write a dicom from a pixel_array (numpy).

    :param pixel_array: 2D numpy ndarray.
                        If pixel_array is larger than 2D, errors.
    :param filename: string name for the output file.
    :param ds_copy: pydicom object with the header that need to be copy
    :param ds_ori: original pydicom object of the pixel_array
    :param volume_number: number of the volume being processed
    :param series_number: number of the series being processed
    :param sop_id: SOPInstanceUID for the DICOM
    """
    # Set to zero negatives values in the image:
    pixel_array[pixel_array < 0] = 0

    # Set the DICOM dataset
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
    file_meta.MediaStorageSOPInstanceUID = ds_ori.SOPInstanceUID
    file_meta.ImplementationClassUID = ds_ori.SOPClassUID
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0"*128)

    # Copy the tag from the original DICOM
    for tag, value in ds_ori.items():
        if tag != ds_ori.data_element("PixelData").tag:
            ds[tag] = value

    # Other tags to set
    ds.SeriesNumber = series_number
    ds.SeriesDescription = ds_ori.SeriesDescription + ' reg_f3d'
    sop_uid = sop_id + str(datetime.datetime.now()).replace('-', '')\
                                                   .replace(':', '')\
                                                   .replace('.', '')\
                                                   .replace(' ', '')
    ds.SOPInstanceUID = sop_uid[:-1]
    ds.ProtocolName = ds_ori.ProtocolName
    ds.InstanceNumber = volume_number+1

    # Copy from T2 the orientation tags:
    for tag in TAGS_TO_COPY:
        if tag in ds_copy:
            ds[tag] = ds_copy[tag]

    # Set the Image pixel array
    if pixel_array.dtype != np.uint16:
        pixel_array = pixel_array.astype(np.uint16)
    ds.PixelData = pixel_array.tostring()

    # Save the image
    ds.save_as(filename)
Example #37
def get_graphic_annotation(sop_class, sop_instance_uid, layer_name,
                           graphic_objects, text_objects):
    ds_graphic_annotation = Dataset()
    referenced_sequence_dataset = Dataset()
    referenced_sequence_dataset.ReferencedSOPClassUID = sop_class
    referenced_sequence_dataset.ReferencedSOPInstanceUID = sop_instance_uid
    ds_graphic_annotation.ReferencedImageSequence = Sequence(
        [referenced_sequence_dataset])
    ds_graphic_annotation.GraphicLayer = layer_name
    if graphic_objects is not None and len(graphic_objects) > 0:
        ds_graphic_annotation.GraphicObjects = Sequence(graphic_objects)
    if text_objects is not None and len(text_objects) > 0:
        ds_graphic_annotation.TextObjects = Sequence(text_objects)
    return ds_graphic_annotation
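A hedged usage sketch: builds one annotation layer referencing a CT slice; the UIDs and layer name are placeholders, and passing an empty list or None simply leaves the corresponding sequence unset.
annotation = get_graphic_annotation(
    sop_class='1.2.840.10008.5.1.4.1.1.2',  # CT Image Storage
    sop_instance_uid='1.2.3',               # placeholder referenced instance UID
    layer_name='MEASUREMENTS',
    graphic_objects=[],
    text_objects=None)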
Example #38
def write_dicom(pixel_array, filename):
    """
    INPUTS:
    pixel_array: 2D or 3D numpy ndarray.
    filename: string name for the output file.
    """

    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = '1.1'
    file_meta.MediaStorageSOPInstanceUID = '1.2'
    file_meta.ImplementationClassUID = '1.3'
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble=b'\x00' * 128)
    ds.Modality = 'PET'  # imaging modality name
    ds.ContentDate = str(datetime.date.today()).replace('-', '')
    ds.ContentTime = str(time.time())  # seconds since the epoch
    ds.StudyInstanceUID = '1.3'
    ds.SeriesInstanceUID = '1.3'
    ds.SOPInstanceUID = '1.3'
    ds.SOPClassUID = 'Secondary Capture Image Storage'
    ds.SecondaryCaptureDeviceManufacturer = 'Python 3.7.5'

    # Patient-related data
    ds.PatientName = 'Wawrzyniec L. Dobrucki'
    ds.PatientID = 'M12345'
    ds.PatientSex = 'M'
    ds.PatientBirthDate = '12/12/12'
    ds.SeriesNumber = 5
    ds.SeriesDescription = 'test image'

    ds.SliceThickness = '0.5'  # slice thickness in mm
    ds.PixelSpacing = '0.5'  # pixel spacing or size in mm

    # These are the necessary imaging components of the FileDataset object.
    ds.SamplesPerPixel = 1
    ds.PhotometricInterpretation = "MONOCHROME2"
    ds.PixelRepresentation = 0  # unsigned (default)
    ds.HighBit = 15
    ds.BitsStored = 16  # default
    ds.BitsAllocated = 16  # default
    ds.NumberOfFrames = 216  # number of frames for 3D files
    # ds.SmallestImagePixelValue = '\\x00\\x00'
    # ds.LargestImagePixelValue = '\\xff\\xff'
    ds.Columns = pixel_array.shape[0]
    ds.Rows = pixel_array.shape[1]
    if pixel_array.dtype != np.uint16:
        pixel_array = pixel_array.astype(np.uint16)
    ds.PixelData = pixel_array.tostring()

    ds.save_as(filename)
    return
Example #39
    def testGetFromRaw(self):
        """Dataset: get(tag) returns same object as ds[tag] for raw element.."""
        # This came from issue 88, where get(tag#) returned a RawDataElement,
        #     while get(name) converted to a true DataElement
        test_tag = 0x100010
        test_elem = RawDataElement(Tag(test_tag), 'PN', 4, 'test', 0, True,
                                   True)
        ds = Dataset({Tag(test_tag): test_elem})
        by_get = ds.get(test_tag)
        by_item = ds[test_tag]

        # self.assertEqual(type(elem_get), type(name_get), "Dataset.get() returned different type for name vs tag access")
        msg = "Dataset.get() returned different objects for ds.get(tag) and ds[tag]:\nBy get():%r\nBy ds[tag]:%r\n"
        self.assertEqual(by_get, by_item, msg % (by_get, by_item))
Example #40
def writeDicom(ods, mrn, studyID, outdir):

    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
    file_meta.MediaStorageSOPInstanceUID = '0.0'
    file_meta.ImplementationClassUID = '0.0'
    ds = FileDataset(studyID, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.Modality = ods.Modality if "Modality" in ods else ""
    ds.StudyDate = ods.StudyDate if "StudyDate" in ods else ""
    ds.StudyTime = ods.StudyTime if "StudyTime" in ods else ""
    ds.StudyInstanceUID = '0.0'
    ds.SeriesInstanceUID = '0.0'
    ds.SOPInstanceUID = '0.0'
    ds.SOPClassUID = 'Secondary Capture Image Storage'
    ds.SecondaryCaptureDeviceManufacturer = 'Python 2.7.3'

    ## These are the necessary imaging components of the FileDataset object.
    ds.AccessionNumber = str(studyID)
    ds.PatientID = str(mrn)
    ds.StudyID = str(studyID)
    ds.PatientName = str(studyID)
    ds.PatientBirthDate = "00000000"
    ds.PatientAge = calculateAge(
        ods.StudyDate, ods.PatientBirthDate
    ) if "StudyDate" in ods and "PatientBirthDate" in ods else ""
    ds.PatientSex = ods.PatientSex if "PatientSex" in ods else ""
    ds.StudyDescription = ods.StudyDescription if "StudyDescription" in ods else ""
    ds.SeriesDescription = ods.SeriesDescription if "SeriesDescription" in ods else ""
    ds.ViewPosition = ods.ViewPosition if "ViewPosition" in ods else ""
    ds.InstanceNumber = ods.InstanceNumber if "InstanceNumber" in ods else ""
    ds.SeriesNumber = ods.SeriesNumber if "SeriesNumber" in ods else ""
    ds.SamplesPerPixel = ods.SamplesPerPixel if "SamplesPerPixel" in ods else ""
    ds.PhotometricInterpretation = ods.PhotometricInterpretation if "PhotometricInterpretation" in ods else ""
    ds.PixelRepresentation = ods.PixelRepresentation if "PixelRepresentation" in ods else ""
    ds.HighBit = ods.HighBit if "HighBit" in ods else ""
    ds.BitsStored = ods.BitsStored if "BitsStored" in ods else ""
    ds.BitsAllocated = ods.BitsAllocated if "BitsAllocated" in ods else ""
    ds.Columns = ods.Columns if "Columns" in ods else ""
    ds.Rows = ods.Rows if "Rows" in ods else ""
    ds.PixelData = ods.PixelData if "PixelData" in ods else ""
    filename = cleanString(
        str(studyID) + "_" + str(ds.SeriesNumber) + "_" +
        str(ds.InstanceNumber) + "_" + str(ds.Modality) + "_" +
        str(ds.StudyDescription) + "_" + str(ds.SeriesDescription) + "_" +
        str(ds.ViewPosition) + ".dcm")
    outpath = os.path.join(outdir, filename)
    ds.save_as(outpath)
    return
Example #41
    def onReceiveStore(self, sopClass, receivedDs):
        """
        """
        self._logger.debug("Received C-STORE: " + receivedDs.PatientName)

        try:
            # DICOM header (metadata)
            file_meta = Dataset()
            file_meta.MediaStorageSOPClassUID = receivedDs.SOPClassUID
            file_meta.MediaStorageSOPInstanceUID = receivedDs.SOPInstanceUID
            # TransferSyntaxUID

            # pydicom root UID + 1
            file_meta.ImplementationClassUID = "1.2.826.0.1.3680043.8.498.1"

            path = self.downloadDir + os.sep + receivedDs.PatientID

            # Patient ID is the root folder
            if not os.path.isdir(path):
                os.mkdir(path)

            path = path + os.sep + receivedDs.StudyInstanceUID

            # DICOM study separated to subfolder under patient
            if not os.path.isdir(path):
                os.mkdir(path)

            filename = path + os.sep + receivedDs.Modality + "." + receivedDs.SOPInstanceUID + ".dcm"

            # Create a DICOM file
            ds = FileDataset(filename, {},
                             file_meta=file_meta,
                             preamble="\0" * 128)
            ds.update(receivedDs)
            ds.save_as(filename)

            self._fileCounter = self._fileCounter + 1

            self._logger.debug("File written to: " + filename)

            if self._guiThread:
                self._guiThread.emit(QtCore.SIGNAL("log(QString)"),
                                     "File written to: " + filename)
                self._guiThread.emit(QtCore.SIGNAL("taskUpdated"),
                                     self._fileCounter)

        except Exception, err:
            self._logger.error(str(err))
Example #42
def OnReceiveStore(SOPClass, DS):
    #print "Received C-STORE"
    # do something with dataset. For instance, store it on disk.
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = DS.SOPClassUID
    file_meta.MediaStorageSOPInstanceUID = "1.2.3"  # !! Need valid UID here
    file_meta.ImplementationClassUID = "1.2.3.4"  # !!! Need valid UIDs here
    filename = '%s/%s.dcm' % (tempfile.gettempdir(), DS.SOPInstanceUID)
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.update(DS)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    ds.save_as(filename)
    #print "File %s written" % filename
    # must return appropriate status
    return SOPClass.Success
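A hedged sketch of replacing the placeholder file-meta UIDs above with generated ones, following the approach Example #2 already uses (assumes the old dicom package, where dicom.UID.generate_uid() produces a unique UID under the pydicom root).
from dicom import UID

file_meta.MediaStorageSOPInstanceUID = UID.generate_uid()      # valid, unique instance UID
file_meta.ImplementationClassUID = UID.pydicom_root_UID + '1'  # implementation UID under the pydicom root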
Example #43
 def testInvalidAssignment(self):
     """Sequence: validate exception for invalid assignment"""
     seq = Sequence([
         Dataset(),
     ])
     # Attempt to assign an integer to the first element
     self.assertRaises(TypeError, seq.__setitem__, 0, 1)
Example #44
def OnReceiveStore(SOPClass, DS):
    #print "Received C-STORE"
    # do something with dataset. For instance, store it on disk.
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = DS.SOPClassUID
    file_meta.MediaStorageSOPInstanceUID = "1.2.3"  # !! Need valid UID here
    file_meta.ImplementationClassUID = "1.2.3.4"  # !!! Need valid UIDs here
    filename = '%s/%s.dcm' % (tempfile.gettempdir(), DS.SOPInstanceUID)
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.update(DS)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    ds.save_as(filename)
    #print "File %s written" % filename
    # must return appropriate status
    return SOPClass.Success
Example #45
def OnReceiveStore(SOPClass, DS):
    print "Received C-STORE"
    # do something with dataset. For instance, store it.
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'  # CT Image Storage
    file_meta.MediaStorageSOPInstanceUID = "1.2.3"  # !! Need valid UID here for real work
    file_meta.ImplementationClassUID = "1.2.3.4"  # !!! Need valid UIDs here
    filename = '/tmp/%s.dcm' % DS.SOPInstanceUID
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.update(DS)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    ds.save_as(filename)
    print "File %s written" % filename
    # must return appropriate status
    return 0
Example #46
def OnReceiveStore(SOPClass, DS):
    print "Received C-STORE"
    # do something with dataset. For instance, store it.
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2' # CT Image Storage
    file_meta.MediaStorageSOPInstanceUID = "1.2.3" # !! Need valid UID here for real work
    file_meta.ImplementationClassUID = "1.2.3.4" # !!! Need valid UIDs here
    filename = '/tmp/%s.dcm' % DS.SOPInstanceUID
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0"*128)
    ds.update(DS)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    ds.save_as(filename)
    print "File %s written" % filename
    # must return appropriate status
    return 0
Example #47
def write_dicom(pixel_array, index, filename):
    ## This code block was taken from the output of a MATLAB secondary
    ## capture.  I do not know what the long dotted UIDs mean, but
    ## this code works.
    target_dir = '/'.join(filename.split('/')[:-1]) 
    print(target_dir)
    mkdir_p(target_dir)
    file_meta = Dataset()
    ds = FileDataset(filename, {},file_meta = file_meta)
    ds.ContentDate = str(datetime.date.today()).replace('-','')
    ds.ContentTime = str(time.time())  # seconds since the epoch

    ## These are the necessary imaging components of the FileDataset object.
    ds.SamplesPerPixel = 1
    ds.InstanceNumber = index
    #ds.PhotometricInterpretation = bytes("MONOCHROME2", "UTF-8")
    ds.PixelRepresentation = 0
    ds.HighBit = 15
    ds.BitsStored = 16
    ds.BitsAllocated = 16
    #ds.SmallestImagePixelValue = bytes('\\x00\\x00', 'UTF-8')
    #ds.LargestImagePixelValue = bytes('\\xff\\xff', 'UTF-8')
            
    ds.Columns = pixel_array.shape[1]
    ds.Rows = pixel_array.shape[0]
    if pixel_array.dtype != np.uint16:
        pixel_array = pixel_array.astype(np.uint16)
    ds.PixelData = pixel_array.tostring()
    ds.save_as(filename)
Example #48
 def test_matching_tags(self):
     """Dataset: key and data_element.tag mismatch raises ValueError....."""
     def set_wrong_tag():
         ds[0x10, 0x10] = data_element
     ds = Dataset()
     data_element = DataElement((0x300a, 0x00b2), "SH", "unit001")
     self.assertRaises(ValueError, set_wrong_tag)
Example #49
class DicomIter(object):
    """Iterator over DICOM data elements created from a file-like object
    """
    def __init__(self, fp, stop_when=None, force=False):
        """Read the preambleand meta info, prepare iterator for remainder

        fp -- an open DicomFileLike object, at start of file

        Adds flags to fp: Big/Little-endian and Implicit/Explicit VR
        """
        self.fp = fp
        self.stop_when = stop_when
        self.preamble = preamble = read_preamble(fp, force)
        self.has_header = has_header = (preamble is not None)
        self.file_meta_info = Dataset()
        if has_header:
            self.file_meta_info = file_meta_info = _read_file_meta_info(fp)
            transfer_syntax = file_meta_info.TransferSyntaxUID
            if transfer_syntax == dicom.UID.ExplicitVRLittleEndian:
                self._is_implicit_VR = False
                self._is_little_endian = True
            elif transfer_syntax == dicom.UID.ImplicitVRLittleEndian:
                self._is_implicit_VR = True
                self._is_little_endian = True
            elif transfer_syntax == dicom.UID.ExplicitVRBigEndian:
                self._is_implicit_VR = False
                self._is_little_endian = False
            elif transfer_syntax == dicom.UID.DeflatedExplicitVRLittleEndian:
                # See PS3.6-2008 A.5 (p 71) -- when written, the entire dataset following
                #     the file metadata was prepared the normal way, then "deflate" compression applied.
                #  All that is needed here is to decompress and then use as normal in a file-like object
                zipped = fp.read()
                # -MAX_WBITS part is from comp.lang.python answer:  http://groups.google.com/group/comp.lang.python/msg/e95b3b38a71e6799
                unzipped = zlib.decompress(zipped, -zlib.MAX_WBITS)
                fp = StringIO(unzipped) # a file-like object that usual code can use as normal
                self.fp = fp #point to new object
                self._is_implicit_VR = False
                self._is_little_endian = True
            else:
                # Any other syntax should be Explicit VR Little Endian,
                #   e.g. all Encapsulated (JPEG etc) are ExplVR-LE by Standard PS 3.5-2008 A.4 (p63)
                self._is_implicit_VR = False
                self._is_little_endian = True
        else: # no header -- make assumptions
            fp.TransferSyntaxUID = dicom.UID.ImplicitVRLittleEndian
            self._is_little_endian = True
            self._is_implicit_VR = True

        logger.debug("Using %s VR, %s Endian transfer syntax" %(("Explicit", "Implicit")[self._is_implicit_VR], ("Big", "Little")[self._is_little_endian]))

    def __iter__(self):
        tags = sorted(self.file_meta_info.keys())
        for tag in tags:
            yield self.file_meta_info[tag]

        for data_element in data_element_generator(self.fp, self._is_implicit_VR, 
                                                   self._is_little_endian, 
                                                   stop_when=self.stop_when):
            yield data_element
Example #50
def OnReceiveStore(SOPClass, DS):
    print "Received C-STORE"
    # do something with dataset. For instance, store it on disk.
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = DS.SOPClassUID
    file_meta.MediaStorageSOPInstanceUID = DS.SOPInstanceUID # Change to my own UID via dicom.UID.generate_uid(), though I should make sure such generated UIDs use my org root.
    file_meta.ImplementationClassUID = "1.2.826.0.1.3680043.9.5066.0"  # My UID for my implementation, with a '.0' appended to it to represent development testing, ie the UID I got from Medical Connections.
    file_meta.ImplementationVersionName = "DICOMMETRICS-DEV" # My implementation version name. Remember to change this for each version. Remember to correspondingly change the ImplementationClassUID too.
    filename = '%s/%s.dcm' % (tempfile.gettempdir(), DS.SOPInstanceUID)
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.update(DS)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    ds.save_as(filename)
    print "File %s written" % filename
    # must return appropriate status
    return SOPClass.Success
Example #51
def OnReceiveStore(SOPClass, DS):
    print "Received C-STORE", DS.PatientName
    try:
        # do something with dataset. For instance, store it.
        file_meta = Dataset()
        file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
        file_meta.MediaStorageSOPInstanceUID = "1.2.3"  # !! Need valid UID here
        file_meta.ImplementationClassUID = "1.2.3.4"  # !!! Need valid UIDs here
        filename = '%s/%s.dcm' % (tempfile.gettempdir(), DS.SOPInstanceUID)
        ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
        ds.update(DS)
        ds.save_as(filename)
        print "File %s written" % filename
    except Exception:
        # swallow any storage error so a status is still returned to the SCU;
        # a real SCP should at least log the failure here
        pass
    # must return appropriate status
    return SOPClass.Success
Example #52
0
def test_csa_header_read():
    hdr = csa.get_csa_header(DATA, 'image')
    assert_equal(hdr['n_tags'], 83)
    assert_equal(csa.get_csa_header(DATA, 'series')['n_tags'], 65)
    assert_raises(ValueError, csa.get_csa_header, DATA, 'xxxx')
    assert_true(csa.is_mosaic(hdr))
    # Get a shallow copy of the data, lacking the CSA marker
    # Need to do it this way because del appears broken in pydicom 0.9.7
    from dicom.dataset import Dataset
    data2 = Dataset()
    for element in DATA:
        if (element.tag.group, element.tag.elem) != (0x29, 0x10):
            data2.add(element)
    assert_equal(csa.get_csa_header(data2, 'image'), None)
    # Add back the marker - CSA works again
    data2[(0x29, 0x10)] = DATA[(0x29, 0x10)]
    assert_true(csa.is_mosaic(csa.get_csa_header(data2, 'image')))
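The test above relies on the Siemens CSA private creator element at tag (0029,0010). A small stand-alone sketch of the same presence check, using only the Dataset operations shown in the test:

from dicom.dataset import Dataset

def has_csa_marker(dataset):
    # The CSA header is announced by the private creator element
    # in group 0x0029, element 0x0010.
    try:
        dataset[(0x29, 0x10)]
        return True
    except KeyError:
        return False

empty = Dataset()
assert not has_csa_marker(empty)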
Example #53
0
        def OnReceiveStore(SOPClass, DS):
            file_meta = Dataset()
            file_meta.MediaStorageSOPClassUID = DS.SOPClassUID
            file_meta.MediaStorageSOPInstanceUID = DS.SOPInstanceUID
            file_meta.ImplementationClassUID = DS.SOPClassUID  # should be this implementation's own UID, not the received SOP Class UID
            filename = os.path.join(
                constants.dicom_dir, foldername, DS.SOPInstanceUID + ".dcm")
            print "making file ", filename
            ds = FileDataset(
                filename, {}, file_meta=file_meta, preamble="\0" * 128)
            ds.update(DS)
            ds.is_little_endian = True
            ds.is_implicit_VR = True

            ds.save_as(filename)
            # print "File %s written" % filename
            # must return appropriate status
            return SOPClass.Success
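The handler above assumes the target folder already exists before save_as is called. A small sketch of creating it first (constants.dicom_dir and foldername are the names used above; ensure_dir is a hypothetical helper):

import os

def ensure_dir(path):
    # Create the directory tree if missing; another store thread may have
    # created it in the meantime, so tolerate that race.
    if not os.path.isdir(path):
        try:
            os.makedirs(path)
        except OSError:
            if not os.path.isdir(path):
                raise

# e.g. before building `filename` in OnReceiveStore:
# ensure_dir(os.path.join(constants.dicom_dir, foldername))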
Example #54
0
def write_file(filename, dataset, WriteLikeOriginal=True):
    """Store a Dataset to the filename specified.
    
    Set dataset.preamble if you want something other than 128 0-bytes.
    If the dataset was read from an existing dicom file, then its preamble
    was stored at read time. It is up to you to ensure the preamble is still
    correct for its purposes.
    If there is no Transfer Syntax tag in the dataset, set
       dataset.is_implicit_VR and dataset.is_little_endian
       to determine the transfer syntax used to write the file.
    WriteLikeOriginal -- True if you want to preserve the following for each sequence
        within this dataset:
        - preamble -- if there was no preamble in the read file, then none is written here
        - dataset.hasFileMeta -- if the read file had no file meta information,
            then don't write it here either
        - seq.is_undefined_length -- if the original had delimiters, write them now too,
            instead of the more sensible explicit lengths
        - <dataset>.is_undefined_length_sequence_item -- for datasets that belong to a
            sequence, write the undefined length delimiters if that is
            what the original had
        Set WriteLikeOriginal = False to produce a "nicer" DICOM file for other readers,
            where all lengths are explicit.
    """

    # Decide whether to write DICOM preamble. Should always do so unless trying to mimic the original file read in
    preamble = getattr(dataset, "preamble", None)
    if not preamble and not WriteLikeOriginal:
        preamble = "\0" * 128

    file_meta = dataset.file_meta
    if file_meta is None:
        file_meta = Dataset()
    if "TransferSyntaxUID" not in file_meta:
        if dataset.is_little_endian and dataset.is_implicit_VR:
            file_meta.add_new((2, 0x10), "UI", ImplicitVRLittleEndian)
        elif dataset.is_little_endian and not dataset.is_implicit_VR:
            file_meta.add_new((2, 0x10), "UI", ExplicitVRLittleEndian)
        elif dataset.is_big_endian and not dataset.is_implicit_VR:
            file_meta.add_new((2, 0x10), "UI", ExplicitVRBigEndian)
        else:
            raise NotImplementedError, "pydicom has not been verified for Big Endian with Implicit VR"

    fp = DicomFile(filename, "wb")
    try:
        if preamble:
            fp.write(preamble)  # blank 128 byte preamble
            _write_file_meta_info(fp, file_meta)

        # Set file VR, endian. MUST BE AFTER writing META INFO (which changes it to Explicit VR Little Endian)
        fp.is_implicit_VR = dataset.is_implicit_VR
        fp.is_little_endian = dataset.is_little_endian

        write_dataset(fp, dataset)
    finally:
        fp.close()
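A brief usage sketch for write_file as defined above: read an existing file with pydicom, change a value, and write it back while preserving the original encoding. The file names are placeholders.

import dicom

ds = dicom.read_file("input.dcm")   # placeholder path
ds.PatientID = "ANON0001"
# WriteLikeOriginal=True keeps the original preamble, file meta and length conventions.
write_file("output.dcm", ds, WriteLikeOriginal=True)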
Example #55
0
def create_dcm(filename,patname='BarcoQC',patid='112233'):


    textfile = open(filename,'rb')

    filename = os.path.splitext(filename)[0]+'.dcm'
    print "Output filename:", filename

    # Populate required values for file meta information
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.104.1' # Encapsulated PDF Storage; perhaps Secondary Capture (OT) would be better
    file_meta.MediaStorageSOPInstanceUID = "1.2.3" # !! Need valid UID here for real work
    file_meta.ImplementationClassUID = "1.2.3.4" # !!! Need valid UIDs here
   
    print "Setting dataset values..."
   
    # Create the FileDataset instance (initially no data elements, but file_meta supplied)
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0"*128)
   
    # Add the data elements -- not trying to set all required here. Check DICOM standard
    ds.PatientName = patname
    ds.PatientID = patid
    
    #datestring = extract_date(textfile)
    #print datestring
    
    #ds.InstanceCreationDate = datestring
    # Set the transfer syntax
    ds.is_little_endian = True
    ds.is_implicit_VR = True

    #doctag = dicom.tag.Tag(("0042","0011"))
    doctag = dicom.tag.Tag(("0001","9999"))  # note: group 0x0001 is not a legal private group; an odd group >= 0x0009 (or (0042,0011) Encapsulated Document) would be safer
    ds.add_new(doctag,'OB', textfile.read())

   
    print "Writing test file", filename
    ds.save_as(filename)
    print "File saved."
Example #56
0
    def get_file_meta(self):
        """generate the dicom file meta data.
        """
        file_meta = Dataset()
        file_meta.FileMetaInformationVersion = '\x00\x01'
        file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2' # CT Image Storage
        
        file_meta.MediaStorageSOPInstanceUID = ''  # !! Need valid UID here for real work
        file_meta.TransferSyntaxUID = '1.2.840.10008.1.2'
 
        file_meta.ImplementationClassUID = pydicom_root_UID + "1" # !!! Need valid UIDs here
    
        return  file_meta
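A short usage sketch pairing the meta produced by get_file_meta() with a FileDataset whose encoding flags match the Implicit VR Little Endian transfer syntax above; new_ct_file is a hypothetical helper name.

from dicom.dataset import FileDataset

def new_ct_file(filename, file_meta):
    # Wrap the file meta in an otherwise empty FileDataset ready for tag setting.
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0" * 128)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    return ds

# e.g. ds = new_ct_file('ct_slice.dcm', writer.get_file_meta())  # hypothetical writer object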
Example #57
0
def write_dicom(pixel_array, filename, series_number, fdf_info,
                series_description, project, subject, session):
    """Write a dicom from a pixel_array (numpy).

    :param pixel_array: 2D, 3D or 4D numpy ndarray of pixel values.
    :param filename: string name for the output file.
    :param series_number: number of the series being processed
    :param fdf_info: dict with 'spacing', 'orientation' and 'origin' entries
                     used to set the geometry tags
    :param series_description: series description for Osirix display
    :param project: XNAT Project ID
    :param subject: XNAT Subject label
    :param session: XNAT Session label
    """
    # Variables
    # rotate the image 270 degrees (three successive 90-degree rotations)
    pixel_array = np.rot90(pixel_array, 3)
    # less than zero
    # pixel_array[pixel_array < 0] = 0
    now = datetime.datetime.now()
    date = '%d%02d%02d' % (now.year, now.month, now.day)
    sop_id = '1.2.840.10008.5.1.4.1.1.4.%s' % date
    ti = str(time.time())
    uid = sop_id + ti
    # Other tags to set
    sop_uid = sop_id + str(now).replace('-', '')\
                               .replace(':', '')\
                               .replace('.', '')\
                               .replace(' ', '')
    # Number of frames
    size = pixel_array.shape
    nb_frames = None
    if len(size) == 3:
        nb_frames = size[2]
    elif len(size) == 4:
        nb_frames = size[2]*size[3]

    # Create the ds object
    file_meta = Dataset()
    file_meta.MediaStorageSOPClassUID = 'Secondary Capture Image Storage'
    file_meta.MediaStorageSOPInstanceUID = sop_id
    file_meta.ImplementationClassUID = '1.2.840.10008.5.1.4.1.1.4'
    ds = FileDataset(filename, {}, file_meta=file_meta, preamble="\0"*128)

    ds.SeriesDate = '%d%02d%02d' % (now.year, now.month, now.day)
    ds.ContentDate = '%d%02d%02d' % (now.year, now.month, now.day)
    ds.ContentTime = str(time.time())  # milliseconds since the epoch
    ds.StudyInstanceUID = uid
    ds.SeriesInstanceUID = uid
    # Other tags to set
    ds.SOPInstanceUID = sop_uid[:-1]
    ds.SOPClassUID = 'Secondary Capture Image Storage'
    ds.SecondaryCaptureDeviceManufacturer = 'Python 2.7.3'

    # These are the necessary imaging components of the FileDataset object.
    ds.Modality = 'MR'
    ds.ConversionType = 'WSD'
    ds.SamplesPerPixel = 1
    ds.PhotometricInterpretation = "MONOCHROME2"
    ds.PixelRepresentation = 0
    ds.HighBit = 15
    ds.BitsStored = 16
    ds.BitsAllocated = 16
    ds.Columns = pixel_array.shape[0]
    ds.Rows = pixel_array.shape[1]
    ds.ProtocolName = '%s 9.4T' % series_description
    ds.StudyDescription = project
    ds.PatientsName = subject
    ds.PatientID = session
    ds.SeriesDescription = series_description
    ds.SeriesNumber = 1
    ds.SmallestImagePixelValue = pixel_array.min()
    ds.LargestImagePixelValue = pixel_array.max()
    ds.AcquisitionNumber = 1
    ds.PixelSpacing = '%s\%s' % (fdf_info['spacing'][0],
                                 fdf_info['spacing'][1])
    ds.SpacingBetweenSlices = float(fdf_info['spacing'][2])
    ds.ImageOrientation = '\\'.join(fdf_info['orientation'])
    ds.PatientPosition = '\\'.join(fdf_info['origin'])
    if nb_frames:
        ds.NumberOfFrames = nb_frames

    # Organise the array:
    if len(size) == 3:
        pixel_array2 = np.zeros((size[0]*size[2], size[1]))
        for i in range(size[2]):
            pixel_array2[size[0]*i:size[0]*(i+1), :] = pixel_array[:, :, i]
    elif len(size) > 3:
        pixel_array2 = np.zeros((size[0]*size[2]*size[3], size[1]))
        for i in range(size[2]):
            for j in range(size[3]):
                pixel_array2[size[0]*j+i*size[3]*size[0]:
                             size[0]*(j+1)+i*size[3]*size[0],
                             :] = pixel_array[:, :, i, j]
    else:
        pixel_array2 = pixel_array

    # Set the Image pixel array
    if pixel_array2.dtype != np.uint16:
        pixel_array2 = pixel_array2.astype(np.uint16)
        print pixel_array2.max()
        print pixel_array2.min()

    ds.PixelData = pixel_array2.tostring()
    # Save the image
    ds.save_as(filename)
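A call sketch with placeholder arguments, assuming fdf_info carries the 'spacing', 'orientation' and 'origin' lists read by the function:

import numpy as np

slices = np.random.randint(0, 1000, (64, 64, 10)).astype(np.uint16)
fdf_info = {'spacing': ['0.5', '0.5', '1.0'],
            'orientation': ['1', '0', '0', '0', '1', '0'],
            'origin': ['0', '0', '0']}
write_dicom(slices, '/tmp/series_001.dcm', 1, fdf_info,
            'T2w test', 'PROJ01', 'SUBJ01', 'SESS01')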