def create_metadata():
     metadata = Dataset()
     metadata.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.7'
     metadata.MediaStorageSOPInstanceUID = '1.2.3'
     metadata.TransferSyntaxUID = '1.2.840.10008.1.2'
     metadata.ImplementationClassUID = '1.3.6.1.4.1.5962.2'
     return metadata
Example #2
 def test_reshape_pixel_array_not_implemented(self):
     """Test Dataset._reshape_pixel_array raises exception"""
     ds = Dataset()
     ds.SamplesPerPixel = 2
     ds.BitsAllocated = 16
     with pytest.raises(NotImplementedError):
         ds._reshape_pixel_array(None)
Example #3
File: dos.py Project: pytrip/pytrip
    def create_dicom(self):
        """ Creates a DICOM RT-Dose object from self.

        This function can be used to convert a TRiP98 Dose file to DICOM format.

        :returns: a DICOM RT-Dose object.
        """

        if not _dicom_loaded:
            raise ModuleNotLoadedError("DICOM")
        if not self.header_set:
            raise InputError("Header not loaded")

        ds = self.create_dicom_base()
        ds.Modality = 'RTDOSE'
        ds.SamplesPerPixel = 1
        ds.BitsAllocated = self.num_bytes * 8
        ds.BitsStored = self.num_bytes * 8
        ds.AccessionNumber = ''
        ds.SeriesDescription = 'RT Dose'
        ds.DoseUnits = 'GY'
        ds.DoseType = 'PHYSICAL'
        ds.DoseGridScaling = self.target_dose / 10**5
        ds.DoseSummationType = 'PLAN'
        ds.SliceThickness = ''
        ds.InstanceCreationDate = '19010101'
        ds.InstanceCreationTime = '000000'
        ds.NumberOfFrames = len(self.cube)
        ds.PixelRepresentation = 0
        ds.StudyID = '1'
        ds.SeriesNumber = '14'  # SeriesNumber tag 0x0020,0x0011 (type IS - Integer String)
        ds.GridFrameOffsetVector = [x * self.slice_distance for x in range(self.dimz)]
        ds.InstanceNumber = ''
        ds.PositionReferenceIndicator = "RF"
        ds.TissueHeterogeneityCorrection = ['IMAGE', 'ROI_OVERRIDE']
        ds.ImagePositionPatient = ["%.3f" % (self.xoffset * self.pixel_size), "%.3f" % (self.yoffset * self.pixel_size),
                                   "%.3f" % (self.slice_pos[0])]
        ds.SOPClassUID = '1.2.840.10008.5.1.4.1.1.481.2'
        ds.SOPInstanceUID = '1.2.246.352.71.7.320687012.47206.20090603085223'

        # Study Instance UID tag 0x0020,0x000D (type UI - Unique Identifier)
        # self._dicom_study_instance_uid may be set either in __init__ when creating a new object
        #   or when importing a DICOM file
        #   Study Instance UID for structures is the same as Study Instance UID for CTs
        ds.StudyInstanceUID = self._dicom_study_instance_uid

        # Series Instance UID tag 0x0020,0x000E (type UI - Unique Identifier)
        # self._dose_dicom_series_instance_uid may be set in __init__ when creating a new object
        #   Series Instance UID might differ from the Series Instance UID for CTs
        ds.SeriesInstanceUID = self._dose_dicom_series_instance_uid

        # Bind to rtplan
        rt_set = Dataset()
        rt_set.RefdSOPClassUID = '1.2.840.10008.5.1.4.1.1.481.5'
        rt_set.RefdSOPInstanceUID = '1.2.3'
        ds.ReferencedRTPlanSequence = Sequence([rt_set])
        pixel_array = np.zeros((len(self.cube), ds.Rows, ds.Columns), dtype=self.pydata_type)
        pixel_array[:] = self.cube
        ds.PixelData = pixel_array.tobytes()
        return ds
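A minimal usage sketch for the method above; the DosCube class and its read() method follow the pytrip98 documentation, and the input file name is hypothetical:

from pytrip import DosCube

dos = DosCube()
dos.read("plan.dos")          # hypothetical TRiP98 dose cube file
rtdose = dos.create_dicom()   # the method shown above
print(rtdose.Modality)        # 'RTDOSE'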
Example #4
 def test_conversion_rq(self):
     """ Check conversion to a -RQ PDU produces the correct output """
     primitive = C_MOVE_ServiceParameters()
     primitive.MessageID = 7
     primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
     primitive.Priority = 0x02
     primitive.MoveDestination = validate_ae_title("MOVE_SCP")
     
     refIdentifier = Dataset()
     refIdentifier.PatientID = '*'
     refIdentifier.QueryRetrieveLevel = "PATIENT"
     
     primitive.Identifier = BytesIO(encode(refIdentifier, True, True))
     
     dimse_msg = C_MOVE_RQ()
     dimse_msg.primitive_to_message(primitive)
     
     pdvs = dimse_msg.Encode(1, 16382)
     
     # Command Set
     ref = b'\x03\x00\x00\x00\x00\x04\x00\x00\x00\x62\x00\x00\x00\x00\x00\x02' \
           b'\x00\x1a\x00\x00\x00\x31\x2e\x32\x2e\x38\x34\x30\x2e\x31\x30\x30' \
           b'\x30\x38\x2e\x35\x2e\x31\x2e\x34\x2e\x31\x2e\x31\x2e\x32\x00\x00' \
           b'\x00\x00\x01\x02\x00\x00\x00\x21\x00\x00\x00\x10\x01\x02\x00\x00' \
           b'\x00\x07\x00\x00\x00\x00\x06\x10\x00\x00\x00\x4d\x4f\x56\x45\x5f' \
           b'\x53\x43\x50\x20\x20\x20\x20\x20\x20\x20\x20\x00\x00\x00\x07\x02' \
           b'\x00\x00\x00\x02\x00\x00\x00\x00\x08\x02\x00\x00\x00\x01\x00'
     self.assertEqual(pdvs[0].presentation_data_value_list[0][1], ref)
     
     # Dataset
     ref = b'\x02\x08\x00\x52\x00\x08\x00\x00\x00\x50\x41\x54\x49\x45\x4e\x54' \
           b'\x20\x10\x00\x20\x00\x02\x00\x00\x00\x2a\x20'
     self.assertEqual(pdvs[1].presentation_data_value_list[0][1], ref)
Example #5
 def test_assignment(self):
     """ Check assignment works correctly """
     primitive = C_MOVE_ServiceParameters()
     
     primitive.MessageID = 11
     self.assertEqual(primitive.MessageID, 11)
     
     primitive.MessageIDBeingRespondedTo = 13
     self.assertEqual(primitive.MessageIDBeingRespondedTo, 13)
     
     primitive.AffectedSOPClassUID = '1.2.4.10'
     self.assertEqual(primitive.AffectedSOPClassUID, '1.2.4.10')
     
     primitive.Priority = 0x02
     self.assertEqual(primitive.Priority, 0x02)
     
     primitive.MoveDestination = 'UNITTEST_SCP'
     self.assertEqual(primitive.MoveDestination, b'UNITTEST_SCP    ')
     
     refDataset = Dataset()
     refDataset.PatientID = 1234567
     
     primitive.Identifier = BytesIO(encode(refDataset, True, True))
     #self.assertEqual(primitive.DataSet, refDataset)
     
     primitive.Status = 0x0000
     self.assertEqual(primitive.Status, 0x0000)
     
     primitive.Status = 0xC123
     self.assertEqual(primitive.Status, 0xC123)
Example #6
 def testAddEntry(self):
     """dicom_dictionary: Can add and use a single dictionary entry"""
     add_dict_entry(0x10011001, "UL", "TestOne", "Test One")
     add_dict_entry(0x10011002, "DS", "TestTwo", "Test Two", VM='3')
     ds = Dataset()
     ds.TestOne = 'test'
     ds.TestTwo = ['1', '2', '3']
Example #7
 def testSetNewDataElementByName(self):
     """Dataset: set new data_element by name............................"""
     ds = Dataset()
     ds.TreatmentMachineName = "unit #1"
     data_element = ds[0x300A, 0x00B2]
     self.assertEqual(data_element.value, "unit #1", "Unable to set data_element by name")
     self.assertEqual(data_element.VR, "SH", "data_element not the expected VR")
Example #8
 def test_conversion_rsp(self):
     """ Check conversion to a -RSP PDU produces the correct output """
     primitive = C_FIND_ServiceParameters()
     primitive.MessageIDBeingRespondedTo = 5
     primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
     primitive.Status = 0xFF00
     
     refIdentifier = Dataset()
     refIdentifier.QueryRetrieveLevel = "PATIENT"
     refIdentifier.RetrieveAETitle = validate_ae_title("FINDSCP")
     refIdentifier.PatientName = "ANON^A^B^C^D"
     
     primitive.Identifier = BytesIO(encode(refIdentifier, True, True))
     
     dimse_msg = C_FIND_RSP()
     dimse_msg.primitive_to_message(primitive)
     
     pdvs = dimse_msg.Encode(1, 16382)
     
     # Command Set
     ref = b'\x03\x00\x00\x00\x00\x04\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x02' \
           b'\x00\x1a\x00\x00\x00\x31\x2e\x32\x2e\x38\x34\x30\x2e\x31\x30\x30' \
           b'\x30\x38\x2e\x35\x2e\x31\x2e\x34\x2e\x31\x2e\x31\x2e\x32\x00\x00' \
           b'\x00\x00\x01\x02\x00\x00\x00\x20\x80\x00\x00\x20\x01\x02\x00\x00' \
           b'\x00\x05\x00\x00\x00\x00\x08\x02\x00\x00\x00\x01\x00\x00\x00\x00' \
           b'\x09\x02\x00\x00\x00\x00\xff'
     self.assertEqual(pdvs[0].presentation_data_value_list[0][1], ref)
     
     ref = b'\x02\x08\x00\x52\x00\x08\x00\x00\x00\x50\x41\x54\x49\x45\x4e\x54' \
           b'\x20\x08\x00\x54\x00\x10\x00\x00\x00\x46\x49\x4e\x44\x53\x43\x50' \
           b'\x20\x20\x20\x20\x20\x20\x20\x20\x20\x10\x00\x10\x00\x0c\x00\x00' \
           b'\x00\x41\x4e\x4f\x4e\x5e\x41\x5e\x42\x5e\x43\x5e\x44'
     self.assertEqual(pdvs[1].presentation_data_value_list[0][1], ref)
Example #9
    def testEqualitySequence(self):
        """Dataset: equality returns correct value when dataset has sequences"""
        # Test even sequences
        d = Dataset()
        d.SOPInstanceUID = "1.2.3.4"
        d.BeamSequence = []
        beam_seq = Dataset()
        beam_seq.PatientName = "ANON"
        d.BeamSequence.append(beam_seq)
        self.assertTrue(d == d)

        e = Dataset()
        e.SOPInstanceUID = "1.2.3.4"
        e.BeamSequence = []
        beam_seq = Dataset()
        beam_seq.PatientName = "ANON"
        e.BeamSequence.append(beam_seq)
        self.assertTrue(d == e)

        e.BeamSequence[0].PatientName = "ANONY"
        self.assertFalse(d == e)

        # Test uneven sequences
        e.BeamSequence[0].PatientName = "ANON"
        self.assertTrue(d == e)

        e.BeamSequence.append(beam_seq)
        self.assertFalse(d == e)

        d.BeamSequence.append(beam_seq)
        self.assertTrue(d == e)
        d.BeamSequence.append(beam_seq)
        self.assertFalse(d == e)
Example #10
 def test_assignment(self):
     """ Check assignment works correctly """
     primitive = C_FIND_ServiceParameters()
     
     primitive.MessageID = 11
     self.assertEqual(primitive.MessageID, 11)
     
     primitive.MessageIDBeingRespondedTo = 13
     self.assertEqual(primitive.MessageIDBeingRespondedTo, 13)
     
     primitive.AffectedSOPClassUID = '1.2.4.10'
     self.assertEqual(primitive.AffectedSOPClassUID, '1.2.4.10')
     
     primitive.Priority = 0x02
     self.assertEqual(primitive.Priority, 0x02)
     
     refDataset = Dataset()
     refDataset.PatientID = '*'
     refDataset.QueryRetrieveLevel = "PATIENT"
     
     primitive.Identifier = BytesIO(encode(refDataset, True, True))
     #self.assertEqual(primitive.DataSet, refDataset)
     
     primitive.Status = 0x0000
     self.assertEqual(primitive.Status, 0x0000)
     
     primitive.Status = 0xC123
     self.assertEqual(primitive.Status, 0xC123)
Example #11
def _build_message_classes(message_name):
    """
    Create a new subclass instance of DIMSEMessage for the given DIMSE
    `message_name`.
    
    Parameters
    ----------
    message_name : str
        The name/type of message class to construct, one of the following:
        * C-ECHO-RQ
        * C-ECHO-RSP
        * C-STORE-RQ
        * C-STORE-RSP
        * C-FIND-RQ
        * C-FIND-RSP
        * C-GET-RQ
        * C-GET-RSP
        * C-MOVE-RQ
        * C-MOVE-RSP
        * C-CANCEL-RQ
        * N-EVENT-REPORT-RQ
        * N-EVENT-REPORT-RSP
        * N-GET-RQ
        * N-GET-RSP
        * N-SET-RQ
        * N-SET-RSP
        * N-ACTION-RQ
        * N-ACTION-RSP
        * N-CREATE-RQ
        * N-CREATE-RSP
        * N-DELETE-RQ
        * N-DELETE-RSP
    """
    def __init__(self):
        DIMSEMessage.__init__(self)

    # Create new subclass of DIMSE Message using the supplied name
    #   but replace hyphens with underscores
    cls = type(message_name.replace('-', '_'), 
                (DIMSEMessage,), 
                {"__init__": __init__})

    # Create a new Dataset object for the command_set attributes
    d = Dataset()
    for elem_tag in command_set_elem[message_name]:
        tag = Tag(elem_tag)
        vr = dcm_dict[elem_tag][0]

        # If the required command set elements are expanded this will need
        #   to be checked to ensure it functions OK
        try:
            d.add_new(tag, vr, None)
        except Exception:
            d.add_new(tag, vr, '')

    cls.command_set = d

    globals()[cls.__name__] = cls
    
    return cls
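A self-contained sketch of the same three-argument type() pattern used above, with a stand-in base class instead of DIMSEMessage:

class Base(object):
    pass

def build_class(name):
    def __init__(self):
        Base.__init__(self)
    # type(name, bases, namespace) builds the subclass at runtime;
    # hyphens are not valid in Python identifiers, so swap them out
    return type(name.replace('-', '_'), (Base,), {'__init__': __init__})

cls = build_class('C-ECHO-RQ')
assert cls.__name__ == 'C_ECHO_RQ' and issubclass(cls, Base)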
Example #12
 def testSetNonDicom(self):
     """Dataset: can set class instance property (non-dicom)............."""
     ds = Dataset()
     ds.SomeVariableName = 42
     has_it = hasattr(ds, "SomeVariableName")
     self.assertTrue(has_it, "Variable did not get created")
     if has_it:
         self.assertEqual(ds.SomeVariableName, 42, "There, but wrong value")
Example #13
 def test_top(self):
     """Test Dataset.top returns only top level str"""
     ds = Dataset()
     ds.PatientName = 'CITIZEN^Jan'
     ds.BeamSequence = [Dataset()]
     ds.BeamSequence[0].PatientID = 'JAN^Citizen'
     assert "Patient's Name" in ds.top()
     assert "Patient ID" not in ds.top()
Example #14
 def test_get_pixel_array_already_have(self):
     """Test Dataset._get_pixel_array when we already have the array"""
     # Test that _pixel_array is returned unchanged unless required
     ds = Dataset()
     ds.PixelData = b'\x00'
     ds._pixel_id = id(ds.PixelData)
     ds._pixel_array = 'Test Value'
     assert ds._get_pixel_array() == 'Test Value'
Example #15
def on_c_store(dataset):
    """
    Function replacing ApplicationEntity.on_store(). Called when a dataset is
    received following a C-STORE request. Writes the received dataset to file.

    Parameters
    ----------
    dataset : pydicom.Dataset
        The DICOM dataset sent via the C-STORE
            
    Returns
    -------
    status : pynetdicom.SOPclass.Status or int
        A valid return status code, see PS3.4 Annex B.2.3 or the 
        StorageServiceClass implementation for the available statuses
    """
    mode_prefix = 'UN'
    mode_prefixes = {'CT Image Storage' : 'CT',
                     'Enhanced CT Image Storage' : 'CTE',
                     'MR Image Storage' : 'MR',
                     'Enhanced MR Image Storage' : 'MRE',
                     'Positron Emission Tomography Image Storage' : 'PT',
                     'Enhanced PET Image Storage' : 'PTE',
                     'RT Image Storage' : 'RI',
                     'RT Dose Storage' : 'RD',
                     'RT Plan Storage' : 'RP',
                     'RT Structure Set Storage' : 'RS',
                     'Computed Radiography Image Storage' : 'CR',
                     'Ultrasound Image Storage' : 'US',
                     'Enhanced Ultrasound Image Storage' : 'USE',
                     'X-Ray Angiographic Image Storage' : 'XA',
                     'Enhanced XA Image Storage' : 'XAE',
                     'Nuclear Medicine Image Storage' : 'NM',
                     'Secondary Capture Image Storage' : 'SC'}

    try:
        mode_prefix = mode_prefixes[str(dataset.SOPClassUID)]
    except (KeyError, AttributeError):
        pass

    filename = '%s.%s' % (mode_prefix, dataset.SOPInstanceUID)
    logger.info('Storing DICOM file: %s' % filename)
    
    if os.path.exists(filename):
        logger.warning('DICOM file already exists, overwriting')
    
    meta = Dataset()
    meta.MediaStorageSOPClassUID = dataset.SOPClassUID
    meta.MediaStorageSOPInstanceUID = dataset.SOPInstanceUID
    meta.ImplementationClassUID = pynetdicom_uid_prefix
    
    ds = FileDataset(filename, {}, file_meta=meta, preamble=b"\0" * 128)
    ds.update(dataset)
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    ds.save_as(filename)
        
    return 0x0000 # Success
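A hedged sketch of wiring the handler into an application entity; the import path, the StorageSOPClassList name and the AE constructor arguments are assumptions about the legacy pynetdicom API this example targets, and they varied between releases:

from pynetdicom3 import AE, StorageSOPClassList  # assumed legacy names

ae = AE(ae_title=b'STORESCP', port=11112, scp_sop_class=StorageSOPClassList)
ae.on_c_store = on_c_store  # attach the handler defined above
ae.start()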
Example #16
 def test_conversion_rq(self):
     """ Check conversion to a -RQ PDU produces the correct output """
     primitive = C_FIND_ServiceParameters()
     primitive.MessageID = 7
     primitive.AffectedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
     primitive.Priority = 0x02
     
     refIdentifier = Dataset()
     refIdentifier.PatientID = '*'
     refIdentifier.QueryRetrieveLevel = "PATIENT"
     
     primitive.Identifier = BytesIO(encode(refIdentifier, True, True))
     
     dimse_msg = C_FIND_RQ()
     dimse_msg.primitive_to_message(primitive)
     
     pdvs = dimse_msg.Encode(1, 16382)
     
     ## Command Set
     # \x03 Message Control Header Byte
     #
     # \x00\x00\x00\x00\x04\x00\x00\x00\x4a\x00\x00\x00
     # (0000, 0000) UL [74] # 4, 1 Command Group Length
     #
     # \x00\x00\x02\x00\x1a\x00\x00\x00\x31\x2e\x32\x2e\x38\x34\x30\x2e
     # \x31\x30\x30\x30\x38\x2e\x35\x2e\x31\x2e\x34\x2e\x31\x2e\x31\x2e
     # \x32\x00
     # (0000, 0002) UI [1.2.840.10008.5.1.4.1.1.2] #  26, 1 Affected SOP Class UID (if odd length, trailing 0x00)
     #
     # \x00\x00\x00\x01\x02\x00\x00\x00\x20\x00
     # (0000, 0100) US [0x0020] #  2, 1 Command Field
     #
     # \x00\x00\x10\x01\x02\x00\x00\x00\x07\x00
     # (0000, 0110) US [7] #  2, 1 Message ID
     #
     # \x00\x00\x00\x07\x02\x00\x00\x00\x02\x00
     # (0000, 0700) US [2] #  2, 1 Priority
     #
     # \x00\x00\x00\x08\x02\x00\x00\x00\x01\x00
     # (0000, 0800) US [1] #  2, 1 Command Data Set Type
     ref = b'\x03\x00\x00\x00\x00\x04\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x02' \
           b'\x00\x1a\x00\x00\x00\x31\x2e\x32\x2e\x38\x34\x30\x2e\x31\x30\x30' \
           b'\x30\x38\x2e\x35\x2e\x31\x2e\x34\x2e\x31\x2e\x31\x2e\x32\x00\x00' \
           b'\x00\x00\x01\x02\x00\x00\x00\x20\x00\x00\x00\x10\x01\x02\x00\x00' \
           b'\x00\x07\x00\x00\x00\x00\x07\x02\x00\x00\x00\x02\x00\x00\x00\x00' \
           b'\x08\x02\x00\x00\x00\x01\x00'
     self.assertEqual(pdvs[0].presentation_data_value_list[0][1], ref)
     
     ## Dataset
     # \x02 Message Control Header Byte
     #
     # \x08\x00\x52\x00\x08\x00\x00\x00\x50\x41\x54\x49\x45\x4e\x54\x20
     # (0008, 0052) CS [PATIENT ] #  8, 1 Query/Retrieve Level (leading/trailing spaces non-significant)
     #
     # \x10\x00\x20\x00\x02\x00\x00\x00\x2a\x20
     # (0010, 0020) LO [* ] #  2, 1 Patient ID (may be padded with leading/trailing spaces)
     ref = b'\x02\x08\x00\x52\x00\x08\x00\x00\x00\x50\x41\x54\x49\x45\x4e\x54' \
           b'\x20\x10\x00\x20\x00\x02\x00\x00\x00\x2a\x20'
     self.assertEqual(pdvs[1].presentation_data_value_list[0][1], ref)
Example #17
    def test_write_explicit_vr_raises(self):
        """Test writing explicit vr raises exception if unsolved element."""
        ds = Dataset()
        ds.PerimeterValue = b'\x00\x01'

        def test():
            write_dataset(self.fp, ds)

        self.assertRaises(ValueError, test)
Example #18
    def testValidInitialization(self):
        """Sequence: Ensure valid creation of Sequences using Dataset inputs"""
        inputs = {"PatientPosition": "HFS", "PatientSetupNumber": "1", "SetupTechniqueDescription": ""}
        patientSetups = Dataset()
        patientSetups.update(inputs)

        # Construct the sequence
        seq = Sequence((patientSetups,))
        self.assertTrue(isinstance(seq[0], Dataset), "Dataset modified during Sequence creation")
Example #19
 def testUID(self):
     """DataElement: setting or changing UID results in UID type........."""
     ds = Dataset()
     ds.TransferSyntaxUID = "1.2.3"
     self.assertTrue(isinstance(ds.TransferSyntaxUID, UID),
                     "Assignment to UID did not create UID class")
     ds.TransferSyntaxUID += ".4.5.6"
     self.assertTrue(isinstance(ds.TransferSyntaxUID, UID),
                     "+= to UID did not keep as UID class")
Example #20
 def test_non_fatal_errors(self):
     dataset = Dataset()
     dataset.SOPClassUID = '1.2.840.10008.5.1.4.1.1.2'  # CT Image Storage
     file_dataset = FileDataset('test', dataset, file_meta=self.create_metadata())
     write_file('test', file_dataset, write_like_original=False)
     error_dict = self.validator.validate('test')
     self.assertEqual(1, len(error_dict))
     errors = error_dict['test']
     self.assertNotIn('fatal', errors)
Example #21
    def test_creation_with_container(self):
        """FileDataset.__init__ works OK with a container such as gzip"""
        class Dummy(object):
            filename = '/some/path/to/test'

        ds = Dataset()
        ds.PatientName = "CITIZEN^Jan"
        fds = FileDataset(Dummy(), ds)
        assert fds.filename == '/some/path/to/test'
Example #22
    def test_inequality(self):
        """Test inequality operator"""
        d = Dataset()
        d.SOPInstanceUID = '1.2.3.4'
        self.assertFalse(d != d)

        e = Dataset()
        e.SOPInstanceUID = '1.2.3.5'
        self.assertTrue(d != e)
Example #23
    def testEqualityUnknown(self):
        """Dataset: equality returns correct value with extra members """
        # Non-element class members are ignored in equality testing
        d = Dataset()
        d.SOPEustaceUID = '1.2.3.4'
        assert d == d

        e = Dataset()
        e.SOPEustaceUID = '1.2.3.5'
        assert d == e
Example #24
 def testAddEntries(self):
     """dicom_dictionary: add and use a dict of new dictionary entries"""
     new_dict_items = {
         0x10011001: ('UL', '1', "Test One", '', 'TestOne'),
         0x10011002: ('DS', '3', "Test Two", '', 'TestTwo'),
         }
     add_dict_entries(new_dict_items)
     ds = Dataset()
     ds.TestOne = 'test'
     ds.TestTwo = ['1', '2', '3']
Example #25
    def testValidAssignment(self):
        """Sequence: ensure ability to assign a Dataset to a Sequence item"""
        ds = Dataset()
        ds.add_new((1, 1), 'IS', 1)

        # Create a single element Sequence first
        seq = Sequence([Dataset(), ])
        seq[0] = ds

        self.assertEqual(seq[0], ds, "Dataset modified during assignment")
Example #26
 def new_data_set(tags):
     """ Create a DICOM data set with the given attributes """
     tags = tags or {}
     data_set = Dataset()
     for tag_name, value in tags.items():
         setattr(data_set, tag_name, value)
     data_set.file_meta = Dataset()
     data_set.is_implicit_VR = False
     data_set.is_little_endian = True
     return data_set
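Usage sketch for the helper above; any pydicom element keyword works as a key:

ds = new_data_set({'PatientName': 'CITIZEN^Jan', 'PatientID': '123456'})
assert ds.PatientName == 'CITIZEN^Jan'
assert not ds.is_implicit_VR and ds.is_little_endian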
Example #27
    def test_group_dataset(self):
        """Test Dataset.group_dataset"""
        ds = Dataset()
        ds.CommandGroupLength = 120  # 0000,0000
        ds.CommandLengthToEnd = 111  # 0000,0001
        ds.Overlays = 12  # 0000,51B0
        ds.LengthToEnd = 12  # 0008,0001
        ds.SOPInstanceUID = '1.2.3.4'  # 0008,0018
        ds.SkipFrameRangeFlag = 'TEST'  # 0008,9460

        # Test getting group 0x0000
        group0000 = ds.group_dataset(0x0000)
        self.assertTrue('CommandGroupLength' in group0000)
        self.assertTrue('CommandLengthToEnd' in group0000)
        self.assertTrue('Overlays' in group0000)
        self.assertFalse('LengthToEnd' in group0000)
        self.assertFalse('SOPInstanceUID' in group0000)
        self.assertFalse('SkipFrameRangeFlag' in group0000)

        # Test getting group 0x0008
        group0008 = ds.group_dataset(0x0008)
        self.assertFalse('CommandGroupLength' in group0008)
        self.assertFalse('CommandLengthToEnd' in group0008)
        self.assertFalse('Overlays' in group0008)
        self.assertTrue('LengthToEnd' in group0008)
        self.assertTrue('SOPInstanceUID' in group0008)
        self.assertTrue('SkipFrameRangeFlag' in group0008)
Example #28
    def test_version(self):
        """Test that the value for FileMetaInformationVersion is OK."""
        meta = Dataset()
        meta.MediaStorageSOPClassUID = '1.1'
        meta.MediaStorageSOPInstanceUID = '1.2'
        meta.TransferSyntaxUID = '1.3'
        meta.ImplementationClassUID = '1.4'
        _write_file_meta_info(self.fp, meta)

        self.fp.seek(16 + 12)
        self.assertEqual(self.fp.read(2), b'\x00\x01')
Example #29
def on_c_find(dataset):
    basedir = "../test/dicom_files/"
    dcm_files = ["CTImageStorage.dcm"]
    dcm_files = [os.path.join(basedir, x) for x in dcm_files]
    for dcm in dcm_files:
        data = read_file(dcm, force=True)

        d = Dataset()
        d.QueryRetrieveLevel = dataset.QueryRetrieveLevel
        d.RetrieveAETitle = args.aetitle
        d.PatientName = data.PatientName
        yield d
Example #30
    def testEqualityUnknown(self):
        """Dataset: equality returns correct value with extra members """
        d = Dataset()
        d.SOPEustaceUID = "1.2.3.4"
        self.assertTrue(d == d)

        e = Dataset()
        e.SOPEustaceUID = "1.2.3.4"
        self.assertTrue(d == e)

        e.SOPEustaceUID = "1.2.3.5"
        self.assertFalse(d == e)
Example #31
 def test_repr_seq(self):
     """Test DataElement.__repr__ with a sequence"""
     elem = DataElement(0x300A00B0, 'SQ', [Dataset()])
     elem[0].PatientID = '1234'
     assert repr(elem) == repr(elem.value)
Example #32
 def dummy_dataset(self):
     # This dataset is used by many of the tests
     ds = Dataset()
     ds.add_new((0x300a, 0x00b2), "SH", "unit001")  # TreatmentMachineName
     return ds
Example #33
class DatasetElementsTests(unittest.TestCase):
    """Test valid assignments of data elements"""
    def setUp(self):
        self.ds = Dataset()
        self.sub_ds1 = Dataset()
        self.sub_ds2 = Dataset()

    def testSequenceAssignment(self):
        """Assignment to SQ works only if valid Sequence assigned......"""
        msg = ("Assigning non-sequence to "
               "SQ data element did not raise error")
        with self.assertRaises(TypeError, msg=msg):
            self.ds.ConceptCodeSequence = [1, 2, 3]
        # check also that assigning proper sequence *does* work
        self.ds.ConceptCodeSequence = [self.sub_ds1, self.sub_ds2]
        self.assertTrue(isinstance(self.ds.ConceptCodeSequence, Sequence),
                        "Sequence assignment did not result in Sequence type")

    def test_ensure_file_meta(self):
        assert not hasattr(self.ds, 'file_meta')
        self.ds.ensure_file_meta()
        assert hasattr(self.ds, 'file_meta')
        assert not self.ds.file_meta

    def test_fix_meta_info(self):
        self.ds.is_little_endian = True
        self.ds.is_implicit_VR = True
        self.ds.fix_meta_info(enforce_standard=False)
        assert self.ds.file_meta.TransferSyntaxUID == ImplicitVRLittleEndian

        self.ds.is_implicit_VR = False
        self.ds.fix_meta_info(enforce_standard=False)
        # transfer syntax does not change because of ambiguity
        assert self.ds.file_meta.TransferSyntaxUID == ImplicitVRLittleEndian

        self.ds.is_little_endian = False
        self.ds.is_implicit_VR = True
        with pytest.raises(NotImplementedError):
            self.ds.fix_meta_info()

        self.ds.is_implicit_VR = False
        self.ds.fix_meta_info(enforce_standard=False)
        assert self.ds.file_meta.TransferSyntaxUID == ExplicitVRBigEndian

        assert 'MediaStorageSOPClassUID' not in self.ds.file_meta
        assert 'MediaStorageSOPInstanceUID' not in self.ds.file_meta
        with pytest.raises(ValueError, match='Missing required File Meta .*'):
            self.ds.fix_meta_info(enforce_standard=True)

        self.ds.SOPClassUID = '1.2.3'
        self.ds.SOPInstanceUID = '4.5.6'
        self.ds.fix_meta_info(enforce_standard=False)
        assert self.ds.file_meta.MediaStorageSOPClassUID == '1.2.3'
        assert self.ds.file_meta.MediaStorageSOPInstanceUID == '4.5.6'
        self.ds.fix_meta_info(enforce_standard=True)

        self.ds.file_meta.PatientID = 'PatientID'
        with pytest.raises(ValueError,
                           match=r'Only File Meta Information Group '
                           r'\(0002,eeee\) elements must be present .*'):
            self.ds.fix_meta_info(enforce_standard=True)

    def test_validate_and_correct_file_meta(self):
        file_meta = Dataset()
        validate_file_meta(file_meta, enforce_standard=False)
        with pytest.raises(ValueError):
            validate_file_meta(file_meta, enforce_standard=True)

        file_meta.PatientID = 'PatientID'
        for enforce_standard in (True, False):
            with pytest.raises(ValueError,
                               match=r'Only File Meta Information Group '
                               r'\(0002,eeee\) elements must be present .*'):
                validate_file_meta(file_meta,
                                   enforce_standard=enforce_standard)

        file_meta = Dataset()
        file_meta.MediaStorageSOPClassUID = '1.2.3'
        file_meta.MediaStorageSOPInstanceUID = '1.2.4'
        # still missing TransferSyntaxUID
        with pytest.raises(ValueError):
            validate_file_meta(file_meta, enforce_standard=True)

        file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
        validate_file_meta(file_meta, enforce_standard=True)

        # check the default created values
        assert file_meta.FileMetaInformationVersion == b'\x00\x01'
        assert file_meta.ImplementationClassUID == PYDICOM_IMPLEMENTATION_UID
        assert file_meta.ImplementationVersionName.startswith('PYDICOM ')

        file_meta.ImplementationClassUID = '1.2.3.4'
        file_meta.ImplementationVersionName = 'ACME LTD'
        validate_file_meta(file_meta, enforce_standard=True)
        # check that existing values are left alone
        assert file_meta.ImplementationClassUID == '1.2.3.4'
        assert file_meta.ImplementationVersionName == 'ACME LTD'
Example #34
 def test_hash():
     d = Dataset()
     d.PatientName = 'ANON'
     hash(d)
Example #35
    def testEqualitySequence(self):
        """Dataset: equality returns correct value"""
        """when dataset has sequences"""
        # Test even sequences
        d = Dataset()
        d.SOPInstanceUID = '1.2.3.4'
        d.BeamSequence = []
        beam_seq = Dataset()
        beam_seq.PatientID = '1234'
        beam_seq.PatientName = 'ANON'
        d.BeamSequence.append(beam_seq)
        self.assertTrue(d == d)

        e = Dataset()
        e.SOPInstanceUID = '1.2.3.4'
        e.BeamSequence = []
        beam_seq = Dataset()
        beam_seq.PatientName = 'ANON'
        beam_seq.PatientID = '1234'
        e.BeamSequence.append(beam_seq)
        self.assertTrue(d == e)

        e.BeamSequence[0].PatientName = 'ANONY'
        self.assertFalse(d == e)

        # Test uneven sequences
        e.BeamSequence[0].PatientName = 'ANON'
        self.assertTrue(d == e)

        e.BeamSequence.append(beam_seq)
        self.assertFalse(d == e)

        d.BeamSequence.append(beam_seq)
        self.assertTrue(d == e)
        d.BeamSequence.append(beam_seq)
        self.assertFalse(d == e)
Example #36
# Convert PNG and BMP files
if img.format == 'PNG' or img.format == 'BMP':
    img = img.convert('RGB')

# Convert image modes (types/depth of pixels)
# Docs: https://pillow.readthedocs.io/en/3.1.x/handbook/concepts.html
if img.mode == 'L':
    # grayscale: build the frame array from the raw pixel data
    np_frame = numpy.array(img.getdata(), dtype=numpy.uint8)
elif img.mode == 'RGBA' or img.mode == 'RGB':
    np_frame = numpy.array(img.getdata(), dtype=numpy.uint8)
else:
    print("Unknown image mode")
    return

# Create DICOM from scratch
ds = Dataset()
ds.file_meta = Dataset()
ds.file_meta.TransferSyntaxUID = pydicom.uid.ExplicitVRLittleEndian
ds.file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.1.1'
ds.file_meta.MediaStorageSOPInstanceUID = "1.2.3"
ds.file_meta.ImplementationClassUID = "1.2.3.4"

ds.PatientName = 'Created'

ds.Rows = img.height
ds.Columns = img.width
ds.PhotometricInterpretation = "YBR_FULL_422"
if np_frame.ndim > 1 and np_frame.shape[1] == 3:
    ds.SamplesPerPixel = 3
else:
    ds.SamplesPerPixel = 1
Example #37
 def test_getitem_slice_raises(self):
     """Test Dataset.__getitem__ raises if slice Tags invalid."""
     ds = Dataset()
     self.assertRaises(ValueError, ds.__getitem__, slice(None, -1))
     self.assertRaises(ValueError, ds.__getitem__, slice(-1, -1))
     self.assertRaises(ValueError, ds.__getitem__, slice(-1))
Example #38
def _correct_ambiguous_vr_element(elem: DataElement, ds: Dataset,
                                  is_little_endian: bool) -> DataElement:
    """Implementation for `correct_ambiguous_vr_element`.
    See `correct_ambiguous_vr_element` for description.
    """
    # 'OB or OW': 7fe0,0010 PixelData
    if elem.tag == 0x7fe00010:
        # Compressed Pixel Data
        # PS3.5 Annex A.4
        #   If encapsulated, VR is OB and length is undefined
        if elem.is_undefined_length:
            elem.VR = VR.OB
        elif ds.is_implicit_VR:
            # Non-compressed Pixel Data - Implicit Little Endian
            # PS3.5 Annex A1: VR is always OW
            elem.VR = VR.OW
        else:
            # Non-compressed Pixel Data - Explicit VR
            # PS3.5 Annex A.2:
            # If BitsAllocated is > 8 then VR shall be OW,
            # else may be OB or OW.
            # If we get here, the data has not been written before
            # or has been converted from Implicit Little Endian,
            # so we default to OB for BitsAllocated 1 or 8
            elem.VR = VR.OW if cast(int, ds.BitsAllocated) > 8 else VR.OB

    # 'US or SS' and dependent on PixelRepresentation
    elif elem.tag in _us_ss_tags:
        # US if PixelRepresentation value is 0x0000, else SS
        #   For references, see the list at
        #   https://github.com/darcymason/pydicom/pull/298
        # PixelRepresentation is usually set in the root dataset
        while 'PixelRepresentation' not in ds and ds.parent and ds.parent():
            ds = cast(Dataset, ds.parent())
        # if no pixel data is present, none of these tags is used,
        # so we can just ignore a missing PixelRepresentation in this case
        if ('PixelRepresentation' not in ds and 'PixelData' not in ds
                or ds.PixelRepresentation == 0):
            elem.VR = VR.US
            byte_type = 'H'
        else:
            elem.VR = VR.SS
            byte_type = 'h'

        if elem.VM == 0:
            return elem

        # Need to handle type check for elements with VM > 1
        elem_value = (elem.value if elem.VM == 1 else cast(
            Sequence[Any], elem.value)[0])
        if not isinstance(elem_value, int):
            elem.value = convert_numbers(cast(bytes, elem.value),
                                         is_little_endian, byte_type)

    # 'OB or OW' and dependent on WaveformBitsAllocated
    elif elem.tag in _ob_ow_tags:
        # If WaveformBitsAllocated is > 8 then OW, otherwise may be
        #   OB or OW.
        #   See PS3.3 C.10.9.1.
        if ds.is_implicit_VR:
            elem.VR = VR.OW
        else:
            elem.VR = (VR.OW
                       if cast(int, ds.WaveformBitsAllocated) > 8 else VR.OB)

    # 'US or OW': 0028,3006 LUTData
    elif elem.tag == 0x00283006:
        # First value in LUT Descriptor is how many values in
        #   LUTData, if there's only one value then must be US
        # As per PS3.3 C.11.1.1.1
        if cast(Sequence[int], ds.LUTDescriptor)[0] == 1:
            elem.VR = VR.US
            if elem.VM == 0:
                return elem

            elem_value = (elem.value if elem.VM == 1 else cast(
                Sequence[Any], elem.value)[0])
            if not isinstance(elem_value, int):
                elem.value = convert_numbers(cast(bytes, elem.value),
                                             is_little_endian, 'H')
        else:
            elem.VR = VR.OW

    # 'OB or OW': 60xx,3000 OverlayData and dependent on Transfer Syntax
    elif elem.tag in _overlay_data_tags:
        # Implicit VR must be OW, explicit VR may be OB or OW
        #   as per PS3.5 Section 8.1.2 and Annex A
        elem.VR = VR.OW

    return elem
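A short demonstration through pydicom's public correct_ambiguous_vr wrapper, which applies the element-level logic above to a whole dataset; a minimal sketch in which PixelRepresentation of 0 resolves an ambiguous 'US or SS' element to US:

from pydicom.dataset import Dataset
from pydicom.filewriter import correct_ambiguous_vr

ds = Dataset()
ds.PixelRepresentation = 0                # unsigned pixel data
ds.SmallestValidPixelValue = b'\x00\x01'  # raw bytes, VR is 'US or SS'
ds = correct_ambiguous_vr(ds, True)       # resolve for little endian
assert ds['SmallestValidPixelValue'].VR == 'US'
assert ds.SmallestValidPixelValue == 256  # b'\x00\x01' read as little endian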
Example #39
    def __init__(
        self,
        name: Union[Code, CodedConcept],
        value: Optional[Union[int, float]] = None,
        unit: Optional[Union[Code, CodedConcept]] = None,
        qualifier: Optional[Union[Code, CodedConcept]] = None,
        relationship_type: Optional[Union[str,
                                          RelationshipTypeValues]] = None):
        """
        Parameters
        ----------
        name: Union[highdicom.sr.coding.CodedConcept, pydicom.sr.coding.Code]
            concept name
        value: Union[int, float], optional
            numeric value
        unit: Union[highdicom.sr.coding.CodedConcept, pydicom.sr.coding.Code], optional
            coded units of measurement (see CID 7181 "Abstract Multi-dimensional
            Image Model Component Units")
        qualifier: Union[highdicom.sr.coding.CodedConcept, pydicom.sr.coding.Code], optional
            qualification of numeric value or as an alternative to
            numeric value, e.g., reason for absence of numeric value
            (see CID 42 "Numeric Value Qualifier" for options)
        relationship_type: Union[highdicom.sr.enum.RelationshipTypeValues, str], optional
            type of relationship with parent content item

        Note
        ----
        Either `value` and `unit` or `qualifier` must be specified.

        """ # noqa
        super(NumContentItem, self).__init__(ValueTypeValues.NUM, name,
                                             relationship_type)
        if value is not None:
            self.MeasuredValueSequence = []
            measured_value_sequence_item = Dataset()
            if not isinstance(value, (
                    int,
                    float,
            )):
                raise TypeError(
                    'Argument "value" must have type "int" or "float".')
            measured_value_sequence_item.NumericValue = value
            if isinstance(value, float):
                measured_value_sequence_item.FloatingPointValue = value
            if not isinstance(unit, (
                    CodedConcept,
                    Code,
            )):
                raise TypeError(
                    'Argument "unit" must have type CodedConcept or Code.')
            if isinstance(unit, Code):
                unit = CodedConcept(*unit)
            measured_value_sequence_item.MeasurementUnitsCodeSequence = [unit]
            self.MeasuredValueSequence.append(measured_value_sequence_item)
        elif qualifier is not None:
            if not isinstance(qualifier, (
                    CodedConcept,
                    Code,
            )):
                raise TypeError(
                    'Argument "qualifier" must have type "CodedConcept" or '
                    '"Code".')
            if isinstance(qualifier, Code):
                qualifier = CodedConcept(*qualifier)
            self.NumericValueQualifierCodeSequence = [qualifier]
        else:
            raise ValueError(
                'Either argument "value" or "qualifier" must be specified '
                'upon creation of NumContentItem.')
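A hedged usage sketch for the constructor above; the import paths and the example codes are assumptions to verify against the installed highdicom and pydicom versions:

from pydicom.sr.coding import Code
from highdicom.sr.value_types import NumContentItem
from highdicom.sr.enum import RelationshipTypeValues

item = NumContentItem(
    name=Code('G-D7FE', 'SRT', 'Area'),             # illustrative concept
    value=5.0,
    unit=Code('mm2', 'UCUM', 'square millimeter'),  # illustrative CID 7181 unit
    relationship_type=RelationshipTypeValues.CONTAINS
)
assert float(item.MeasuredValueSequence[0].NumericValue) == 5.0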
Example #40
def dcmwrite(filename: Union[PathType, BinaryIO],
             dataset: Dataset,
             write_like_original: bool = True) -> None:
    """Write `dataset` to the `filename` specified.

    If `write_like_original` is ``True`` then the :class:`Dataset` will be
    written as is (after minimal validation checking) and may or may not
    contain all or parts of the *File Meta Information* (and hence may or
    may not be conformant with the DICOM File Format).

    If `write_like_original` is ``False``, `dataset` will be stored in the
    :dcm:`DICOM File Format <part10/chapter_7.html>`.  To do
    so requires that the ``Dataset.file_meta`` attribute
    exists and contains a :class:`Dataset` with the required (Type 1) *File
    Meta Information Group* elements. The byte stream of the `dataset` will be
    placed into the file after the DICOM *File Meta Information*.

    **File Meta Information**

    The *File Meta Information* consists of a 128-byte preamble, followed by
    a 4 byte ``b'DICM'`` prefix, followed by the *File Meta Information Group*
    elements.

    **Preamble and Prefix**

    The ``dataset.preamble`` attribute shall be 128-bytes long or ``None`` and
    is available for use as defined by the Application Profile or specific
    implementations. If the preamble is not used by an Application Profile or
    specific implementation then all 128 bytes should be set to ``0x00``. The
    actual preamble written depends on `write_like_original` and
    ``dataset.preamble`` (see the table below).

    +------------------+------------------------------+
    |                  | write_like_original          |
    +------------------+-------------+----------------+
    | dataset.preamble | True        | False          |
    +==================+=============+================+
    | None             | no preamble | 128 0x00 bytes |
    +------------------+-------------+----------------+
    | 128 bytes        | dataset.preamble             |
    +------------------+------------------------------+

    The prefix shall be the bytestring ``b'DICM'`` and will be written if and
    only if the preamble is present.

    **File Meta Information Group Elements**

    The preamble and prefix are followed by a set of DICOM elements from the
    (0002,eeee) group. Some of these elements are required (Type 1) while
    others are optional (Type 3/1C). If `write_like_original` is ``True``
    then the *File Meta Information Group* elements are all optional. See
    :func:`~pydicom.filewriter.write_file_meta_info` for more information on
    which elements are required.

    The *File Meta Information Group* elements should be included within their
    own :class:`~pydicom.dataset.Dataset` in the ``dataset.file_meta``
    attribute.

    If (0002,0010) *Transfer Syntax UID* is included then the user must ensure
    its value is compatible with the values for the
    ``dataset.is_little_endian`` and ``dataset.is_implicit_VR`` attributes.
    For example, if ``is_little_endian`` and ``is_implicit_VR`` are both
    ``True`` then the Transfer Syntax UID must be 1.2.840.10008.1.2 *Implicit
    VR Little Endian*. See the DICOM Standard, Part 5,
    :dcm:`Section 10<part05/chapter_10.html>` for more information on Transfer
    Syntaxes.

    *Encoding*

    The preamble and prefix are encoding independent. The File Meta elements
    are encoded as *Explicit VR Little Endian* as required by the DICOM
    Standard.

    **Dataset**

    A DICOM Dataset representing a SOP Instance related to a DICOM Information
    Object Definition. It is up to the user to ensure the `dataset` conforms
    to the DICOM Standard.

    *Encoding*

    The `dataset` is encoded as specified by the ``dataset.is_little_endian``
    and ``dataset.is_implicit_VR`` attributes. It's up to the user to ensure
    these attributes are set correctly (as well as setting an appropriate
    value for ``dataset.file_meta.TransferSyntaxUID`` if present).

    Parameters
    ----------
    filename : str or PathLike or file-like
        Name of file or the file-like to write the new DICOM file to.
    dataset : pydicom.dataset.FileDataset
        Dataset holding the DICOM information; e.g. an object read with
        :func:`~pydicom.filereader.dcmread`.
    write_like_original : bool, optional
        If ``True`` (default), preserves the following information from
        the Dataset (and may result in a non-conformant file):

        - preamble -- if the original file has no preamble then none will be
          written.
        - file_meta -- if the original file was missing any required *File
          Meta Information Group* elements then they will not be added or
          written.
          If (0002,0000) *File Meta Information Group Length* is present then
          it may have its value updated.
        - seq.is_undefined_length -- if original had delimiters, write them now
          too, instead of the more sensible length characters
        - is_undefined_length_sequence_item -- for datasets that belong to a
          sequence, write the undefined length delimiters if that is
          what the original had.

        If ``False``, produces a file conformant with the DICOM File Format,
        with explicit lengths for all elements.

    Raises
    ------
    AttributeError
        If either ``dataset.is_implicit_VR`` or ``dataset.is_little_endian``
        have not been set.
    ValueError
        If group 2 elements are in ``dataset`` rather than
        ``dataset.file_meta``, or if a preamble is given but is not 128 bytes
        long, or if Transfer Syntax is a compressed type and pixel data is not
        compressed.

    See Also
    --------
    pydicom.dataset.Dataset
        Dataset class with relevant attributes and information.
    pydicom.dataset.Dataset.save_as
        Write a DICOM file from a dataset that was read in with ``dcmread()``.
        ``save_as()`` wraps ``dcmwrite()``.
    """
    tsyntax: Optional[UID]
    try:
        tsyntax = dataset.file_meta.TransferSyntaxUID
    except AttributeError:
        tsyntax = None

    cls_name = dataset.__class__.__name__
    encoding = (dataset.is_implicit_VR, dataset.is_little_endian)

    # Ensure is_little_endian and is_implicit_VR are set
    if None in encoding:
        if tsyntax is None:
            raise AttributeError(
                f"'{cls_name}.is_little_endian' and "
                f"'{cls_name}.is_implicit_VR' must be set appropriately "
                "before saving")

        if not tsyntax.is_private:
            dataset.is_little_endian = tsyntax.is_little_endian
            dataset.is_implicit_VR = tsyntax.is_implicit_VR

    if tsyntax and not tsyntax.is_private:
        # PS3.5 Annex A.4 - the length of encapsulated pixel data is undefined
        #   and native pixel data uses actual length
        if "PixelData" in dataset:
            dataset['PixelData'].is_undefined_length = tsyntax.is_compressed

        # PS3.5 Annex A.4 - encapsulated datasets use Explicit VR Little
        if tsyntax.is_compressed and encoding != (False, True):
            warnings.warn(
                "All encapsulated (compressed) transfer syntaxes must use "
                "explicit VR little endian encoding for the dataset. Set "
                f"'{cls_name}.is_little_endian = True' and '{cls_name}."
                "is_implicit_VR = False' before saving")

    # Check that dataset's group 0x0002 elements are only present in the
    #   `dataset.file_meta` Dataset - user may have added them to the wrong
    #   place
    if dataset.group_dataset(0x0002) != Dataset():
        raise ValueError(
            f"File Meta Information Group Elements (0002,eeee) should be in "
            f"their own Dataset object in the "
            f"'{dataset.__class__.__name__}.file_meta' attribute.")

    # A preamble is required under the DICOM standard, however if
    #   `write_like_original` is True we treat it as optional
    preamble = getattr(dataset, 'preamble', None)
    if preamble and len(preamble) != 128:
        raise ValueError(
            f"'{dataset.__class__.__name__}.preamble' must be 128-bytes long.")
    if not preamble and not write_like_original:
        # The default preamble is 128 0x00 bytes.
        preamble = b'\x00' * 128

    # File Meta Information is required under the DICOM standard, however if
    #   `write_like_original` is True we treat it as optional
    if not write_like_original:
        # the checks will be done in write_file_meta_info()
        dataset.fix_meta_info(enforce_standard=False)
    else:
        dataset.ensure_file_meta()

    # Check for decompression, give warnings if inconsistencies
    # If decompressed, then pixel_array is now used instead of PixelData
    if dataset.is_decompressed:
        if dataset.file_meta.TransferSyntaxUID.is_compressed:
            raise ValueError(
                f"The Transfer Syntax UID element in "
                f"'{dataset.__class__.__name__}.file_meta' is compressed "
                f"but the pixel data has been decompressed")

        # Force PixelData to the decompressed version
        dataset.PixelData = dataset.pixel_array.tobytes()

    caller_owns_file = True
    # Open file if not already a file object
    filename = path_from_pathlike(filename)
    if isinstance(filename, str):
        fp = DicomFile(filename, 'wb')
        # caller provided a file name; we own the file handle
        caller_owns_file = False
    else:
        try:
            fp = DicomFileLike(filename)
        except AttributeError:
            raise TypeError("dcmwrite: Expected a file path or a file-like, "
                            "but got " + type(filename).__name__)
    try:
        # WRITE FILE META INFORMATION
        if preamble:
            # Write the 'DICM' prefix if and only if we write the preamble
            fp.write(preamble)
            fp.write(b'DICM')

        tsyntax = None
        if dataset.file_meta:  # May be an empty Dataset
            # If we want to `write_like_original`, don't enforce_standard
            write_file_meta_info(fp,
                                 dataset.file_meta,
                                 enforce_standard=not write_like_original)
            tsyntax = cast(
                UID, getattr(dataset.file_meta, "TransferSyntaxUID", None))

        if tsyntax == DeflatedExplicitVRLittleEndian:
            # See PS3.5 section A.5
            # when writing, the entire dataset following
            #     the file metadata is prepared the normal way,
            #     then "deflate" compression applied.
            buffer = DicomBytesIO()
            _write_dataset(buffer, dataset, write_like_original)

            # Compress the encoded data and write to file
            compressor = zlib.compressobj(wbits=-zlib.MAX_WBITS)
            deflated = compressor.compress(
                buffer.parent.getvalue()  # type: ignore[union-attr]
            )
            deflated += compressor.flush()
            if len(deflated) % 2:
                deflated += b'\x00'

            fp.write(deflated)
        else:
            _write_dataset(fp, dataset, write_like_original)

    finally:
        if not caller_owns_file:
            fp.close()
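A minimal round-trip sketch for dcmwrite; the file names are hypothetical:

from pydicom import dcmread, dcmwrite

ds = dcmread('input.dcm')
# write_like_original=False forces the DICOM File Format: a 128-byte
# preamble, the 'DICM' prefix and the required File Meta elements
dcmwrite('output.dcm', ds, write_like_original=False)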
Example #41
    def test_sort_order(self):
        """Test that tags are serialized in ascending order."""
        ds = Dataset()
        ds.add_new(0x00100040, 'CS', 'F')
        ds.add_new(0x00100030, 'DA', '20000101')
        ds.add_new(0x00100020, 'LO', '0017')
        ds.add_new(0x00100010, 'PN', 'Jane^Doe')

        ds_json = ds.to_json()
        assert ds_json.index('"00100010"') < ds_json.index('"00100020"')
        assert ds_json.index('"00100020"') < ds_json.index('"00100030"')
        assert ds_json.index('"00100030"') < ds_json.index('"00100040"')
Example #42
    def test_pn_components_to_json(self):
        def check_name(tag, components):
            # we cannot directly compare the dictionaries, as they are not
            # ordered in Python 2
            value = ds_json[tag]['Value']
            assert 1 == len(value)
            value = value[0]
            if len(components) == 3:
                assert components[2] == value['Phonetic']
            else:
                assert 'Phonetic' not in value
            if len(components) >= 2:
                assert components[1] == value['Ideographic']
            else:
                assert 'Ideographic' not in value
            assert components[0] == value['Alphabetic']

        ds = Dataset()
        ds.add_new(0x00100010, 'PN', u'Yamada^Tarou=山田^太郎=やまだ^たろう')
        ds.add_new(0x00091001, 'PN', u'Yamada^Tarou')
        ds.add_new(0x00091002, 'PN', u'Yamada^Tarou==')
        ds.add_new(0x00091003, 'PN', u'=山田^太郎=やまだ^たろう')
        ds.add_new(0x00091004, 'PN', u'Yamada^Tarou==やまだ^たろう')
        ds.add_new(0x00091005, 'PN', u'==やまだ^たろう')
        ds.add_new(0x00091006, 'PN', u'=山田^太郎')
        ds.add_new(0x00091007, 'PN', u'Yamada^Tarou=山田^太郎')
        ds_json = ds.to_json_dict()
        check_name('00100010', ['Yamada^Tarou', u'山田^太郎', u'やまだ^たろう'])
        check_name('00091001', ['Yamada^Tarou'])
        check_name('00091002', ['Yamada^Tarou'])
        check_name('00091003', ['', u'山田^太郎', u'やまだ^たろう'])
        check_name('00091004', ['Yamada^Tarou', '', u'やまだ^たろう'])
        check_name('00091005', ['', '', u'やまだ^たろう'])
        check_name('00091006', ['', u'山田^太郎'])
        check_name('00091007', ['Yamada^Tarou', u'山田^太郎'])
Example #43
 def test_empty_value(self):
     ds = Dataset()
     ds.add_new(0x00100010, 'PN', '')
     ds_json = ds.to_json_dict()
     assert '00100010' in ds_json
     assert 'Value' not in ds_json['00100010']
Example #44
def _create_temporary_dataset(shape=(100, 1024, 1024, 3), bit_depth=16):
    """Function to create a temporary dataset for use in testing.

    Parameters
    ----------
    shape : 4-tuple
        The (frames, rows, columns, channels) of the test dataset.
    bit_depth : int
        The BitsAllocated value to use for the dataset, one of 8, 16, 32, 64.

    Returns
    -------
    tempfile.TemporaryFile
        A created DICOM File Format conformant dataset.
    """
    ds = Dataset()
    ds.is_little_endian = True
    ds.is_implicit_VR = False
    ds.file_meta = Dataset()
    ds.file_meta.TransferSyntaxUID = ExplicitVRLittleEndian
    ds.SOPClassUID = '1.2.3.4'
    ds.SOPInstanceUID = generate_uid()
    ds.BitsAllocated = bit_depth
    ds.PixelRepresentation = 0
    ds.PlanarConfiguration = 0
    ds.Rows = shape[1]
    ds.Columns = shape[2]
    ds.NumberOfFrames = shape[0]
    ds.SamplesPerPixel = shape[3]
    if shape[3] == 1:
        ds.PhotometricInterpretation = 'MONOCHROME2'
    elif shape[3] == 3:
        ds.PhotometricInterpretation = 'RGB'

    arr = np.zeros(shape, dtype='uint{}'.format(bit_depth))
    ds.PixelData = arr.tobytes()

    if len(ds.PixelData) % 2:
        ds.PixelData += b'\x00'

    tfile = TemporaryFile(mode='w+b')
    ds.save_as(tfile, write_like_original=False)
    tfile.seek(0)

    return tfile
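Usage sketch: reading the temporary file back with dcmread confirms the helper produced a File Format conformant dataset:

from pydicom import dcmread

tfile = _create_temporary_dataset(shape=(2, 16, 16, 1), bit_depth=8)
ds = dcmread(tfile)
assert ds.Rows == 16 and int(ds.NumberOfFrames) == 2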
Example #45
    def test_getitem_slice(self):
        """Test Dataset.__getitem__ using slices."""
        ds = Dataset()
        ds.CommandGroupLength = 120  # 0000,0000
        ds.CommandLengthToEnd = 111  # 0000,0001
        ds.Overlays = 12  # 0000,51B0
        ds.LengthToEnd = 12  # 0008,0001
        ds.SOPInstanceUID = '1.2.3.4'  # 0008,0018
        ds.SkipFrameRangeFlag = 'TEST'  # 0008,9460
        ds.add_new(0x00090001, 'PN', 'CITIZEN^1')
        ds.add_new(0x00090002, 'PN', 'CITIZEN^2')
        ds.add_new(0x00090003, 'PN', 'CITIZEN^3')
        ds.add_new(0x00090004, 'PN', 'CITIZEN^4')
        ds.add_new(0x00090005, 'PN', 'CITIZEN^5')
        ds.add_new(0x00090006, 'PN', 'CITIZEN^6')
        ds.add_new(0x00090007, 'PN', 'CITIZEN^7')
        ds.add_new(0x00090008, 'PN', 'CITIZEN^8')
        ds.add_new(0x00090009, 'PN', 'CITIZEN^9')
        ds.add_new(0x00090010, 'PN', 'CITIZEN^10')
        ds.PatientName = 'CITIZEN^Jan'  # 0010,0010
        ds.PatientID = '12345'  # 0010,0020
        ds.ExaminedBodyThickness = 1.223  # 0010,9431
        ds.BeamSequence = [Dataset()]  # 300A,00B0
        ds.BeamSequence[0].PatientName = 'ANON'

        # Slice all items - should return original dataset
        self.assertEqual(ds[:], ds)

        # Slice starting from and including (0008,0001)
        test_ds = ds[0x00080001:]
        self.assertFalse('CommandGroupLength' in test_ds)
        self.assertFalse('CommandLengthToEnd' in test_ds)
        self.assertFalse('Overlays' in test_ds)
        self.assertTrue('LengthToEnd' in test_ds)
        self.assertTrue('BeamSequence' in test_ds)

        # Slice ending at and not including (0009,0002)
        test_ds = ds[:0x00090002]
        self.assertTrue('CommandGroupLength' in test_ds)
        self.assertTrue('CommandLengthToEnd' in test_ds)
        self.assertTrue('Overlays' in test_ds)
        self.assertTrue('LengthToEnd' in test_ds)
        self.assertTrue(0x00090001 in test_ds)
        self.assertFalse(0x00090002 in test_ds)
        self.assertFalse('BeamSequence' in test_ds)

        # Slice with a step - every second tag
        # Should return zeroth tag, then second, fourth, etc...
        test_ds = ds[::2]
        self.assertTrue('CommandGroupLength' in test_ds)
        self.assertFalse('CommandLengthToEnd' in test_ds)
        self.assertTrue(0x00090001 in test_ds)
        self.assertFalse(0x00090002 in test_ds)

        # Slice starting at and including (0008,0018) and ending at and not
        #   including (0009,0008)
        test_ds = ds[0x00080018:0x00090008]
        self.assertTrue('SOPInstanceUID' in test_ds)
        self.assertTrue(0x00090007 in test_ds)
        self.assertFalse(0x00090008 in test_ds)

        # Slice starting at and including (0008,0018) and ending at and not
        #   including (0009,0008), every third element
        test_ds = ds[0x00080018:0x00090008:3]
        self.assertTrue('SOPInstanceUID' in test_ds)
        self.assertFalse(0x00090001 in test_ds)
        self.assertTrue(0x00090002 in test_ds)
        self.assertFalse(0x00090003 in test_ds)
        self.assertFalse(0x00090004 in test_ds)
        self.assertTrue(0x00090005 in test_ds)
        self.assertFalse(0x00090006 in test_ds)
        self.assertFalse(0x00090008 in test_ds)

        # Slice starting and ending (and not including) (0008,0018)
        self.assertEqual(ds[(0x0008, 0x0018):(0x0008, 0x0018)], Dataset())

        # Test slicing using other acceptable Tag initialisations
        self.assertTrue('SOPInstanceUID' in ds[(0x00080018):(0x00080019)])
        self.assertTrue('SOPInstanceUID' in ds[(0x0008, 0x0018):(0x0008,
                                                                 0x0019)])
        self.assertTrue('SOPInstanceUID' in ds['0x00080018':'0x00080019'])
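
The slicing exercised above is a quick way to pull tag ranges out of a dataset without touching the original. A minimal standalone sketch of the same idea (assuming only pydicom is installed; the tags and values are illustrative):

from pydicom.dataset import Dataset

ds = Dataset()
ds.Modality = 'RTDOSE'           # (0008,0060)
ds.PatientName = 'CITIZEN^Jan'   # (0010,0010)
ds.PatientID = '12345'           # (0010,0020)

# Slicing compares tags as plain integers, so 0x00100000..0x00110000
# selects exactly group 0010 and returns a new Dataset
group_0010 = ds[0x00100000:0x00110000]
assert 'PatientName' in group_0010
assert 'Modality' not in group_0010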
Example #46
 def test_iterall(self):
     """Test Dataset.iterall"""
     ds = Dataset()
     ds.CommandGroupLength = 120
     ds.SkipFrameRangeFlag = 'TEST'
     ds.add_new(0x00090001, 'PN', 'CITIZEN^1')
     ds.BeamSequence = [Dataset()]
     ds.BeamSequence[0].PatientName = 'ANON'
     elem_gen = ds.iterall()
     self.assertEqual(ds.data_element('CommandGroupLength'), next(elem_gen))
     self.assertEqual(ds.data_element('SkipFrameRangeFlag'), next(elem_gen))
     self.assertEqual(ds[0x00090001], next(elem_gen))
     self.assertEqual(ds.data_element('BeamSequence'), next(elem_gen))
     self.assertEqual(ds.BeamSequence[0].data_element('PatientName'),
                      next(elem_gen))
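
For contrast, iterating a Dataset directly yields only the top-level elements, while iterall() also descends into sequence items. A small sketch of the difference (assuming pydicom; the element names are illustrative):

from pydicom.dataset import Dataset

ds = Dataset()
ds.PatientName = 'ANON'
ds.BeamSequence = [Dataset()]
ds.BeamSequence[0].BeamName = 'Field 1'

top = [elem.keyword for elem in ds]             # ['PatientName', 'BeamSequence']
deep = [elem.keyword for elem in ds.iterall()]  # also includes the nested 'BeamName'
assert 'BeamName' not in top and 'BeamName' in deep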
Example #47
 def test_setitem_slice_raises(self):
     """Test Dataset.__setitem__ raises if slicing used."""
     ds = Dataset()
     self.assertRaises(NotImplementedError, ds.__setitem__, slice(None),
                       Dataset())
Example #48
 def test_data_element(self):
     """Test Dataset.data_element."""
     ds = Dataset()
     ds.CommandGroupLength = 120
     ds.SkipFrameRangeFlag = 'TEST'
     ds.add_new(0x00090001, 'PN', 'CITIZEN^1')
     ds.BeamSequence = [Dataset()]
     ds.BeamSequence[0].PatientName = 'ANON'
     assert ds.data_element('CommandGroupLength') == ds[0x00000000]
     assert ds.data_element('BeamSequence') == ds[0x300A00B0]
     assert ds.data_element('not an element keyword') is None
Example #49
 def testEqualityNotDataset(self):
     """Dataset: equality returns correct value when not the same class"""
     d = Dataset()
     d.SOPInstanceUID = '1.2.3.4'
     self.assertFalse(d == {'SOPInstanceUID': '1.2.3.4'})
Example #50
    def test_remove_private_tags(self):
        """Test Dataset.remove_private_tags"""
        ds = Dataset()
        ds.CommandGroupLength = 120  # 0000,0000
        ds.SkipFrameRangeFlag = 'TEST'  # 0008,9460
        ds.add_new(0x00090001, 'PN', 'CITIZEN^1')
        ds.add_new(0x00090010, 'PN', 'CITIZEN^10')
        ds.PatientName = 'CITIZEN^Jan'  # 0010,0010

        ds.remove_private_tags()
        self.assertEqual(ds[0x00090000:0x00100000], Dataset())
        self.assertTrue('CommandGroupLength' in ds)
        self.assertTrue('SkipFrameRangeFlag' in ds)
        self.assertTrue('PatientName' in ds)
Example #51
    def testEqualityNoSequence(self):
        """Dataset: equality returns correct value with simple dataset"""
        # Test empty dataset
        assert Dataset() == Dataset()

        d = Dataset()
        d.SOPInstanceUID = '1.2.3.4'
        d.PatientName = 'Test'
        assert d == d

        e = Dataset()
        e.PatientName = 'Test'
        e.SOPInstanceUID = '1.2.3.4'
        assert d == e

        e.SOPInstanceUID = '1.2.3.5'
        assert not d == e

        # Check VR
        del e.SOPInstanceUID
        e.add(DataElement(0x00080018, 'PN', '1.2.3.4'))
        assert not d == e

        # Check Tag
        del e.SOPInstanceUID
        e.StudyInstanceUID = '1.2.3.4'
        assert not d == e

        # Check missing Element in self
        e.SOPInstanceUID = '1.2.3.4'
        assert not d == e

        # Check missing Element in other
        d = Dataset()
        d.SOPInstanceUID = '1.2.3.4'
        d.StudyInstanceUID = '1.2.3.4.5'

        e = Dataset()
        e.SOPInstanceUID = '1.2.3.4'
        assert not d == e
Example #52
    def test_is_original_encoding(self):
        """Test Dataset.write_like_original"""
        ds = Dataset()
        assert not ds.is_original_encoding

        # simulate reading
        ds.SpecificCharacterSet = 'ISO_IR 100'
        ds.set_original_encoding(True, True, ['latin_1'])
        assert not ds.is_original_encoding

        ds.is_little_endian = True
        ds.is_implicit_VR = True
        assert ds.is_original_encoding
        # changed character set
        ds.SpecificCharacterSet = 'ISO_IR 192'
        assert not ds.is_original_encoding
        # back to original character set
        ds.SpecificCharacterSet = 'ISO_IR 100'
        assert ds.is_original_encoding
        ds.is_little_endian = False
        assert not ds.is_original_encoding
        ds.is_little_endian = True
        ds.is_implicit_VR = False
        assert not ds.is_original_encoding
Example #53
 def setUp(self):
     self.ds = Dataset()
     self.sub_ds1 = Dataset()
     self.sub_ds2 = Dataset()
Example #54
    def test_get_item_slice(self):
        """Test Dataset.get_item with slice argument"""
        # adapted from test_getitem_slice
        ds = Dataset()
        ds.CommandGroupLength = 120  # 0000,0000
        ds.CommandLengthToEnd = 111  # 0000,0001
        ds.Overlays = 12  # 0000,51B0
        ds.LengthToEnd = 12  # 0008,0001
        ds.SOPInstanceUID = '1.2.3.4'  # 0008,0018
        ds.SkipFrameRangeFlag = 'TEST'  # 0008,9460
        ds.add_new(0x00090001, 'PN', 'CITIZEN^1')
        ds.add_new(0x00090002, 'PN', 'CITIZEN^2')
        ds.add_new(0x00090003, 'PN', 'CITIZEN^3')
        elem = RawDataElement(0x00090004, 'PN', 9, b'CITIZEN^4', 0, True, True)
        ds.__setitem__(0x00090004, elem)
        elem = RawDataElement(0x00090005, 'PN', 9, b'CITIZEN^5', 0, True, True)
        ds.__setitem__(0x00090005, elem)
        elem = RawDataElement(0x00090006, 'PN', 9, b'CITIZEN^6', 0, True, True)
        ds.__setitem__(0x00090006, elem)
        ds.PatientName = 'CITIZEN^Jan'  # 0010,0010
        elem = RawDataElement(0x00100020, 'LO', 5, b'12345', 0, True, True)
        ds.__setitem__(0x00100020, elem)  # Patient ID
        ds.ExaminedBodyThickness = 1.223  # 0010,9431
        ds.BeamSequence = [Dataset()]  # 300A,00B0
        ds.BeamSequence[0].PatientName = 'ANON'

        # Slice all items - should return original dataset
        assert ds.get_item(slice(None, None)) == ds

        # Slice starting from and including (0008,0001)
        test_ds = ds.get_item(slice(0x00080001, None))
        assert 'CommandGroupLength' not in test_ds
        assert 'CommandLengthToEnd' not in test_ds
        assert 'Overlays' not in test_ds
        assert 'LengthToEnd' in test_ds
        assert 'BeamSequence' in test_ds

        # Slice ending at and not including (0009,0002)
        test_ds = ds.get_item(slice(None, 0x00090002))
        assert 'CommandGroupLength' in test_ds
        assert 'CommandLengthToEnd' in test_ds
        assert 'Overlays' in test_ds
        assert 'LengthToEnd' in test_ds
        assert 0x00090001 in test_ds
        assert 0x00090002 not in test_ds
        assert 'BeamSequence' not in test_ds

        # Slice with a step - every second tag
        # Should return zeroth tag, then second, fourth, etc...
        test_ds = ds.get_item(slice(None, None, 2))
        assert 'CommandGroupLength' in test_ds
        assert 'CommandLengthToEnd' not in test_ds
        assert 0x00090001 in test_ds
        assert 0x00090002 not in test_ds

        # Slice starting at and including (0008,0018) and ending at and not
        #   including (0009,0006)
        test_ds = ds.get_item(slice(0x00080018, 0x00090006))
        assert 'SOPInstanceUID' in test_ds
        assert 0x00090005 in test_ds
        assert 0x00090006 not in test_ds

        # Slice starting at and including (0008,0018) and ending at and not
        #   including (0009,0008), every third element
        test_ds = ds.get_item(slice(0x00080018, 0x00090008, 3))
        assert 'SOPInstanceUID' in test_ds
        assert 0x00090001 not in test_ds
        assert 0x00090002 in test_ds
        assert not test_ds.get_item(0x00090002).is_raw
        assert 0x00090003 not in test_ds
        assert 0x00090004 not in test_ds
        assert 0x00090005 in test_ds
        assert test_ds.get_item(0x00090005).is_raw
        assert 0x00090006 not in test_ds

        # Slice starting and ending (and not including) (0008,0018)
        assert ds.get_item(slice((0x0008, 0x0018),
                                 (0x0008, 0x0018))) == Dataset()

        # Test slicing using other acceptable Tag initialisations
        assert 'SOPInstanceUID' in ds.get_item(slice(0x00080018, 0x00080019))
        assert 'SOPInstanceUID' in ds.get_item(
            slice((0x0008, 0x0018), (0x0008, 0x0019)))
        assert 'SOPInstanceUID' in ds.get_item(
            slice('0x00080018', '0x00080019'))
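
The is_raw assertions above hinge on the difference between the two accessors: Dataset.get_item returns elements exactly as stored, so a RawDataElement stays raw, whereas Dataset.__getitem__ converts it to a parsed DataElement on access. A brief sketch of that behaviour (assuming pydicom):

from pydicom.dataset import Dataset
from pydicom.dataelem import RawDataElement

ds = Dataset()
elem = RawDataElement(0x00100010, 'PN', 9, b'CITIZEN^1', 0, True, True)
ds.__setitem__(0x00100010, elem)

assert ds.get_item(0x00100010).is_raw   # returned untouched
assert not ds[0x00100010].is_raw        # converted on access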
Example #55
 def test_formatted_lines_known_uid(self):
     """Test that the UID name is output when known."""
     ds = Dataset()
     ds.TransferSyntaxUID = '1.2.840.10008.1.2'
     assert 'Implicit VR Little Endian' in str(ds)
Example #56
    def test_delitem_slice(self):
        """Test Dataset.__delitem__ using slices."""
        ds = Dataset()
        ds.CommandGroupLength = 120  # 0000,0000
        ds.CommandLengthToEnd = 111  # 0000,0001
        ds.Overlays = 12  # 0000,51B0
        ds.LengthToEnd = 12  # 0008,0001
        ds.SOPInstanceUID = '1.2.3.4'  # 0008,0018
        ds.SkipFrameRangeFlag = 'TEST'  # 0008,9460
        ds.add_new(0x00090001, 'PN', 'CITIZEN^1')
        ds.add_new(0x00090002, 'PN', 'CITIZEN^2')
        ds.add_new(0x00090003, 'PN', 'CITIZEN^3')
        ds.add_new(0x00090004, 'PN', 'CITIZEN^4')
        ds.add_new(0x00090005, 'PN', 'CITIZEN^5')
        ds.add_new(0x00090006, 'PN', 'CITIZEN^6')
        ds.add_new(0x00090007, 'PN', 'CITIZEN^7')
        ds.add_new(0x00090008, 'PN', 'CITIZEN^8')
        ds.add_new(0x00090009, 'PN', 'CITIZEN^9')
        ds.add_new(0x00090010, 'PN', 'CITIZEN^10')
        ds.PatientName = 'CITIZEN^Jan'  # 0010,0010
        ds.PatientID = '12345'  # 0010,0020
        ds.ExaminedBodyThickness = 1.223  # 0010,9431
        ds.BeamSequence = [Dataset()]  # 300A,00B0
        ds.BeamSequence[0].PatientName = 'ANON'

        # Delete the 0x0009 group
        del ds[0x00090000:0x00100000]
        self.assertTrue('SkipFrameRangeFlag' in ds)
        self.assertFalse(0x00090001 in ds)
        self.assertFalse(0x00090010 in ds)
        self.assertTrue('PatientName' in ds)
Example #57
    def _retrieve(self, PatientID: str = None,
                  StudyInstanceUID: str = None,
                  SeriesInstanceUID: str = None,
                  SOPInstanceUID: str = None,
                  override: bool = False,
                  subPath: str = ""
                  ):
        '''Fetch DICOM datasets from the server.

        Parameters
        ----------
        PatientID : str, optional
            A patient ID. The default is None.
        StudyInstanceUID : str, optional
            A StudyInstanceUID. The default is None.
        SeriesInstanceUID : str, optional
            A SeriesInstanceUID. The default is None.
        SOPInstanceUID : str, optional
            A SOPInstanceUID. The default is None.
        override : bool, optional
            Fetch the data through an external call even if a local copy exists.
            Used by getdicom.py, which sets it to False. The default is False.
        subPath : str, optional
            Appends subPath to the local storage location.

        evt_handlers
        ------------
        EVT_C_STORE
        EVT_REJECTED
        EVT_ACCEPTED
        EVT_ABORTED

        Signals
        -------
        dicom.EVT_C_STORE

        Returns
        -------
        status : DICOM status
            - 0x0000 - data available/read | load archive | run EVT_C_STORE
            - 0xC5F1 - no PatientID
            - 0xC0FF - initAE: connection failed
            - 0xC512 - error while saving the DICOM data
            - 0xC515 - address/port already in use
        '''
        #print( "dicomClass : retrieve", PatientID, SOPInstanceUID)
        '''
        if not PatientID:
            logger.warning("dicomClass.retrieve: keine PatientID")
            signal( 'dicom.EVT_C_STORE').send( { 
                    "name": "EVT_C_STORE",
                    '_is_cancelled': True,
                    #"dataset": None,
                    "status":0xC5F1,
                    "msg" : "keine PatientID"
            } )
            return 0xC5F1
        '''
        
      
        # remember the override status
        self.override = override

        # remember the subPath
        self.subPath = subPath

        # Create our Identifier (query) dataset
        ds = Dataset()

        # which level should be queried at
        #ds.QueryRetrieveLevel = 'SERIES'
        if PatientID:
            ds.QueryRetrieveLevel = 'PATIENT'
            # Unique key for PATIENT level
            ds.PatientID = PatientID
    
        # Unique key for STUDY level
        if StudyInstanceUID:
            ds.QueryRetrieveLevel = 'STUDY'
            ds.StudyInstanceUID = str(StudyInstanceUID)
       
        # Unique key for SERIES 
        if SeriesInstanceUID:
            ds.QueryRetrieveLevel = 'SERIES'
            ds.SeriesInstanceUID = str(SeriesInstanceUID)
            
        # Unique key for IMAGE 
        if SOPInstanceUID:
            ds.QueryRetrieveLevel = 'IMAGE'
            ds.SOPInstanceUID = str(SOPInstanceUID)
            
        ds.Modality = 'RTIMAGE'
        
        # log the QueryRetrieveLevel
        logger.debug( "dicomClass._retrieve: QueryRetrieveLevel {}".format( ds.QueryRetrieveLevel ) )
              
        # at IMAGE level, try to read from the file archive instead of fetching from the server
        if ds.QueryRetrieveLevel == 'IMAGE' and not override:
            # info
            logger.debug( "dicomClass._retrieve: search archive {}".format( ds.SOPInstanceUID ) )
            # load the file from the archive
            file_ds = self.archive_loadSOPInstanceUID( ds.SOPInstanceUID )
            # if it could be read, we are done here
            if file_ds:
                self.dicomData[ ds.SOPInstanceUID ] = file_ds

                logger.debug( "dicomClass._retrieve: load archive {}".format( ds.SOPInstanceUID ) )

                signal( 'dicom.EVT_C_STORE').send( {
                    "name": "EVT_C_STORE",
                    '_is_cancelled': False,
                    "status": 0x0000,
                    "msg": "load archive",
                    "dataset": ds,  # pass along the dataset (done)
                } )
                return 0x0000
            else:
                logger.info( "dicomClass._retrieve: no archive {}".format( ds.SOPInstanceUID ) )
       
        #
        # otherwise try to fetch the data afresh
        #

        # establish the connection if necessary
        if not self.assoc:
            status = self.initAE()
            # and check it
            if not self.assoc:
                logger.warning("dicomClass._retrieve: connection failed")
                signal( 'dicom.EVT_C_STORE').send( {
                    "name": "EVT_C_STORE",
                    '_is_cancelled': True,
                    "status": status,
                    "msg": "initAE: connection failed",
                    # "dataset": None,
                } )
                return status

        # information about the connection
        logger.debug( "dicomClass._retrieve: {}".format( self.aeInfo() ) )

        #print( "dicomClass.retrieve: QueryRetrieveLevel {}".format( ds.QueryRetrieveLevel ) )
        # if it is not already running, start the server for receiving the data
        if not self.scp:
            # reset the message id
            self.messageId = 0
            #print( self.scp )
            # register the handlers for receiving the data
            handlers = [
                 ( evt.EVT_C_STORE, self.handle_STORE),
                 ( evt.EVT_ACCEPTED, self.handle_event),
                 ( evt.EVT_ABORTED, self.handle_event),
                 
                 ( evt.EVT_REJECTED, self.handle_event),
                 ( evt.EVT_RELEASED, self.handle_event),
                 ( evt.EVT_REQUESTED, self.handle_event),

                 ( evt.EVT_DIMSE_SENT, self.handle_event),
                 ( evt.EVT_DIMSE_RECV, self.handle_event),
                 #( evt.EVT_PDU_RECV, self.handle_event),
                 #( evt.EVT_PDU_SENT, self.handle_event),
                 
            ]
            #
            # start the server to receive the data: storage SCP on port listen_port
            self.ae.ae_title = self.config.dicom[self.server]['aet']
            sig_msg = None
            try:  
                logger.debug( "dicomClass._retrieve:  start server" )
                # If set to non-blocking then a running ``ThreadedAssociationServer``
                # instance will be returned. This can be stopped using ``shutdown()``.
                self.scp = self.ae.start_server(
                    ('', self.config.dicom[self.server]['listen_port']), 
                    block=False,  # non-blocking; handled via a thread
                    evt_handlers=handlers
                )
                #print( "dicomClass.retrieve: start server" )
                
            except OSError as e:
                #print( "dicomClass.retrieve: 0xC515 - {}".format( str(e) )  )
                logger.error( "dicomClass._retrieve: 0xC515 - {}".format( str(e) ) )
                sig_msg = { 
                    "name": "EVT_C_STORE",
                    "_is_cancelled": True,
                    "status": 0xC515,
                    "msg" : "{}".format( str(e) ),
                    # "dataset": ds,
                }
                # The user’s implementation of the on_c_move callback failed to yield a valid (address, port) pair
               
            except Exception:
                sig_msg = {
                    "name": "EVT_C_STORE",
                    "_is_cancelled": True,
                    "status": 0xC515,
                    "msg": "error while starting the listen server",
                    # "dataset": ds,
                }

                logger.error( "dicomClass._retrieve: ERROR starting listen server" )
             
            # after an error, send the signal
            if sig_msg is not None:
                # print( "sig_msg", sig_msg )
                signal( 'dicom.EVT_C_STORE' ).send( sig_msg )
                return 0xC515
            
        
        # Use the C-MOVE service to send the identifier
        # A query_model value of 'S' means use the 'Study Root Query
        #   Retrieve Information Model - Move' presentation context;
        #   the status codes are defined in pynetdicom.status

        # default result: error (updated from the C-MOVE responses)
        result = 0xC512

        try:
            #print( "dicomClass.assoc.send_c_move", self.assoc.is_established, self.scp, self.assoc )
            self.messageId += 1
            responses = self.assoc.send_c_move(
                ds, 
                self.config.dicom[self.server]['aet'], 
                query_model='S',
                msg_id = self.messageId
            )
            
            #print( "dicomClass : .assoc.send_c_move response" )
            i = 0
            for (status, identifier) in responses:
                
                i += 1
                if status:
                    result = status.Status
                    #print( "dicomClass : .assoc.send_c_move - response", hex(result), identifier )
                    #
                    logger.debug( 'dicomClass._retrieve: {} - C-MOVE query status: {}'.format( i, hex(result) ) )
                    
                    # If the status is 'Pending' then the identifier is the C-MOVE response
                    
                    # Pending
                    #   | ``0xFF00`` - Sub-operations are continuing
                    #   the remaining flow is handled by retrieve_thread
                    if status.Status in (0xFF00, 0xFF01):
                        if identifier:
                            print( "dicomClass._retrieve:  0xFF00, 0xFF01",  identifier )
                            pass
                    elif status.Status == 0x0000:
                        if identifier:
                            print( "dicomClass._retrieve:  0x0000",  identifier)
                        pass
                    elif status.Status == 0xc002:
                        # User’s callback implementation returned an invalid status object (not a pydicom Dataset or an int)
                        if identifier:
                            print( "dicomClass._retrieve:  0xc002",  identifier)
                            
                    elif status.Status in (0xC511, 0xC512):
                        logger.error( "dicomClass._retrieve: error while saving the DICOM data" )
                        if identifier:
                            print( "dicomClass._retrieve 0xC511",  identifier)

                else:
                    logger.warning('Connection timed out, was aborted or received invalid response')
                             
            logger.debug('dicomClass._retrieve: response ready')
        except Exception as e:
            # catch all remaining errors
            logger.warning("dicomClass._retrieve: error fetching the '{}' DICOM data: {}".format( ds.QueryRetrieveLevel, e ))
        
        logger.debug("dicomClass._retrieve: DICOM Daten holen: {} - {}".format( hex(result), SOPInstanceUID ) )
        
        # print("DICOM Daten holen: {}".format( SOPInstanceUID ), hex(result) )
        # Completely shutdown the server and close it's socket.
        
        # wenn nicht pending (retrieve_thread übernimmt) EVT_C_STORE mit _is_cancelled senden 
        # 0xff00 - Pending
        # 0x0000 - Success
        if not result in ( 0x0000, 0xff00):
        #if not result == 0xff00:
            signal( 'dicom.EVT_C_STORE').send( { 
                    "name": "EVT_C_STORE",
                    "_is_cancelled": True,
                    "status": result,
                    "msg": "run EVT_C_STORE",
                    #  "dataset": ds,
            } )
            
            
        #self.scp.shutdown()
        #self.scp = None
        
        return result
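
Stripped of the archive lookup, signalling and error bookkeeping, the core of _retrieve is a standard pynetdicom C-MOVE loop with a local storage SCP to catch the moved instances. A minimal sketch under assumed values (the AE title, addresses and ports are placeholders, not the class's actual configuration; the old-style query_model string matches the call above):

from pydicom.dataset import Dataset
from pynetdicom import AE, evt, StoragePresentationContexts

def handle_store(event):
    """Persist each received instance to the working directory."""
    ds = event.dataset
    ds.file_meta = event.file_meta
    ds.save_as(ds.SOPInstanceUID + '.dcm', write_like_original=False)
    return 0x0000  # Success

ae = AE(ae_title='MY_AET')                               # placeholder AE title
ae.supported_contexts = StoragePresentationContexts
ae.add_requested_context('1.2.840.10008.5.1.4.1.2.2.2')  # Study Root Q/R - MOVE

# non-blocking storage SCP; the moved instances arrive here
scp = ae.start_server(('', 11113), block=False,
                      evt_handlers=[(evt.EVT_C_STORE, handle_store)])

ds = Dataset()
ds.QueryRetrieveLevel = 'SERIES'
ds.SeriesInstanceUID = '1.2.3.4'            # placeholder UID

assoc = ae.associate('127.0.0.1', 11112)    # placeholder server address/port
if assoc.is_established:
    for status, identifier in assoc.send_c_move(ds, 'MY_AET', query_model='S'):
        if status:
            print('C-MOVE status: 0x{0:04X}'.format(status.Status))
    assoc.release()
scp.shutdown()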
Example #58
    def test_getitem_slice_ffff(self):
        """Test slicing with (FFFF,FFFF)"""
        # Issue #92
        ds = Dataset()
        ds.CommandGroupLength = 120  # 0000,0000
        ds.CommandLengthToEnd = 111  # 0000,0001
        ds.Overlays = 12  # 0000,51B0
        ds.LengthToEnd = 12  # 0008,0001
        ds.SOPInstanceUID = '1.2.3.4'  # 0008,0018
        ds.SkipFrameRangeFlag = 'TEST'  # 0008,9460
        ds.add_new(0xFFFF0001, 'PN', 'CITIZEN^1')
        ds.add_new(0xFFFF0002, 'PN', 'CITIZEN^2')
        ds.add_new(0xFFFF0003, 'PN', 'CITIZEN^3')
        ds.add_new(0xFFFFFFFE, 'PN', 'CITIZEN^4')
        ds.add_new(0xFFFFFFFF, 'PN', 'CITIZEN^5')

        assert ds[:][0xFFFFFFFF].value == 'CITIZEN^5'
        assert 0xFFFFFFFF not in ds[0x1000:0xFFFFFFFF]
        assert 0xFFFFFFFF not in ds[(0x1000):(0xFFFF, 0xFFFF)]
Example #59
    def test_roundtrip(self):
        ds = Dataset()
        ds.add_new(0x00080005, 'CS', 'ISO_IR 100')
        ds.add_new(0x00090010, 'LO', 'Creator 1.0')
        ds.add_new(0x00091001, 'SH', 'Version1')
        ds.add_new(0x00091002, 'OB', b'BinaryContent')
        ds.add_new(0x00091003, 'OW', b'\x0102\x3040\x5060')
        ds.add_new(0x00091004, 'OF', b'\x00\x01\x02\x03\x04\x05\x06\x07')
        ds.add_new(
            0x00091005, 'OD', b'\x00\x01\x02\x03\x04\x05\x06\x07'
            b'\x01\x01\x02\x03\x04\x05\x06\x07')
        ds.add_new(0x00091006, 'OL', b'\x00\x01\x02\x03\x04\x05\x06\x07'
                   b'\x01\x01\x02\x03')
        ds.add_new(0x00091007, 'UI', '1.2.3.4.5.6')
        ds.add_new(0x00091008, 'DA', '20200101')
        ds.add_new(0x00091009, 'TM', '115500')
        ds.add_new(0x0009100a, 'DT', '20200101115500.000000')
        ds.add_new(0x0009100b, 'UL', 3000000000)
        ds.add_new(0x0009100c, 'SL', -2000000000)
        ds.add_new(0x0009100d, 'US', 40000)
        ds.add_new(0x0009100e, 'SS', -22222)
        ds.add_new(0x0009100f, 'FL', 3.14)
        ds.add_new(0x00091010, 'FD', 3.14159265)
        ds.add_new(0x00091011, 'CS', 'TEST MODE')
        ds.add_new(0x00091012, 'PN', 'CITIZEN^1')
        ds.add_new(0x00091013, 'PN', u'Yamada^Tarou=山田^太郎=やまだ^たろう')
        ds.add_new(0x00091014, 'IS', '42')
        ds.add_new(0x00091015, 'DS', '3.14159265')
        ds.add_new(0x00091016, 'AE', b'CONQUESTSRV1')
        ds.add_new(0x00091017, 'AS', '055Y')
        ds.add_new(0x00091018, 'LT', 50 * u'Калинка,')
        ds.add_new(0x00091019, 'UC', 'LONG CODE VALUE')
        ds.add_new(0x0009101a, 'UN', b'\x0102\x3040\x5060')
        ds.add_new(0x0009101b, 'UR', 'https://example.com')
        ds.add_new(0x0009101c, 'AT', [0x00100010, 0x00100020])
        ds.add_new(0x0009101d, 'ST', 100 * u'علي بابا')
        ds.add_new(0x0009101e, 'SH', u'Διονυσιος')
        ds.add_new(0x00090011, 'LO', 'Creator 2.0')
        ds.add_new(0x00091101, 'SH', 'Version2')
        ds.add_new(0x00091102, 'US', 2)

        json_string = ds.to_json(bulk_data_threshold=100)
        json_model = json.loads(json_string)

        assert json_model['00080005']['Value'] == ['ISO_IR 100']
        assert json_model['00091007']['Value'] == ['1.2.3.4.5.6']
        assert json_model['0009100A']['Value'] == ['20200101115500.000000']
        assert json_model['0009100B']['Value'] == [3000000000]
        assert json_model['0009100C']['Value'] == [-2000000000]
        assert json_model['0009100D']['Value'] == [40000]
        assert json_model['0009100F']['Value'] == [3.14]
        assert json_model['00091010']['Value'] == [3.14159265]
        assert json_model['00091018']['Value'] == [50 * u'Калинка,']

        ds2 = Dataset.from_json(json_string)
        assert ds == ds2
        ds2 = Dataset.from_json(json_model)
        assert ds == ds2

        json_model2 = ds.to_json_dict(bulk_data_threshold=100)
        if compat.in_py2:
            # in Python 2, the encoding of this is slightly different
            # (single vs double quotation marks)
            del json_model['00091015']
            del json_model2['00091015']

        assert json_model == json_model2
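
Outside the test, the same round trip reduces to a pair of calls using the DICOM JSON model (PS3.18 Annex F). A minimal sketch, assuming a pydicom version where Dataset.to_json/from_json are available (1.3+):

import json
from pydicom.dataset import Dataset

ds = Dataset()
ds.PatientName = 'CITIZEN^Jan'
ds.PatientID = '12345'

json_string = ds.to_json()
model = json.loads(json_string)
# PN values are nested under 'Alphabetic' in the JSON model
assert model['00100010']['Value'] == [{'Alphabetic': 'CITIZEN^Jan'}]

# ...and rebuild an equivalent Dataset from the JSON
ds2 = Dataset.from_json(json_string)
assert ds == ds2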
Example #60
    def test_to_json(self):
        ds = Dataset()
        ds.add_new(0x00091001, 'AT', [0x00100010, 0x00100020])
        ds.add_new(0x00091002, 'AT', Tag(0x28, 0x02))
        ds.add_new(0x00091003, 'AT', BaseTag(0x00280002))
        ds.add_new(0x00091004, 'AT', [0x00280002, Tag('PatientName')])
        ds_json = ds.to_json_dict()

        assert ['00100010', '00100020'] == ds_json['00091001']['Value']
        assert ['00280002'] == ds_json['00091002']['Value']
        assert ['00280002'] == ds_json['00091003']['Value']
        assert ['00280002', '00100010'] == ds_json['00091004']['Value']
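
The AT (Attribute Tag) handling shown above normalises every accepted input form - plain ints, Tag, BaseTag, keyword strings - to the 8-character hex form of the JSON model. A tiny sketch of the same normalisation (assuming pydicom):

from pydicom.dataset import Dataset
from pydicom.tag import Tag

ds = Dataset()
ds.add_new(0x00091001, 'AT', Tag('PatientID'))  # keyword resolves to (0010,0020)
assert ds.to_json_dict()['00091001']['Value'] == ['00100020']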