def test_charset_patient_names(self, filename, patient_name):
    """Test patient names are correctly decoded and encoded."""
    # Read the charset test file and confirm the decoded name matches.
    file_path = get_charset_files(filename + '.dcm')[0]
    ds = dcmread(file_path)
    ds.decode()
    assert patient_name == ds.PatientName

    def roundtrip(dataset):
        # Write the dataset as explicit VR little endian and re-read it.
        buffer = DicomBytesIO()
        buffer.is_implicit_VR = False
        buffer.is_little_endian = True
        dataset.save_as(buffer, write_like_original=False)
        buffer.seek(0)
        return dcmread(buffer)

    # The name survives a write/read round trip.
    ds = roundtrip(ds)
    assert patient_name == ds.PatientName

    # The name also survives a round trip without the original byte
    # string (PersonName3 only).
    if hasattr(ds.PatientName, 'original_string'):
        ds.PatientName.original_string = None
        ds = roundtrip(ds)
        assert patient_name == ds.PatientName
def test_write_tag(self):
    """Test DicomIO.write_tag indirectly"""
    tag = Tag(0x01020304)
    # The tag bytes depend on the stream's endianness.
    cases = (
        (True, b'\x02\x01\x04\x03'),   # little endian
        (False, b'\x01\x02\x03\x04'),  # big endian
    )
    for little_endian, expected in cases:
        fp = DicomBytesIO()
        fp.is_little_endian = little_endian
        fp.write_tag(tag)
        assert fp.getvalue() == expected
def test_zero_length(self):
    """Test reading BOT with zero length"""
    # Item tag (fffe,e000) followed by a zero item length.
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x00\x00\x00\x00'
    )
    buffer.is_little_endian = True
    assert get_frame_offsets(buffer) == [0]
def test_encapsulate_single_fragment_per_frame_bot(self):
    """Test encapsulating single fragment per frame with BOT values."""
    ds = dcmread(JP2K_10FRAME_NOBOT)
    frames = decode_data_sequence(ds.PixelData)
    assert len(frames) == 10

    data = encapsulate(frames, fragments_per_frame=1, has_bot=True)
    # Decoding what was encapsulated gives the same fragments back.
    for decoded, original in zip(decode_data_sequence(data), frames):
        assert decoded == original

    buffer = DicomBytesIO(data)
    buffer.is_little_endian = True
    # Byte offsets of each frame within the encapsulated pixel data.
    assert get_frame_offsets(buffer) == [
        0x0000, 0x0eee, 0x1df6, 0x2cf8, 0x3bfc,
        0x4ade, 0x59a2, 0x6834, 0x76e2, 0x8594,
    ]
def test_deferred_data_element_deprecated():
    """Test the deprecation warning is working"""
    fp = DicomBytesIO()
    fp.is_little_endian = True
    fp.is_implicit_VR = True
    # Instantiating DeferredDataElement must emit a DeprecationWarning.
    with pytest.deprecated_call():
        DeferredDataElement(0x00000000, 'UL', fp, 0, 0, 4)
def test_read_be_tag(self):
    """Test DicomIO.read_be_tag indirectly"""
    # A tag is 2 + 2 = 4 bytes; the trailing bytes are not consumed.
    fp = DicomBytesIO(b'\x01\x02\x03\x04\x05\x06')
    fp.is_little_endian = False
    assert Tag(fp.read_be_tag()) == 0x01020304
def encode(ds, is_implicit_VR, is_little_endian):
    """Given a pydicom Dataset, encode it to a byte stream.

    Parameters
    ----------
    ds - pydicom.dataset.Dataset
        The dataset to encode
    is_implicit_VR - bool
        Transfer syntax implicit/explicit VR
    is_little_endian - bool
        Transfer syntax byte ordering

    Returns
    -------
    bytes or None
        The encoded dataset (if successful), None if encoding failed.
    """
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = is_implicit_VR
    buffer.is_little_endian = is_little_endian
    try:
        write_dataset(buffer, ds)
    except Exception as exc:
        # Encoding failures are logged and signalled with a None return.
        logger.error("pydicom.write_dataset() failed:")
        logger.error(exc)
        buffer.close()
        return None
    encoded = buffer.parent.getvalue()
    buffer.close()
    return encoded
def test_single_frame(self):
    """Test reading single-frame BOT item"""
    # Item tag, 4-byte length, then a single zero offset.
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x00\x00\x00\x00'
    )
    buffer.is_little_endian = True
    assert get_frame_offsets(buffer) == [0]
def encode(ds, is_implicit_VR, is_little_endian):
    """Return dataset `ds` encoded as bytes with the given VR scheme
    and byte ordering."""
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = is_implicit_VR
    buffer.is_little_endian = is_little_endian
    write_dataset(buffer, ds)
    encoded = buffer.parent.getvalue()
    buffer.close()
    return encoded
def test_read_exact_length_raises(self):
    """Test DicomIO.read exact length raises if short"""
    fp = DicomBytesIO(b'\x00\x01\x03')
    fp.is_little_endian = True
    msg = ("Unexpected end of file. Read 3 bytes of 4 "
           "expected starting at position 0x0")
    # Only 3 of the requested 4 bytes are available.
    with pytest.raises(EOFError, match=msg):
        fp.read(length=4, need_exact_length=True)
def encode_element(el, is_implicit_VR, is_little_endian):
    """Return data element `el` encoded as bytes with the given VR
    scheme and byte ordering."""
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = is_implicit_VR
    buffer.is_little_endian = is_little_endian
    write_data_element(buffer, el)
    encoded = buffer.parent.getvalue()
    buffer.close()
    return encoded
def test_single_fragment_no_delimiter(self):
    """Test single fragment is returned OK"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # item tag
        b'\x04\x00\x00\x00'      # item length 4
        b'\x01\x00\x00\x00'      # item value
    )
    buffer.is_little_endian = True
    assert read_item(buffer) == b'\x01\x00\x00\x00'
def test_read_le_ul(self):
    """Test DicomIO.read_leUL indirectly"""
    # UL values are 4 bytes fixed, little endian.
    fp = DicomBytesIO(
        b'\x00\x00\x00\x00\xFF\xFF\x00\x00\xFE\xFF\xFF\xFF')
    fp.is_little_endian = True
    for expected in (0, 0xFFFF, 0xFFFFFFFE):
        assert fp.read_leUL() == expected
def test_not_little_endian(self):
    """Test reading big endian raises exception"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x00\x00\x00\x00'
    )
    buffer.is_little_endian = False
    with pytest.raises(ValueError,
                       match="'fp.is_little_endian' must be True"):
        get_frame_offsets(buffer)
def test_read_be_us(self):
    """Test DicomIO.read_beUS indirectly"""
    # US values are 2 bytes fixed, big endian.
    fp = DicomBytesIO(b'\x00\x00\x00\xFF\xFF\xFE')
    fp.is_little_endian = True
    for expected in (0, 255, 0xFFFE):
        assert fp.read_beUS() == expected
def test_single_fragment_no_delimiter(self):
    """Test single fragment is returned OK"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
    )
    buffer.is_little_endian = True
    fragments = generate_pixel_data_fragment(buffer)
    assert next(fragments) == b'\x01\x00\x00\x00'
    # The generator is exhausted after the single fragment.
    with pytest.raises(StopIteration):
        next(fragments)
def test_write_be_us(self):
    """Test DicomIO.write_beUS indirectly"""
    fp = DicomBytesIO()
    fp.is_little_endian = False
    assert fp.getvalue() == b''
    # Successive writes append 2 big-endian bytes each.
    expected = b''
    for value, encoded in ((0, b'\x00\x00'),
                           (255, b'\x00\xFF'),
                           (65534, b'\xFF\xFE')):
        fp.write_beUS(value)
        expected += encoded
        assert fp.getvalue() == expected
def test_multi_frame(self):
    """Test reading multi-frame BOT item"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # item tag
        b'\x10\x00\x00\x00'      # length 16 -> four offsets
        b'\x00\x00\x00\x00'      # 0
        b'\x66\x13\x00\x00'      # 4966
        b'\xF4\x25\x00\x00'      # 9716
        b'\xFE\x37\x00\x00'      # 14334
    )
    buffer.is_little_endian = True
    assert get_frame_offsets(buffer) == [0, 4966, 9716, 14334]
def test_not_little_endian(self):
    """Test reading big endian raises exception"""
    bytestream = (b'\xFE\xFF\x00\xE0'
                  b'\x04\x00\x00\x00'
                  b'\x01\x00\x00\x00')
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = False
    fragments = generate_pixel_data_fragment(fp)
    # Use pytest.raises instead of the deprecated assert_raises_regex
    # helper, consistent with the other tests in this file.
    with pytest.raises(ValueError,
                       match="'fp.is_little_endian' must be True"):
        next(fragments)
    # The generator is exhausted after raising.
    pytest.raises(StopIteration, next, fragments)
def test_bad_length_multiple(self):
    """Test raises exception if the item length is not a multiple of 4."""
    # Length 10
    bytestream = (b'\xFE\xFF\x00\xE0'
                  b'\x0A\x00\x00\x00'
                  b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A')
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = True
    # Use pytest.raises instead of the deprecated assert_raises_regex
    # helper, consistent with the other tests in this file.
    with pytest.raises(ValueError,
                       match="The length of the Basic Offset Table item "
                             "is not a multiple of 4."):
        get_frame_offsets(fp)
def test_bad_tag(self):
    """Test raises exception if no item tag."""
    # (fffe,e100)
    bytestream = (b'\xFE\xFF\x00\xE1'
                  b'\x08\x00\x00\x00'
                  b'\x01\x02\x03\x04\x05\x06\x07\x08')
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = True
    # pytest.raises replaces the deprecated assert_raises_regex helper;
    # raw strings keep the escaped parentheses valid regex rather than
    # invalid string escape sequences.
    with pytest.raises(ValueError,
                       match=r"Unexpected tag '\(fffe, e100\)' when "
                             r"parsing the Basic Table Offset item."):
        get_frame_offsets(fp)
def test_multi_frame(self):
    """Test reading multi-frame BOT item"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # item tag
        b'\x10\x00\x00\x00'      # length 16 -> four offsets
        b'\x00\x00\x00\x00'      # 0
        b'\x66\x13\x00\x00'      # 4966
        b'\xF4\x25\x00\x00'      # 9716
        b'\xFE\x37\x00\x00'      # 14334
    )
    buffer.is_little_endian = True
    # A populated BOT is reported as present along with its offsets.
    assert get_frame_offsets(buffer) == (True, [0, 4966, 9716, 14334])
def test_item_undefined_length(self):
    """Test exception raised if item length undefined."""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\xFF\xFF\xFF\xFF'      # undefined item length
        b'\x00\x00\x00\x01'
    )
    buffer.is_little_endian = True
    msg = ("Encapsulated data fragment had Undefined "
           "Length at data position 0x4")
    with pytest.raises(ValueError, match=msg):
        read_item(buffer)
def test_multi_fragments_no_delimiter(self):
    """Test multi fragments are returned OK"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # first item, length 4
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'      # second item, length 6
        b'\x06\x00\x00\x00'
        b'\x01\x02\x03\x04\x05\x06'
    )
    buffer.is_little_endian = True
    assert get_nr_fragments(buffer) == 2
def test_single_fragment_delimiter(self):
    """Test single fragment is returned OK with sequence delimiter item"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\xDD\xE0'      # sequence delimiter
    )
    buffer.is_little_endian = True
    fragments = generate_pixel_data_fragment(buffer)
    assert next(fragments) == b'\x01\x00\x00\x00'
    with pytest.raises(StopIteration):
        next(fragments)
def encode(ds: Dataset, is_implicit_vr: bool, is_little_endian: bool,
           deflated: bool = False) -> Optional[bytes]:
    """Encode a *pydicom* :class:`~pydicom.dataset.Dataset` `ds`.

    .. versionchanged:: 1.5

        Added `deflated` keyword parameter

    Parameters
    ----------
    ds : pydicom.dataset.Dataset
        The dataset to encode
    is_implicit_vr : bool
        The element encoding scheme the dataset will be encoded with,
        ``True`` for implicit VR, ``False`` for explicit VR.
    is_little_endian : bool
        The byte ordering the dataset will be encoded in, ``True`` for
        little endian, ``False`` for big endian.
    deflated : bool, optional
        ``True`` if the dataset is to be encoded using *Deflated
        Explicit VR Little Endian* transfer syntax (default ``False``).

    Returns
    -------
    bytes or None
        The encoded dataset as :class:`bytes` (if successful) or ``None``
        if the encoding failed.
    """
    # pylint: disable=broad-except
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = is_implicit_vr
    buffer.is_little_endian = is_little_endian
    try:
        write_dataset(buffer, ds)
    except Exception as exc:
        LOGGER.error("pydicom.write_dataset() failed:")
        LOGGER.exception(exc)
        buffer.close()
        return None

    encoded: bytes = buffer.parent.getvalue()  # type: ignore
    buffer.close()

    if not deflated:
        return encoded

    # Raw deflate (no zlib header) then pad to an even length
    compressor = zlib.compressobj(
        zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS)
    encoded = compressor.compress(encoded) + compressor.flush()
    if len(encoded) % 2:
        encoded += b'\x00'

    return encoded
def test_bad_length_multiple(self):
    """Test raises exception if the item length is not a multiple of 4."""
    # Item with length 10, which is not a multiple of 4.
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x0A\x00\x00\x00'
        b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A'
    )
    buffer.is_little_endian = True
    msg = ("The length of the Basic Offset Table item"
           " is not a multiple of 4.")
    with pytest.raises(ValueError, match=msg):
        get_frame_offsets(buffer)
def test_write_be_ul(self):
    """Test DicomIO.write_beUL indirectly"""
    fp = DicomBytesIO()
    fp.is_little_endian = False
    assert fp.getvalue() == b''
    # Successive writes append 4 big-endian bytes each.
    expected = b''
    for value, encoded in (
            (0, b'\x00\x00\x00\x00'),
            (65535, b'\x00\x00\xFF\xFF'),
            (4294967294, b'\xFF\xFF\xFF\xFE')):
        fp.write_beUL(value)
        expected += encoded
        assert fp.getvalue() == expected
def test_multi_fragments_no_delimiter(self):
    """Test multi fragments are returned OK"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # first item, length 4
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'      # second item, length 6
        b'\x06\x00\x00\x00'
        b'\x01\x02\x03\x04\x05\x06'
    )
    buffer.is_little_endian = True
    assert read_item(buffer) == b'\x01\x00\x00\x00'
    assert read_item(buffer) == b'\x01\x02\x03\x04\x05\x06'
def test_bad_tag(self):
    """Test raises exception if no item tag."""
    # (fffe,e100)
    bytestream = (b'\xFE\xFF\x00\xE1'
                  b'\x08\x00\x00\x00'
                  b'\x01\x02\x03\x04\x05\x06\x07\x08')
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = True
    # Raw strings so the escaped parentheses are valid regex rather than
    # invalid string escape sequences (DeprecationWarning on CPython).
    with pytest.raises(ValueError,
                       match=r"Unexpected tag '\(fffe, e100\)' when "
                             r"parsing the Basic Table Offset item."):
        get_frame_offsets(fp)
def test_not_little_endian(self):
    """Test reading big endian raises exception"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
    )
    buffer.is_little_endian = False
    fragments = generate_pixel_data_fragment(buffer)
    with pytest.raises(ValueError,
                       match="'fp.is_little_endian' must be True"):
        next(fragments)
    # The generator is exhausted after raising.
    with pytest.raises(StopIteration):
        next(fragments)
def test_item_sequence_delimiter(self):
    """Test that the fragments are returned if seq delimiter hit."""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # first item
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\xDD\xE0'      # sequence delimiter ends the count
        b'\x00\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'      # item after the delimiter: not counted
        b'\x04\x00\x00\x00'
        b'\x02\x00\x00\x00'
    )
    buffer.is_little_endian = True
    assert get_nr_fragments(buffer) == 1
def test_multi_fragments_delimiter(self):
    """Test multi fragments are returned OK with sequence delimiter item"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'
        b'\x06\x00\x00\x00'
        b'\x01\x02\x03\x04\x05\x06'
        b'\xFE\xFF\xDD\xE0'      # trailing sequence delimiter
    )
    buffer.is_little_endian = True
    assert read_item(buffer) == b'\x01\x00\x00\x00'
    assert read_item(buffer) == b'\x01\x02\x03\x04\x05\x06'
def test_item_undefined_length(self):
    """Test exception raised if item length undefined."""
    bytestream = (b'\xFE\xFF\x00\xE0'
                  b'\xFF\xFF\xFF\xFF'
                  b'\x00\x00\x00\x01')
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = True
    fragments = generate_pixel_data_fragment(fp)
    # Use pytest.raises instead of the deprecated assert_raises_regex
    # helper, consistent with the other tests in this file.
    with pytest.raises(ValueError,
                       match="Undefined item length at offset 4 when "
                             "parsing the encapsulated pixel data "
                             "fragments."):
        next(fragments)
    # The generator is exhausted after raising.
    pytest.raises(StopIteration, next, fragments)
def decode(bytestring, is_implicit_vr, is_little_endian, deflated=False): """Decode `bytestring` to a *pydicom* :class:`~pydicom.dataset.Dataset`. .. versionchanged:: 1.5 Added `deflated` keyword parameter Parameters ---------- byestring : io.BytesIO The encoded dataset in the DIMSE Message sent from the peer AE. is_implicit_vr : bool The dataset is encoded as implicit (``True``) or explicit VR (``False``). is_little_endian : bool The byte ordering of the encoded dataset, ``True`` for little endian, ``False`` for big endian. deflated : bool, optional ``True`` if the dataset has been encoded using *Deflated Explicit VR Little Endian* transfer syntax (default ``False``). Returns ------- pydicom.dataset.Dataset The decoded dataset. """ ## Logging transfer_syntax = '' if deflated: transfer_syntax = "Deflated " transfer_syntax += "Little Endian" if is_little_endian else "Big Endian" if is_implicit_vr: transfer_syntax += " Implicit" else: transfer_syntax += " Explicit" LOGGER.debug('pydicom.read_dataset() TransferSyntax="%s"', transfer_syntax) # Rewind to the start of the stream bytestring.seek(0) if deflated: # Decompress the dataset bytestring = DicomBytesIO( zlib.decompress(bytestring.getvalue(), -zlib.MAX_WBITS)) bytestring.is_implicit_VR = is_implicit_vr bytestring.is_little_endian = is_little_endian # Decode the dataset return read_dataset(bytestring, is_implicit_vr, is_little_endian)
def test_multi_fragments_no_delimiter(self):
    """Test multi fragments are returned OK"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'      # first item, length 4
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'      # second item, length 6
        b'\x06\x00\x00\x00'
        b'\x01\x02\x03\x04\x05\x06'
    )
    buffer.is_little_endian = True
    fragments = generate_pixel_data_fragment(buffer)
    assert next(fragments) == b'\x01\x00\x00\x00'
    assert next(fragments) == b'\x01\x02\x03\x04\x05\x06'
    with pytest.raises(StopIteration):
        next(fragments)
def test_item_undefined_length(self):
    """Test exception raised if item length undefined."""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\xFF\xFF\xFF\xFF'      # undefined item length
        b'\x00\x00\x00\x01'
    )
    buffer.is_little_endian = True
    fragments = generate_pixel_data_fragment(buffer)
    msg = ("Undefined item length at offset 4 when "
           "parsing the encapsulated pixel data "
           "fragments.")
    with pytest.raises(ValueError, match=msg):
        next(fragments)
    with pytest.raises(StopIteration):
        next(fragments)
def test_item_sequence_delimiter_zero_length(self):
    """Test that the fragments are returned if seq delimiter hit."""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\xDD\xE0'      # zero-length sequence delimiter
        b'\x00\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x02\x00\x00\x00'
    )
    buffer.is_little_endian = True
    assert read_item(buffer) == b'\x01\x00\x00\x00'
    # The delimiter itself reads as None
    assert read_item(buffer) is None
    assert read_item(buffer) == b'\x02\x00\x00\x00'
def test_item_sequence_delimiter(self):
    """Test non-zero length seq delimiter reads correctly."""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\xDD\xE0'      # delimiter with non-zero length 4
        b'\x04\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x02\x00\x00\x00'
    )
    buffer.is_little_endian = True
    assert read_item(buffer) == b'\x01\x00\x00\x00'
    # The delimiter itself reads as None
    assert read_item(buffer) is None
    assert read_item(buffer) == b'\x02\x00\x00\x00'
def test_item_sequence_delimiter(self):
    """Test that the fragments are returned if seq delimiter hit."""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\xFE\xFF\xDD\xE0'      # delimiter stops fragment generation
        b'\x00\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'      # item after the delimiter: not yielded
        b'\x04\x00\x00\x00'
        b'\x02\x00\x00\x00'
    )
    buffer.is_little_endian = True
    fragments = generate_pixel_data_fragment(buffer)
    assert next(fragments) == b'\x01\x00\x00\x00'
    with pytest.raises(StopIteration):
        next(fragments)
def test_empty_sequence_is_handled_as_array(self):
    """Test an empty sequence element behaves like an empty list and
    round-trips through write/read."""
    ds = Dataset()
    ds.AcquisitionContextSequence = []
    elem = ds['AcquisitionContextSequence']
    assert bool(elem.value) is False
    assert elem.VM == 0
    assert elem.value == []

    # Round trip through implicit VR little endian.
    fp = DicomBytesIO()
    fp.is_little_endian = True
    fp.is_implicit_VR = True
    filewriter.write_dataset(fp, ds)
    ds_read = dcmread(fp, force=True)
    elem = ds_read['AcquisitionContextSequence']
    assert elem.VM == 0
    assert elem.value == []
def test_japanese_multi_byte_personname(self):
    """Test japanese person name which has multi byte strings are
    correctly encoded."""
    file_path = get_charset_files('chrH32.dcm')[0]
    ds = dcmread(file_path)
    ds.decode()

    # PersonName3 only: clear the cached byte string and check that
    # re-encoding reproduces it.
    if hasattr(ds.PatientName, 'original_string'):
        original_string = ds.PatientName.original_string
        ds.PatientName.original_string = None
        fp = DicomBytesIO()
        fp.is_implicit_VR = False
        fp.is_little_endian = True
        ds.save_as(fp, write_like_original=False)
        fp.seek(0)
        ds_out = dcmread(fp)
        assert original_string == ds_out.PatientName.original_string
def test_item_bad_tag(self):
    """Test item is read if it has an unexpected tag"""
    # This should raise an exception instead
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\x10\x00\x10\x00'      # unexpected (0010,0010) tag
        b'\x04\x00\x00\x00'
        b'\xFF\x00\xFF\x00'
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x02\x00\x00\x00'
    )
    buffer.is_little_endian = True
    assert read_item(buffer) == b'\x01\x00\x00\x00'
    assert read_item(buffer) == b'\xFF\x00\xFF\x00'
    assert read_item(buffer) == b'\x02\x00\x00\x00'
def test_item_bad_tag(self):
    """Test exception raised if item has unexpected tag"""
    buffer = DicomBytesIO(
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x01\x00\x00\x00'
        b'\x10\x00\x10\x00'      # unexpected (0010,0010) tag
        b'\x00\x00\x00\x00'
        b'\xFE\xFF\x00\xE0'
        b'\x04\x00\x00\x00'
        b'\x02\x00\x00\x00'
    )
    buffer.is_little_endian = True
    msg = (
        r"Unexpected tag '\(0010, 0010\)' at offset 12 when parsing the "
        r"encapsulated pixel data fragment items.")
    with pytest.raises(ValueError, match=msg):
        get_nr_fragments(buffer)
def test_lut_descriptor(self):
    """Regression test for #942: incorrect first value"""
    # Tags that get the LUT Descriptor treatment.
    prefixes = [
        b'\x28\x00\x01\x11',
        b'\x28\x00\x02\x11',
        b'\x28\x00\x03\x11',
        b'\x28\x00\x02\x30',
    ]
    # Explicit VR 'SS', length 6, then three 2-byte values.
    suffix = b'\x53\x53\x06\x00\x00\xf5\x00\xf8\x10\x00'
    for raw_tag in prefixes:
        tag = unpack('<2H', raw_tag)
        stream = DicomBytesIO(raw_tag + suffix)
        stream.is_little_endian = True
        stream.is_implicit_VR = False
        elem = dcmread(stream, force=True)[tag]
        assert elem.VR == 'SS'
        # First value is read as unsigned (62720 > SS max) — the #942 fix.
        assert elem.value == [62720, -2048, 16]
def test_item_bad_tag(self):
    """Test exception raised if item has unexpected tag"""
    bytestream = (b'\xFE\xFF\x00\xE0'
                  b'\x04\x00\x00\x00'
                  b'\x01\x00\x00\x00'
                  b'\x10\x00\x10\x00'
                  b'\x00\x00\x00\x00'
                  b'\xFE\xFF\x00\xE0'
                  b'\x04\x00\x00\x00'
                  b'\x02\x00\x00\x00')
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = True
    fragments = generate_pixel_data_fragment(fp)
    assert next(fragments) == b'\x01\x00\x00\x00'
    # pytest.raises replaces the deprecated assert_raises_regex helper;
    # raw strings keep the escaped parentheses valid regex.
    with pytest.raises(
            ValueError,
            match=r"Unexpected tag '\(0010, 0010\)' at offset 12 "
                  r"when parsing the encapsulated pixel data "
                  r"fragment items."):
        next(fragments)
    pytest.raises(StopIteration, next, fragments)
def decode_data_sequence(data):
    """Read encapsulated data and return a list of strings.

    Parameters
    ----------
    data : bytes
        String of encapsulated data, typically dataset.PixelData

    Returns
    -------
    list of bytes
        All fragments in a list of byte strings
    """
    # Convert data into a memory-mapped file
    fp = DicomBytesIO(data)
    fp.is_little_endian = True  # DICOM standard requires this
    BasicOffsetTable = read_item(fp)  # NOQA
    seq = []

    while True:
        item = read_item(fp)
        # None is returned if get to Sequence Delimiter
        if not item:
            break
        seq.append(item)

    # Fix: the function previously fell off the end without returning
    # the fragments it had collected.
    return seq
def defragment_data(data):
    """Read encapsulated data and return one continuous string.

    Parameters
    ----------
    data : bytes
        String of encapsulated data, typically dataset.PixelData

    Returns
    -------
    bytes
        All fragments concatenated together as a byte string

    If PixelData has multiple frames, then should separate out before
    calling this routine.
    """
    # Convert data into a memory-mapped file
    fp = DicomBytesIO(data)
    fp.is_little_endian = True  # DICOM standard requires this
    BasicOffsetTable = read_item(fp)  # NOQA
    seq = []

    while True:
        item = read_item(fp)
        # None is returned if get to Sequence Delimiter
        if not item:
            break
        seq.append(item)

    # Fix: return was missing; fragments are bytes so join with a bytes
    # separator (a str "" separator would raise TypeError).
    return b"".join(seq)
def encode_element(elem, is_implicit_VR=True, is_little_endian=True):
    """Return the encoded `elem`.

    Parameters
    ----------
    elem : pydicom.dataelem.DataElement
        The element to encode
    is_implicit_VR : bool
        Encode using implicit VR, default True
    is_little_endian : bool
        Encode using little endian, default True

    Returns
    -------
    str or bytes
        The encoded element as str (python2) or bytes (python3)
    """
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = is_implicit_VR
    buffer.is_little_endian = is_little_endian
    write_data_element(buffer, elem)
    encoded = buffer.parent.getvalue()
    buffer.close()
    return encoded
def encode(ds, is_implicit_vr, is_little_endian):
    """Encode a *pydicom* :class:`~pydicom.dataset.Dataset` `ds`.

    Parameters
    ----------
    ds : pydicom.dataset.Dataset
        The dataset to encode
    is_implicit_vr : bool
        The element encoding scheme the dataset will be encoded with,
        ``True`` for implicit VR, ``False`` for explicit VR.
    is_little_endian : bool
        The byte ordering the dataset will be encoded in, ``True`` for
        little endian, ``False`` for big endian.

    Returns
    -------
    bytes or None
        The encoded dataset as :class:`bytes` (if successful) or ``None``
        if the encoding failed.
    """
    # pylint: disable=broad-except
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = is_implicit_vr
    buffer.is_little_endian = is_little_endian
    try:
        write_dataset(buffer, ds)
    except Exception as exc:
        LOGGER.error("pydicom.write_dataset() failed:")
        LOGGER.exception(exc)
        buffer.close()
        return None

    encoded = buffer.parent.getvalue()
    buffer.close()
    return encoded
def write_file_meta_info(fp, file_meta, enforce_standard=True):
    """Write the File Meta Information elements in `file_meta` to `fp`.

    If `enforce_standard` is ``True`` then the file-like `fp` should be
    positioned past the 128 byte preamble + 4 byte prefix (which should
    already have been written).

    **DICOM File Meta Information Group Elements**

    From the DICOM standard, Part 10,
    :dcm:`Section 7.1<part10/chapter_7.html#sect_7.1>`,
    any DICOM file shall contain a 128-byte preamble, a 4-byte DICOM prefix
    'DICM' and (at a minimum) the following Type 1 DICOM Elements (from
    :dcm:`Table 7.1-1<part10/chapter_7.html#table_7.1-1>`):

    * (0002,0000) *File Meta Information Group Length*, UL, 4
    * (0002,0001) *File Meta Information Version*, OB, 2
    * (0002,0002) *Media Storage SOP Class UID*, UI, N
    * (0002,0003) *Media Storage SOP Instance UID*, UI, N
    * (0002,0010) *Transfer Syntax UID*, UI, N
    * (0002,0012) *Implementation Class UID*, UI, N

    If `enforce_standard` is ``True`` then (0002,0000) will be added/updated,
    (0002,0001) and (0002,0012) will be added if not already present and the
    other required elements will be checked to see if they exist. If
    `enforce_standard` is ``False`` then `file_meta` will be written as is
    after minimal validation checking.

    The following Type 3/1C Elements may also be present:

    * (0002,0013) *Implementation Version Name*, SH, N
    * (0002,0016) *Source Application Entity Title*, AE, N
    * (0002,0017) *Sending Application Entity Title*, AE, N
    * (0002,0018) *Receiving Application Entity Title*, AE, N
    * (0002,0102) *Private Information*, OB, N
    * (0002,0100) *Private Information Creator UID*, UI, N

    If `enforce_standard` is ``True`` then (0002,0013) will be added/updated.

    *Encoding*

    The encoding of the *File Meta Information* shall be *Explicit VR Little
    Endian*.

    Parameters
    ----------
    fp : file-like
        The file-like to write the File Meta Information to.
    file_meta : pydicom.dataset.Dataset
        The File Meta Information elements.
    enforce_standard : bool
        If ``False``, then only the *File Meta Information* elements
        already in `file_meta` will be written to `fp`. If ``True``
        (default) then a DICOM Standards conformant File Meta will be
        written to `fp`.

    Raises
    ------
    ValueError
        If `enforce_standard` is ``True`` and any of the required *File
        Meta Information* elements are missing from `file_meta`, with the
        exception of (0002,0000), (0002,0001) and (0002,0012).
    ValueError
        If any non-Group 2 Elements are present in `file_meta`.
    """
    validate_file_meta(file_meta, enforce_standard)

    if enforce_standard and 'FileMetaInformationGroupLength' not in file_meta:
        # Will be updated with the actual length later
        file_meta.FileMetaInformationGroupLength = 0

    # Write the File Meta Information Group elements
    # first write into a buffer to avoid seeking back, that can be
    # expensive and is not allowed if writing into a zip file
    buffer = DicomBytesIO()
    buffer.is_little_endian = True
    buffer.is_implicit_VR = False
    write_dataset(buffer, file_meta)

    # If FileMetaInformationGroupLength is present it will be the first written
    # element and we must update its value to the correct length.
    if 'FileMetaInformationGroupLength' in file_meta:
        # Update the FileMetaInformationGroupLength value, which is the number
        # of bytes from the end of the FileMetaInformationGroupLength element
        # to the end of all the File Meta Information elements.
        # FileMetaInformationGroupLength has a VR of 'UL' and so has a value
        # that is 4 bytes fixed. The total length of when encoded as
        # Explicit VR must therefore be 12 bytes.
        file_meta.FileMetaInformationGroupLength = buffer.tell() - 12
        buffer.seek(0)
        write_data_element(buffer, file_meta[0x00020000])

    fp.write(buffer.getvalue())