def test_charset_patient_names(self, filename, patient_name):
    """Verify patient names survive decode and write/read round trips."""
    # Read the file and decode the character set of its values.
    path = get_charset_files(filename + '.dcm')[0]
    ds = dcmread(path)
    ds.decode()
    assert patient_name == ds.PatientName

    # Round trip through an in-memory explicit VR little endian write.
    buffer = DicomBytesIO()
    buffer.is_implicit_VR = False
    buffer.is_little_endian = True
    ds.save_as(buffer, write_like_original=False)
    buffer.seek(0)
    ds = dcmread(buffer)
    assert patient_name == ds.PatientName

    # Repeat the round trip without the cached original byte string
    # (PersonName3 only).
    if hasattr(ds.PatientName, 'original_string'):
        ds.PatientName.original_string = None
        buffer = DicomBytesIO()
        buffer.is_implicit_VR = False
        buffer.is_little_endian = True
        ds.save_as(buffer, write_like_original=False)
        buffer.seek(0)
        ds = dcmread(buffer)
        assert patient_name == ds.PatientName
def test_equal():
    """DicomBase equality: from dict, from file, after re-assignment,
    and after deepcopy."""
    base_a = DicomBase.from_dict({
        'Manufacturer': 'PyMedPhys',
        'PatientName': 'Python^Monte'
    })
    base_b = DicomBase.from_dict({
        'Manufacturer': 'PyMedPhys',
        'PatientName': 'Python^Monte'
    })
    assert base_a == base_b  # Equality from dict

    try:
        stream_a = DicomBytesIO()
        base_a.to_file(stream_a)
        stream_b = DicomBytesIO()
        base_b.to_file(stream_b)

        from_file_a = DicomBase.from_file(stream_a)
        from_file_b = DicomBase.from_file(stream_b)
        # Equality from file (implicitly also from dataset)
        assert from_file_a == from_file_b

        # Negative case: diverge one name, equality must fail.
        from_file_a.dataset.PatientName = 'test^PatientName change'
        assert from_file_a != from_file_b

        # Restore the name: equality holds again post re-assignment.
        from_file_a.dataset.PatientName = 'Python^Monte'
        assert from_file_a == from_file_b

        # Equality from deepcopy.
        duplicated = deepcopy(from_file_a)
        assert from_file_a == duplicated
    finally:
        stream_a.close()
        stream_b.close()
def test_write_tag(self):
    """Test DicomIO.write_tag indirectly"""
    tag = Tag(0x01020304)

    # Little endian: each 16-bit half of the tag is byte swapped.
    buffer = DicomBytesIO()
    buffer.is_little_endian = True
    buffer.write_tag(tag)
    assert buffer.getvalue() == b'\x02\x01\x04\x03'

    # Big endian: bytes come out in natural order.
    buffer = DicomBytesIO()
    buffer.is_little_endian = False
    buffer.write_tag(tag)
    assert buffer.getvalue() == b'\x01\x02\x03\x04'
def test_seq_item_looks_like_explicit_VR(self):
    # Regression test for issue 999: build an implicit VR dataset with a
    # "normal" group 8 tag followed by a sequence item whose element
    # length bytes happen to match a plausible explicit VR code.
    ds = Dataset()
    ds.file_meta = Dataset()
    ds.file_meta.MediaStorageSOPClassUID = "1.1.1"
    ds.file_meta.MediaStorageSOPInstanceUID = "2.2.2"
    ds.is_implicit_VR = True
    ds.is_little_endian = True
    ds.SOPClassUID = '9.9.9'  # First item group 8 in top-level dataset

    item = Dataset()
    item.BadPixelImage = b"\3" * 0x5244  # length looks like "DR"
    seq = Sequence()
    seq.append(item)
    ds.ReferencedImageSequence = seq

    stream = DicomBytesIO()
    ds.save_as(stream, write_like_original=False)

    # Read the constructed dataset back in.  In the original issue this
    # warned that the data appeared to be explicit VR and then raised
    # NotImplemented for the unknown VR.
    stream.seek(0)
    ds = dcmread(stream)
    ds.remove_private_tags()  # forces it to actually parse SQ
def dropzone_handler():
    """Accumulate uploaded files across dropzone POSTs; when the request
    carries a 'done' flag, persist the whole batch to the database as a
    single ``Dicom`` row (pickled list of raw file bytes + thumbnail).

    NOTE(review): state is kept in the module-level globals ``all_files``
    and ``file_count_global`` shared across requests -- presumably only
    safe for a single-worker deployment; verify.
    NOTE(review): files attached to the final ('done') request itself are
    read into ``binary_files`` but never added to ``all_files`` -- confirm
    the client sends 'done' with no files.
    """
    files_list = []
    done = False
    # check if done value exists
    done = bool('done' in request.form.to_dict())
    ## request files to list of binary objects
    for key, f in request.files.items():
        if key.startswith('file'):
            files_list.append(f)
    file_count = len(files_list)
    binary_files = [file.read() for file in files_list]  # list of bytes objects
    if not done:
        # add to overall list, but not database
        global all_files, file_count_global
        all_files.extend(binary_files)
        file_count_global += file_count
        return ''
    else:
        # add all files from post requests to database as one object
        dicom_list = []
        for byte_file in all_files:  # list of all dicom files in binary
            # convert to dicom object
            raw = DicomBytesIO(byte_file)
            dicom_object = dcmread(raw)
            dicom_list.append(dicom_object)
        # find median dicom file to generate thumbnail
        median_i = len(dicom_list) // 2
        median_dicom = dicom_list[median_i]
        ### generate thumbnail from median file
        tn_bytes = dicom_to_thumbnail(median_dicom)
        binary_blob = pickle.dumps(all_files)
        print("Session dropzone handler", session['id'])
        ## database upload
        batch = Dicom(user_id=current_user.id,
                      dicom_stack=binary_blob,
                      thumbnail=tn_bytes,
                      file_count=file_count_global,
                      session_id=str(session['id']))
        db.session.add(batch)
        db.session.commit()
        # set globals back to default values
        all_files = []
        file_count_global = 0
        return 'dropzone done'
def test_deferred_data_element_deprecated():
    """Test the deprecation warning is working"""
    stream = DicomBytesIO()
    stream.is_little_endian = True
    stream.is_implicit_VR = True
    # Instantiating the deprecated class must emit a DeprecationWarning.
    with pytest.deprecated_call():
        elem = DeferredDataElement(0x00000000, 'UL', stream, 0, 0, 4)
def decode_data_sequence(data):
    """Read encapsulated data and return a list of strings.

    Parameters
    ----------
    data : bytes or str
        The encapsulated data, typically the value from
        ``Dataset.PixelData``.

    Returns
    -------
    list of bytes
        All fragments as a list of ``bytes``.
    """
    # Convert data into a memory-mapped file
    with DicomBytesIO(data) as fp:

        # DICOM standard requires this
        fp.is_little_endian = True
        BasicOffsetTable = read_item(fp)  # NOQA
        seq = []

        while True:
            item = read_item(fp)

            # None is returned if get to Sequence Delimiter
            if not item:
                break
            seq.append(item)

        # Fix: the fragments were collected but the function previously
        # fell off the end and returned None (the old ``# XXX should
        # return seq`` marker).
        return seq
def datasetToBinary(ds: Dataset):
    """Serialize *ds* to its DICOM byte representation and return it."""
    fix_meta_info(ds)
    with DicomBytesIO() as buffer:
        # Write with a full file meta header, rewind, then dump the bytes.
        dcmwrite(buffer, ds, write_like_original=False)
        buffer.seek(0)
        return buffer.read()
def test_zero_length(self):
    """Test reading BOT with zero length"""
    # Item tag (FFFE,E000) followed by a zero item length.
    stream = DicomBytesIO(b'\xFE\xFF\x00\xE0'
                          b'\x00\x00\x00\x00')
    stream.is_little_endian = True
    assert [0] == get_frame_offsets(stream)
def test_read_known_private_tag_implicit(self):
    """A private tag gains its VR/name/value once its dictionary entry
    is registered; unregistered private tags stay UN."""
    buffer = DicomBytesIO()
    ds = Dataset()
    ds.is_implicit_VR = True
    ds.is_little_endian = True
    # Private creator plus two private elements; only the first element
    # gets a dictionary entry registered below.
    ds[0x00410010] = RawDataElement(
        Tag(0x00410010), "LO", 8, b"ACME 3.2", 0, True, True)
    ds[0x00411001] = RawDataElement(
        Tag(0x00411001), "US", 2, b"\x2A\x00", 0, True, True)
    ds[0x00431001] = RawDataElement(
        Tag(0x00431001), "SH", 8, b"Unknown ", 0, True, True)
    ds.save_as(buffer)

    # Before registration the element reads back as raw UN bytes.
    ds = dcmread(buffer, force=True)
    elem = ds[0x00411001]
    assert elem.VR == "UN"
    assert elem.name == "Private tag data"
    assert elem.value == b"\x2A\x00"

    # After registration the same bytes decode as a US value.
    add_private_dict_entry("ACME 3.2", 0x00410001, "US", "Some Number")
    ds = dcmread(buffer, force=True)
    elem = ds[0x00411001]
    assert elem.VR == "US"
    assert elem.name == "[Some Number]"
    assert elem.value == 42

    # Unknown private tag is handled as before
    elem = ds[0x00431001]
    assert elem.VR == "UN"
    assert elem.name == "Private tag data"
    assert elem.value == b"Unknown "
def test_save_as(self):
    """Test Dataset.save_as"""
    buffer = DicomBytesIO()
    ds = Dataset()
    ds.PatientName = 'CITIZEN'

    # is_implicit_VR and is_little_endian must BOTH be set; a missing
    # one raises AttributeError.
    self.assertRaises(AttributeError, ds.save_as, buffer,
                      write_like_original=False)
    ds.is_implicit_VR = True
    self.assertRaises(AttributeError, ds.save_as, buffer,
                      write_like_original=False)
    ds.is_little_endian = True
    del ds.is_implicit_VR
    self.assertRaises(AttributeError, ds.save_as, buffer,
                      write_like_original=False)

    # With both flags and a complete file meta the write succeeds.
    ds.is_implicit_VR = True
    ds.file_meta = Dataset()
    ds.file_meta.MediaStorageSOPClassUID = '1.1'
    ds.file_meta.MediaStorageSOPInstanceUID = '1.2'
    ds.file_meta.TransferSyntaxUID = '1.3'
    ds.file_meta.ImplementationClassUID = '1.4'
    ds.save_as(buffer, write_like_original=False)
def test_read_be_tag(self):
    """Test DicomIO.read_be_tag indirectly"""
    # A tag is two 16-bit words (2 + 2 = 4 bytes); trailing bytes ignored.
    stream = DicomBytesIO(b'\x01\x02\x03\x04\x05\x06')
    stream.is_little_endian = False
    assert Tag(stream.read_be_tag()) == 0x01020304
def encode_element(elem, is_implicit_vr=True, is_little_endian=True):
    """Encode a *pydicom* :class:`~pydicom.dataelem.DataElement` `elem`.

    .. deprecated:: 1.5

        Will be removed in version 2.0, use *pydicom* instead.

    Parameters
    ----------
    elem : pydicom.dataelem.DataElement
        The element to encode.
    is_implicit_vr : bool, optional
        The element encoding scheme the element will be encoded with,
        ``True`` for implicit VR (default), ``False`` for explicit VR.
    is_little_endian : bool, optional
        The byte ordering the element will be encoded in, ``True`` for
        little endian (default), ``False`` for big endian.

    Returns
    -------
    bytes
        The encoded element.
    """
    # Fix: use the context manager so the buffer is closed even if
    # write_data_element() raises (previously fp leaked on error).
    with DicomBytesIO() as fp:
        fp.is_implicit_VR = is_implicit_vr
        fp.is_little_endian = is_little_endian
        write_data_element(fp, elem)
        # DicomBytesIO exposes the underlying BytesIO as ``parent``.
        return fp.parent.getvalue()
async def readFile(filename):
    """Asynchronously read *filename* and parse it as a DICOM dataset."""
    with DicomBytesIO() as buffer:
        # Slurp the raw bytes asynchronously, then parse from memory.
        async with aiofiles.open(filename, mode='rb') as f:
            buffer.write(await f.read())
        buffer.seek(0)
        return dcmread(buffer)
def encode_element(elem, is_implicit_vr=True, is_little_endian=True):
    """Encode a pydicom DataElement `elem` to a byte stream.

    The default is to encode the element as implicit VR little endian.

    Parameters
    ----------
    elem : pydicom.dataelem.DataElement
        The element to encode
    is_implicit_vr : bool, optional
        The element encoding scheme the element will be encoded with,
        default is True.
    is_little_endian : bool, optional
        The byte ordering the element will be encoded in, default is True.

    Returns
    -------
    bytes
        The encoded element.
    """
    # Fix: use the context manager so the buffer is closed even if
    # write_data_element() raises (previously fp leaked on error).
    with DicomBytesIO() as fp:
        fp.is_implicit_VR = is_implicit_vr
        fp.is_little_endian = is_little_endian
        write_data_element(fp, elem)
        # DicomBytesIO exposes the underlying BytesIO as ``parent``.
        return fp.parent.getvalue()
def test_encapsulate_single_fragment_per_frame_bot(self):
    """Test encapsulating single fragment per frame with BOT values."""
    ds = dcmread(JP2K_10FRAME_NOBOT)
    frames = decode_data_sequence(ds.PixelData)
    assert len(frames) == 10

    # Re-encapsulate with a Basic Offset Table; fragments must survive
    # the round trip unchanged.
    encapsulated = encapsulate(frames, fragments_per_frame=1, has_bot=True)
    for actual, expected in zip(decode_data_sequence(encapsulated), frames):
        assert actual == expected

    # The BOT must list the byte offset of every frame.
    stream = DicomBytesIO(encapsulated)
    stream.is_little_endian = True
    length, offsets = get_frame_offsets(stream)
    assert offsets == [
        0x0000,  # 0
        0x0eee,  # 3822
        0x1df6,  # 7670
        0x2cf8,  # 11512
        0x3bfc,  # 15356
        0x4ade,  # 19166
        0x59a2,  # 22946
        0x6834,  # 26676
        0x76e2,  # 30434
        0x8594   # 34196
    ]
def encode(ds, is_implicit_vr, is_little_endian):
    """Encode a pydicom Dataset `ds` to a byte stream.

    Parameters
    ----------
    ds : pydicom.dataset.Dataset
        The dataset to encode
    is_implicit_vr : bool
        The element encoding scheme the dataset will be encoded with.
    is_little_endian : bool
        The byte ordering the dataset will be encoded in.

    Returns
    -------
    bytes or None
        The encoded dataset (if successful), None if the encoding failed.
    """
    # pylint: disable=broad-except
    stream = DicomBytesIO()
    stream.is_implicit_VR = is_implicit_vr
    stream.is_little_endian = is_little_endian
    try:
        write_dataset(stream, ds)
    except Exception as exc:
        # Deliberate best-effort: log the failure and signal it with None.
        LOGGER.error("pydicom.write_dataset() failed:")
        LOGGER.error(exc)
        stream.close()
        return None

    encoded = stream.parent.getvalue()
    stream.close()
    return encoded
def parse_wado_response(response: Response) -> Dataset:
    """Create a Dataset out of http response from WADO server

    Raises
    ------
    DICOMTrolleyError
        If response is not as expected or if parsing fails

    Returns
    -------
    Dataset
    """
    # Anything but HTTP 200 is surfaced as a trolley error with a
    # truncated echo of the response body for debugging.
    if response.status_code != 200:
        raise DICOMTrolleyError(
            f"Calling {response.url} failed ({response.status_code} - "
            f"{response.reason})\n"
            f"response content was {str(response.content[:300])}")

    stream = DicomBytesIO(response.content)
    try:
        return dcmread(stream)
    except InvalidDicomError as e:
        # Chain the parse failure so the original traceback is kept.
        raise DICOMTrolleyError(
            f"Error parsing response as dicom: {e}."
            f" Response content (first 300 elements) was"
            f" {str(response.content[:300])}") from e
def test_japanese_multi_byte_personname(self):
    """Test japanese person name which has multi byte strings are
    correctly encoded."""
    file_path = get_charset_files('chrH32.dcm')[0]
    ds = dcmread(file_path)
    ds.decode()

    if hasattr(ds.PatientName, 'original_string'):
        # Drop the cached raw bytes so the write must re-encode the
        # decoded components; the result must match the original bytes.
        original_string = ds.PatientName.original_string
        ds.PatientName.original_string = None
        fp = DicomBytesIO()
        fp.is_implicit_VR = False
        fp.is_little_endian = True
        ds.save_as(fp, write_like_original=False)
        fp.seek(0)
        ds_out = dcmread(fp)
        assert original_string == ds_out.PatientName.original_string

    # Encode a three-component name using the ISO 2022 IR 6/87/159
    # code extensions and compare against the expected escape-sequence
    # byte stream.
    japanese_pn = PersonName(u"Mori^Ogai=森^鷗外=もり^おうがい")
    pyencs = pydicom.charset.convert_encodings(
        ["ISO 2022 IR 6", "ISO 2022 IR 87", "ISO 2022 IR 159"])
    actual_encoded = bytes(japanese_pn.encode(pyencs))
    expect_encoded = (
        b"\x4d\x6f\x72\x69\x5e\x4f\x67\x61\x69\x3d\x1b\x24\x42\x3f"
        b"\x39\x1b\x28\x42\x5e\x1b\x24\x28\x44\x6c\x3f\x1b\x24\x42"
        b"\x33\x30\x1b\x28\x42\x3d\x1b\x24\x42\x24\x62\x24\x6a\x1b"
        b"\x28\x42\x5e\x1b\x24\x42\x24\x2a\x24\x26\x24\x2c\x24\x24"
        b"\x1b\x28\x42")
    assert expect_encoded == actual_encoded
def decode_data_sequence(data):
    """Read encapsulated data and return a list of strings.

    Parameters
    ----------
    data : bytes
        String of encapsulated data, typically ``dataset.PixelData``.

    Returns
    -------
    list of bytes
        All fragments in a list of byte strings.
    """
    # Convert data into a memory-mapped file
    with DicomBytesIO(data) as fp:

        # DICOM standard requires this
        fp.is_little_endian = True
        BasicOffsetTable = read_item(fp)  # NOQA
        seq = []

        while True:
            item = read_item(fp)

            # None is returned if get to Sequence Delimiter
            if not item:
                break
            seq.append(item)

        # Fix: the fragments were collected but the function previously
        # fell off the end and returned None (the old ``# XXX should
        # return seq`` marker).
        return seq
def test_read_exact_length_raises(self):
    """Test DicomIO.read exact length raises if short"""
    # Only 3 of the 4 requested bytes are available.
    stream = DicomBytesIO(b'\x00\x01\x03')
    stream.is_little_endian = True
    with pytest.raises(EOFError,
                       match="Unexpected end of file. Read 3 bytes of 4 "
                             "expected starting at position 0x0"):
        stream.read(length=4, need_exact_length=True)
def test_single_frame(self):
    """Test reading single-frame BOT item"""
    # Item tag, 4-byte length, then a single frame offset of 0.
    stream = DicomBytesIO(b'\xFE\xFF\x00\xE0'
                          b'\x04\x00\x00\x00'
                          b'\x00\x00\x00\x00')
    stream.is_little_endian = True
    assert [0] == get_frame_offsets(stream)
def test_default_for_writing_validation_mode(self):
    """Under the default validation mode an invalid value only warns."""
    ds = Dataset()
    ds.is_implicit_VR = True
    ds.is_little_endian = True
    ds.SpecificCharacterSet = "ISO_IR 192"
    # SH value that fails validation -- default mode warns, not raises.
    ds.add(DataElement(0x00080050, "SH", "洪^吉洞=홍^길동"))
    with pytest.warns(UserWarning):
        ds.save_as(DicomBytesIO())
def test_not_little_endian(self):
    """Test reading big endian raises exception"""
    bytestream = b'\xFE\xFF\x00\xE0' \
                 b'\x00\x00\x00\x00'
    fp = DicomBytesIO(bytestream)
    fp.is_little_endian = False
    # Modernized: use pytest.raises instead of the deprecated
    # assert_raises_regex helper, consistent with the sibling test.
    with pytest.raises(ValueError,
                       match="'fp.is_little_endian' must be True"):
        get_frame_offsets(fp)
def test_single_fragment_no_delimiter(self):
    """Test single fragment is returned OK"""
    # Item tag, 4-byte length, then the 4-byte fragment payload.
    stream = DicomBytesIO(b'\xFE\xFF\x00\xE0'
                          b'\x04\x00\x00\x00'
                          b'\x01\x00\x00\x00')
    stream.is_little_endian = True
    assert read_item(stream) == b'\x01\x00\x00\x00'
def test_write_invalid_values(self, future_setter):
    """With the future/strict setting an invalid value raises on write."""
    ds = Dataset()
    ds.is_implicit_VR = True
    ds.is_little_endian = True
    ds.SpecificCharacterSet = "ISO_IR 192"
    # SH value that fails validation -- strict mode raises ValueError.
    ds.add(DataElement(0x00080050, "SH", "洪^吉洞=홍^길동"))
    with pytest.raises(ValueError):
        ds.save_as(DicomBytesIO())
def test_read_le_ul(self):
    """Test DicomIO.read_leUL indirectly"""
    # UL values are fixed 4-byte little endian unsigned longs.
    stream = DicomBytesIO(
        b'\x00\x00\x00\x00\xFF\xFF\x00\x00\xFE\xFF\xFF\xFF')
    stream.is_little_endian = True
    assert stream.read_leUL() == 0
    assert stream.read_leUL() == 0xFFFF
    assert stream.read_leUL() == 0xFFFFFFFE
def test_not_little_endian(self):
    """Test reading big endian raises exception"""
    # get_frame_offsets only accepts little endian sources.
    stream = DicomBytesIO(b'\xFE\xFF\x00\xE0'
                          b'\x00\x00\x00\x00')
    stream.is_little_endian = False
    with pytest.raises(ValueError,
                       match="'fp.is_little_endian' must be True"):
        get_frame_offsets(stream)
def test_item_undefined_length(self):
    """Test exception raised if item length undefined."""
    # Item with an undefined (0xFFFFFFFF) length is rejected.
    stream = DicomBytesIO(b'\xFE\xFF\x00\xE0'
                          b'\xFF\xFF\xFF\xFF'
                          b'\x00\x00\x00\x01')
    stream.is_little_endian = True
    with pytest.raises(ValueError):
        get_nr_fragments(stream)
def test_read_be_us(self):
    """Test DicomIO.read_beUS indirectly"""
    # US are 2 bytes fixed
    bytestream = b'\x00\x00\x00\xFF\xFF\xFE'
    fp = DicomBytesIO(bytestream)
    # Fix: this is the big endian read test, so flag the stream as big
    # endian, consistent with test_read_be_tag.  The expected values
    # (0x00FF -> 255, 0xFFFE) are the big endian interpretations of the
    # byte pairs; read_beUS reads big endian explicitly, so the
    # assertions are unchanged.
    fp.is_little_endian = False
    assert fp.read_beUS() == 0
    assert fp.read_beUS() == 255
    assert fp.read_beUS() == 0xFFFE