def _get_pixel_array(self):
    # Check if already have converted to a NumPy array
    # Also check if self.PixelData has changed. If so, get new NumPy array
    already_have = True
    if not hasattr(self, "_pixel_array"):
        already_have = False
    elif self._pixel_id != id(self.PixelData):
        already_have = False

    if not already_have and not self._is_uncompressed_transfer_syntax():
        try:
            # print("Pixel Data is compressed")
            self._pixel_array = self._compressed_pixel_data_numpy()
            # is this guaranteed to work if memory is re-used??
            self._pixel_id = id(self.PixelData)
            return self._pixel_array
        except IOError:
            logger.info(
                "Pillow or JPLS did not support this transfer syntax")

    if not already_have:
        self._pixel_array = self._pixel_data_numpy()
        # is this guaranteed to work if memory is re-used??
        self._pixel_id = id(self.PixelData)

    return self._pixel_array
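
# Usage sketch (illustrative, not part of the original source): in the older
# pydicom releases this method comes from, ``_get_pixel_array()`` backs the
# read-only ``pixel_array`` property, so callers normally never invoke it
# directly. The package name and file name below are assumptions.
def _example_get_pixel_array(path="ct_image.dcm"):
    import dicom  # pre-1.0 pydicom package name; assumed for this sketch
    ds = dicom.read_file(path)
    # Accessing the property triggers _get_pixel_array() and caches the result
    return ds.pixel_array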
def _read_file_meta_info(fp: BinaryIO) -> FileMetaDataset:
    """Return a Dataset containing any File Meta (0002,eeee) elements in `fp`.

    File Meta elements are always Explicit VR Little Endian (DICOM Standard,
    Part 10, :dcm:`Section 7<part10/chapter_7.html>`). Once any File Meta
    elements are read `fp` will be positioned at the start of the next group
    of elements.

    Parameters
    ----------
    fp : file-like
        The file-like positioned at the start of any File Meta Information
        group elements.

    Returns
    -------
    dataset.Dataset
        The File Meta elements as a Dataset instance. May be empty if no
        File Meta are present.
    """
    def _not_group_0002(tag: BaseTag, vr: Optional[str], length: int) -> bool:
        """Return True if the tag is not in group 0x0002, False otherwise."""
        return tag.group != 2

    start_file_meta = fp.tell()
    file_meta = FileMetaDataset(
        read_dataset(
            fp, is_implicit_VR=False, is_little_endian=True,
            stop_when=_not_group_0002
        )
    )
    if not file_meta._dict:
        return file_meta

    # Test the file meta for correct interpretation by requesting the first
    #   data element: if it fails, retry loading the file meta with an
    #   implicit VR (issue #503)
    try:
        file_meta[list(file_meta.elements())[0].tag]
    except NotImplementedError:
        fp.seek(start_file_meta)
        file_meta = FileMetaDataset(
            read_dataset(
                fp, is_implicit_VR=True, is_little_endian=True,
                stop_when=_not_group_0002
            )
        )

    # Log if the Group Length doesn't match actual length
    if 'FileMetaInformationGroupLength' in file_meta:
        # FileMetaInformationGroupLength must be 12 bytes long and its value
        #   counts from the beginning of the next element to the end of the
        #   file meta elements
        actual_len = fp.tell() - (start_file_meta + 12)
        elem_len = file_meta.FileMetaInformationGroupLength
        if elem_len != actual_len:
            logger.info(
                "_read_file_meta_info: (0002,0000) 'File Meta Information "
                "Group Length' value doesn't match the actual File Meta "
                f"Information length ({elem_len} vs {actual_len} bytes)"
            )

    return file_meta
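
# Usage sketch (illustrative, not part of the original source): reading the
# File Meta Information group by hand after skipping the preamble and 'DICM'
# prefix. The file name is a placeholder.
def _example_read_file_meta(path="ct_image.dcm"):
    with open(path, "rb") as fp:
        read_preamble(fp, force=True)  # leaves fp just past the 'DICM' prefix
        file_meta = _read_file_meta_info(fp)
        # e.g. file_meta.TransferSyntaxUID determines how the main dataset
        # that follows should be decoded
        return file_meta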
def read_preamble(fp: BinaryIO, force: bool) -> Optional[bytes]:
    """Return the 128-byte DICOM preamble in `fp` if present.

    `fp` should be positioned at the start of the file-like. If the preamble
    and prefix are found then after reading `fp` will be positioned at the
    first byte after the prefix (byte offset 133). If either the preamble or
    prefix are missing and `force` is ``True`` then after reading `fp` will
    be positioned at the start of the file-like.

    Parameters
    ----------
    fp : file-like object
        The file-like to read the preamble from.
    force : bool
        Flag to force reading of a file even if no header is found.

    Returns
    -------
    preamble : bytes or None
        The 128-byte DICOM preamble will be returned if the appropriate prefix
        ('DICM') is found at byte offset 128. Returns ``None`` if the 'DICM'
        prefix is not found and `force` is ``True``.

    Raises
    ------
    InvalidDicomError
        If `force` is ``False`` and no appropriate header information found.

    Notes
    -----
    Also reads past the 'DICM' marker. Rewinds file to the beginning if no
    header found.
    """
    logger.debug("Reading File Meta Information preamble...")
    preamble = fp.read(128)
    if config.debugging:
        sample = bytes2hex(preamble[:8]) + "..." + bytes2hex(preamble[-8:])
        logger.debug(f"{fp.tell() - 128:08x}: {sample}")

    logger.debug("Reading File Meta Information prefix...")
    magic = fp.read(4)
    if magic != b"DICM" and force:
        logger.info(
            "File is not conformant with the DICOM File Format: 'DICM' "
            "prefix is missing from the File Meta Information header "
            "or the header itself is missing. Assuming no header and "
            "continuing.")
        fp.seek(0)
        return None

    if magic != b"DICM" and not force:
        raise InvalidDicomError(
            "File is missing DICOM File Meta Information header or the 'DICM' "
            "prefix is missing from the header. Use force=True to force "
            "reading.")
    else:
        logger.debug(f"{fp.tell() - 4:08x}: 'DICM' prefix found")

    return preamble
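
# Usage sketch (illustrative, not part of the original source): reading just
# the preamble while tolerating files without the 128-byte header. The file
# name is a placeholder.
def _example_read_preamble(path="ct_image.dcm"):
    with open(path, "rb") as fp:
        preamble = read_preamble(fp, force=True)
        # preamble is 128 bytes, or None when no 'DICM' prefix was found; in
        # the latter case fp has been rewound to offset 0
        return preamble, fp.tell()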
def _read_file_meta_info(fp):
    """Return the file meta information.
    fp must be set after the 128 byte preamble and 'DICM' marker
    """
    # File meta info always LittleEndian, Explicit VR. After will change these
    #   to the transfer syntax values set in the meta info

    # Get group length data element, whose value is the length of the meta_info
    fp_save = fp.tell()  # in case need to rewind
    debugging = config.debugging
    if debugging:
        logger.debug("Try to read group length info...")
    bytes_read = fp.read(8)
    group, elem, VR, length = unpack("<HH2sH", bytes_read)
    if debugging:
        debug_msg = "{0:08x}: {1}".format(fp.tell() - 8, bytes2hex(bytes_read))
    if not in_py2:
        VR = VR.decode(default_encoding)
    if VR in extra_length_VRs:
        bytes_read = fp.read(4)
        length = unpack("<L", bytes_read)[0]
        if debugging:
            debug_msg += " " + bytes2hex(bytes_read)
    if debugging:
        debug_msg = "{0:<47s} ({1:04x}, {2:04x}) {3:2s} Length: {4:d}".format(
            debug_msg, group, elem, VR, length)
        logger.debug(debug_msg)

    # Store meta group length if it exists, then read until not group 2
    if group == 2 and elem == 0:
        bytes_read = fp.read(length)
        if debugging:
            logger.debug("{0:08x}: {1}".format(fp.tell() - length,
                                               bytes2hex(bytes_read)))
        group_length = unpack("<L", bytes_read)[0]
        expected_ds_start = fp.tell() + group_length
        if debugging:
            msg = "value (group length) = {0:d}".format(group_length)
            msg += " regular dataset should start at {0:08x}".format(
                expected_ds_start)
            logger.debug(" " * 10 + msg)
    else:
        expected_ds_start = None
        if debugging:
            logger.debug(" " * 10 + "(0002,0000) Group length not found.")

    # Changed in pydicom 0.9.7 -- don't trust the group length, just read
    #   until no longer group 2 data elements. But check the length and
    #   give a warning if group 2 ends at different location.
    # Rewind to read the first data element as part of the file_meta dataset
    if debugging:
        logger.debug("Rewinding and reading whole dataset "
                     "including this first data element")
    fp.seek(fp_save)
    file_meta = read_dataset(fp, is_implicit_VR=False,
                             is_little_endian=True,
                             stop_when=not_group2)
    fp_now = fp.tell()
    if expected_ds_start and fp_now != expected_ds_start:
        logger.info("*** Group length for file meta dataset "
                    "did not match end of group 2 data ***")
    else:
        if debugging:
            logger.debug("--- End of file meta data found "
                         "as expected ---------")
    return file_meta
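
# Illustrative sketch (not from the original source): how the (0002,0000)
# 'File Meta Information Group Length' element parsed above is laid out in
# Explicit VR Little Endian -- group, element, 2-byte VR, 2-byte length, then
# a 4-byte UL value.
from struct import pack, unpack

def _example_group_length_bytes(group_length=192):
    raw = pack("<HH2sH", 0x0002, 0x0000, b"UL", 4) + pack("<L", group_length)
    # The first 8 bytes match the unpack("<HH2sH", ...) call used above
    assert unpack("<HH2sH", raw[:8]) == (2, 0, b"UL", 4)
    return raw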
def _read_file_meta_info(fp):
    """Return a Dataset containing any File Meta (0002,eeee) elements in `fp`.

    File Meta elements are always Explicit VR Little Endian (as per PS3.10
    Section 7). Once any File Meta elements are read `fp` will be positioned
    at the start of the next group of elements.

    Parameters
    ----------
    fp : file-like
        The file-like positioned at the start of any File Meta Information
        group elements.

    Returns
    -------
    pydicom.dataset.Dataset
        The File Meta elements as a Dataset instance. May be empty if no
        File Meta are present.
    """
    def _not_group_0002(tag, VR, length):
        """Return True if the tag is not in group 0x0002, False otherwise."""
        return tag.group != 2

    start_file_meta = fp.tell()
    file_meta = read_dataset(fp, is_implicit_VR=False, is_little_endian=True,
                             stop_when=_not_group_0002)
    if not file_meta:
        return file_meta

    # Test the file meta for correct interpretation by requesting the first
    #   data element: if it fails, retry loading the file meta with an
    #   implicit VR (issue #503)
    try:
        file_meta[list(file_meta.elements())[0].tag]
    except NotImplementedError:
        fp.seek(start_file_meta)
        file_meta = read_dataset(fp, is_implicit_VR=True,
                                 is_little_endian=True,
                                 stop_when=_not_group_0002)

    # Log if the Group Length doesn't match actual length
    if 'FileMetaInformationGroupLength' in file_meta:
        # FileMetaInformationGroupLength must be 12 bytes long and its value
        #   counts from the beginning of the next element to the end of the
        #   file meta elements
        length_file_meta = fp.tell() - (start_file_meta + 12)
        if file_meta.FileMetaInformationGroupLength != length_file_meta:
            logger.info("_read_file_meta_info: (0002,0000) 'File Meta "
                        "Information Group Length' value doesn't match the "
                        "actual File Meta Information length ({0} vs {1} "
                        "bytes).".format(
                            file_meta.FileMetaInformationGroupLength,
                            length_file_meta))

    return file_meta
def read_preamble(fp, force):
    """Return the 128-byte DICOM preamble in `fp` if present.

    `fp` should be positioned at the start of the file-like. If the preamble
    and prefix are found then after reading `fp` will be positioned at the
    first byte after the prefix (byte offset 133). If either the preamble or
    prefix are missing and `force` is True then after reading `fp` will be
    positioned at the start of the file-like.

    Parameters
    ----------
    fp : file-like object
        The file-like to read the preamble from.
    force : bool
        Flag to force reading of a file even if no header is found.

    Returns
    -------
    preamble : str/bytes or None
        The 128-byte DICOM preamble will be returned if the appropriate prefix
        ('DICM') is found at byte offset 128. Returns None if the 'DICM'
        prefix is not found and `force` is True.

    Raises
    ------
    InvalidDicomError
        If `force` is False and no appropriate header information found.

    Notes
    -----
    Also reads past the 'DICM' marker. Rewinds file to the beginning if no
    header found.
    """
    logger.debug("Reading File Meta Information preamble...")
    preamble = fp.read(128)
    if config.debugging:
        sample = bytes2hex(preamble[:8]) + "..." + bytes2hex(preamble[-8:])
        logger.debug("{0:08x}: {1}".format(fp.tell() - 128, sample))

    logger.debug("Reading File Meta Information prefix...")
    magic = fp.read(4)
    if magic != b"DICM" and force:
        logger.info(
            "File is not conformant with the DICOM File Format: 'DICM' "
            "prefix is missing from the File Meta Information header "
            "or the header itself is missing. Assuming no header and "
            "continuing.")
        preamble = None
        fp.seek(0)
    elif magic != b"DICM" and not force:
        raise InvalidDicomError("File is missing DICOM File Meta Information "
                                "header or the 'DICM' prefix is missing from "
                                "the header. Use force=True to force reading.")
    else:
        logger.debug("{0:08x}: 'DICM' prefix found".format(fp.tell() - 4))

    return preamble
def convert_pixel_data(self):
    """Convert the Pixel Data to a numpy array internally.

    Returns
    -------
    None
        Converted pixel data is stored internally in the dataset.

    If a compressed image format, the image is decompressed,
    and any related data elements are changed accordingly.
    """
    # Check if already have converted to a NumPy array
    # Also check if self.PixelData has changed. If so, get new NumPy array
    already_have = True
    if not hasattr(self, "_pixel_array"):
        already_have = False
    elif self._pixel_id != id(self.PixelData):
        already_have = False

    if not already_have:
        last_exception = None
        successfully_read_pixel_data = False
        for x in [
                h for h in pydicom.config.image_handlers
                if h and h.supports_transfer_syntax(self)
        ]:
            try:
                pixel_array = x.get_pixeldata(self)
                self._pixel_array = self._reshape_pixel_array(pixel_array)
                if x.needs_to_convert_to_RGB(self):
                    self._pixel_array = self._convert_YBR_to_RGB(
                        self._pixel_array)
                successfully_read_pixel_data = True
                break
            except Exception as e:
                logger.debug("Trouble with", exc_info=e)
                last_exception = e
                continue

        if not successfully_read_pixel_data:
            handlers_tried = " ".join(
                [str(x) for x in pydicom.config.image_handlers])
            logger.info("%s did not support this transfer syntax",
                        handlers_tried)
            self._pixel_array = None
            self._pixel_id = None
            if last_exception:
                raise last_exception
            else:
                msg = ("No available image handler could "
                       "decode this transfer syntax {}".format(
                           self.file_meta.TransferSyntaxUID.name))
                raise NotImplementedError(msg)

        # is this guaranteed to work if memory is re-used??
        self._pixel_id = id(self.PixelData)
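
# Usage sketch (illustrative, not part of the original source):
# convert_pixel_data() is normally called for you by the Dataset.pixel_array
# property, but it can be invoked directly to force (re)conversion. The file
# name is a placeholder.
def _example_convert_pixel_data(path="ct_image.dcm"):
    import pydicom
    ds = pydicom.dcmread(path)
    ds.convert_pixel_data()  # tries each configured image handler in turn
    return ds.pixel_array    # the cached numpy array produced above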
def convert_pixel_data(self):
    """Convert the Pixel Data to a numpy array internally.

    Returns
    -------
    None
        Converted pixel data is stored internally in the dataset.

    If a compressed image format, the image is decompressed,
    and any related data elements are changed accordingly.
    """
    # Check if already have converted to a NumPy array
    # Also check if self.PixelData has changed. If so, get new NumPy array
    already_have = True
    if not hasattr(self, "_pixel_array"):
        already_have = False
    elif self._pixel_id != id(self.PixelData):
        already_have = False

    if not already_have:
        last_exception = None
        successfully_read_pixel_data = False
        for x in [h for h in pydicom.config.image_handlers
                  if h and h.supports_transfer_syntax(self)]:
            try:
                pixel_array = x.get_pixeldata(self)
                self._pixel_array = self._reshape_pixel_array(pixel_array)
                if x.needs_to_convert_to_RGB(self):
                    self._pixel_array = self._convert_YBR_to_RGB(
                        self._pixel_array
                    )
                successfully_read_pixel_data = True
                break
            except Exception as e:
                logger.debug("Trouble with", exc_info=e)
                last_exception = e
                continue

        if not successfully_read_pixel_data:
            handlers_tried = " ".join(
                [str(x) for x in pydicom.config.image_handlers])
            logger.info("%s did not support this transfer syntax",
                        handlers_tried)
            self._pixel_array = None
            self._pixel_id = None
            if last_exception:
                raise last_exception
            else:
                msg = ("No available image handler could "
                       "decode this transfer syntax {}".format(
                           self.file_meta.TransferSyntaxUID))
                raise NotImplementedError(msg)

        # is this guaranteed to work if memory is re-used??
        self._pixel_id = id(self.PixelData)
def read_preamble(fp, force):
    """Return the 128-byte DICOM preamble in `fp` if present.

    Parameters
    ----------
    fp : file-like object
        The file-like to read the preamble from.
    force : bool
        Flag to force reading of a file even if no header is found.

    Returns
    -------
    preamble : str/bytes or None
        The 128-byte DICOM preamble will be returned if the appropriate prefix
        ('DICM') is found at byte offset 128. Returns None if the 'DICM'
        prefix is not found and `force` is True.

    Raises
    ------
    InvalidDicomError
        If `force` is False and no appropriate header information found.

    Notes
    -----
    Also reads past the 'DICM' marker. Rewinds file to the beginning if no
    header found.
    """
    logger.debug("Reading preamble...")
    preamble = fp.read(0x80)
    if config.debugging:
        sample = bytes2hex(preamble[:8]) + "..." + bytes2hex(preamble[-8:])
        logger.debug("{0:08x}: {1}".format(fp.tell() - 0x80, sample))
    magic = fp.read(4)
    if magic != b"DICM":
        if force:
            logger.info(
                "File is not a conformant DICOM file; 'DICM' prefix is "
                "missing from the file header or the header is "
                "missing. Assuming no header and continuing.")
            preamble = None
            fp.seek(0)
        else:
            raise InvalidDicomError("File is missing DICOM header or 'DICM' "
                                    "prefix is missing from the header. Use "
                                    "force=True to force reading.")
    else:
        logger.debug("{0:08x}: 'DICM' prefix found".format(fp.tell() - 4))
    return preamble
def read_preamble(fp, force):
    """Read and return the DICOM preamble.

    Parameters
    ----------
    fp : file-like object
    force : boolean
        Flag to force reading of a file even if no header is found.

    Returns
    -------
    preamble : DICOM preamble, None
        The DICOM preamble will be returned if appropriate header ('DICM')
        is found. Returns None if no header is found.

    Raises
    ------
    InvalidDicomError
        If force flag is false and no appropriate header information found.

    Notes
    -----
    Also reads past the 'DICM' marker. Rewinds file to the beginning if no
    header found.
    """
    logger.debug("Reading preamble...")
    preamble = fp.read(0x80)
    if config.debugging:
        sample = bytes2hex(preamble[:8]) + "..." + bytes2hex(preamble[-8:])
        logger.debug("{0:08x}: {1}".format(fp.tell() - 0x80, sample))
    magic = fp.read(4)
    if magic != b"DICM":
        if force:
            logger.info("File is not a standard DICOM file; 'DICM' header is "
                        "missing. Assuming no header and continuing")
            preamble = None
            fp.seek(0)
        else:
            raise InvalidDicomError("File is missing 'DICM' marker. "
                                    "Use force=True to force reading")
    else:
        logger.debug("{0:08x}: 'DICM' marker found".format(fp.tell() - 4))
    return preamble
def _get_pixel_array(self):
    # Check if already have converted to a NumPy array
    # Also check if self.PixelData has changed. If so, get new NumPy array
    already_have = True
    if not hasattr(self, "_pixel_array"):
        already_have = False
    elif self._pixel_id != id(self.PixelData):
        already_have = False

    if not already_have and not self._is_uncompressed_transfer_syntax():
        try:
            # print("Pixel Data is compressed")
            self._pixel_array = self._compressed_pixel_data_numpy()
            # is this guaranteed to work if memory is re-used??
            self._pixel_id = id(self.PixelData)
            return self._pixel_array
        except IOError:
            logger.info("Pillow or JPLS did not support this transfer syntax")

    if not already_have:
        self._pixel_array = self._pixel_data_numpy()
        # is this guaranteed to work if memory is re-used??
        self._pixel_id = id(self.PixelData)

    return self._pixel_array
def _read_file_meta_info(fp):
    """Return a Dataset containing any File Meta (0002,eeee) elements in `fp`.

    File Meta elements are always Explicit VR Little Endian (as per PS3.10
    Section 7). Once any File Meta elements are read `fp` will be positioned
    at the start of the next group of elements.

    Parameters
    ----------
    fp : file-like
        The file-like positioned at the start of any File Meta Information
        group elements.

    Returns
    -------
    pydicom.dataset.Dataset
        The File Meta elements as a Dataset instance. May be empty if no
        File Meta are present.
    """
    def _not_group_0002(tag, VR, length):
        """Return True if the tag is not in group 0x0002, False otherwise."""
        return (tag.group != 2)

    start_file_meta = fp.tell()
    file_meta = read_dataset(fp, is_implicit_VR=False, is_little_endian=True,
                             stop_when=_not_group_0002)

    # Log if the Group Length doesn't match actual length
    if 'FileMetaInformationGroupLength' in file_meta:
        # FileMetaInformationGroupLength must be 12 bytes long and its value
        #   counts from the beginning of the next element to the end of the
        #   file meta elements
        length_file_meta = fp.tell() - (start_file_meta + 12)
        if file_meta.FileMetaInformationGroupLength != length_file_meta:
            logger.info("*** Group length for file meta dataset "
                        "did not match end of group 2 data ***")

    return file_meta
def to_json(self, bulk_data_element_handler, bulk_data_threshold,
            dump_handler):
    """Converts a DataElement to JSON representation.

    Parameters
    ----------
    bulk_data_element_handler: Union[Callable, None]
        callable that accepts a bulk data element and returns the
        "BulkDataURI" for retrieving the value of the data element
        via DICOMweb WADO-RS
    bulk_data_threshold: int
        size of base64 encoded data element above which a value will be
        provided in form of a "BulkDataURI" rather than "InlineBinary"

    Returns
    -------
    dict
        mapping representing a JSON encoded data element

    Raises
    ------
    TypeError
        when size of encoded data element exceeds `bulk_data_threshold`
        but `bulk_data_element_handler` is ``None`` and hence not callable
    """
    # TODO: Determine whether more VRs need to be converted to strings
    _VRs_TO_QUOTE = [
        'AT',
    ]
    json_element = {
        'vr': self.VR,
    }
    if self.VR in jsonrep.BINARY_VR_VALUES:
        if self.value is not None:
            binary_value = self.value
            encoded_value = base64.b64encode(binary_value).decode('utf-8')
            if len(encoded_value) > bulk_data_threshold:
                if bulk_data_element_handler is None:
                    raise TypeError(
                        'No bulk data element handler provided to '
                        'generate URL for value of data element "{}".'.format(
                            self.name))
                json_element['BulkDataURI'] = bulk_data_element_handler(self)
            else:
                logger.info('encode bulk data element "{}" inline'.format(
                    self.name))
                json_element['InlineBinary'] = encoded_value
    elif self.VR == 'SQ':
        # recursive call to co-routine to format sequence contents
        value = [
            json.loads(
                e.to_json(
                    bulk_data_element_handler=bulk_data_element_handler,
                    bulk_data_threshold=bulk_data_threshold,
                    dump_handler=dump_handler))
            for e in self
        ]
        json_element['Value'] = value
    elif self.VR == 'PN':
        elem_value = self.value
        if elem_value is not None:
            if compat.in_py2:
                elem_value = PersonNameUnicode(elem_value, 'UTF8')
            if len(elem_value.components) > 2:
                json_element['Value'] = [
                    {'Phonetic': elem_value.components[2]},
                ]
            elif len(elem_value.components) > 1:
                json_element['Value'] = [
                    {'Ideographic': elem_value.components[1]},
                ]
            else:
                json_element['Value'] = [
                    {'Alphabetic': elem_value.components[0]},
                ]
    else:
        if self.value is not None:
            is_multivalue = isinstance(self.value, MultiValue)
            if self.VM > 1 or is_multivalue:
                value = self.value
            else:
                value = [self.value]
            # ensure it's a list and not another iterable
            # (e.g. tuple), which would not be JSON serializable
            if self.VR in _VRs_TO_QUOTE:
                json_element['Value'] = [str(v) for v in value]
            else:
                json_element['Value'] = [v for v in value]
    # membership test: json_element is a dict, so check for the 'Value' key
    if 'Value' in json_element:
        json_element['Value'] = jsonrep.convert_to_python_number(
            json_element['Value'], self.VR)
    return json_element
def to_json_dict(
    self,
    bulk_data_element_handler: Optional[Callable[["DataElement"], str]],
    bulk_data_threshold: int
) -> Dict[str, object]:
    """Return a dictionary representation of the :class:`DataElement`
    conforming to the DICOM JSON Model as described in the DICOM
    Standard, Part 18, :dcm:`Annex F<part18/chaptr_F.html>`.

    .. versionadded:: 1.4

    Parameters
    ----------
    bulk_data_element_handler: callable or None
        Callable that accepts a bulk data element and returns the
        "BulkDataURI" for retrieving the value of the data element
        via DICOMweb WADO-RS
    bulk_data_threshold: int
        Size of base64 encoded data element above which a value will be
        provided in form of a "BulkDataURI" rather than "InlineBinary".
        Ignored if no bulk data handler is given.

    Returns
    -------
    dict
        Mapping representing a JSON encoded data element
    """
    json_element = {'vr': self.VR, }
    if self.VR in jsonrep.BINARY_VR_VALUES:
        if not self.is_empty:
            binary_value = self.value
            encoded_value = base64.b64encode(binary_value).decode('utf-8')
            if (bulk_data_element_handler is not None
                    and len(encoded_value) > bulk_data_threshold):
                json_element['BulkDataURI'] = (
                    bulk_data_element_handler(self))
            else:
                logger.info(
                    f"encode bulk data element '{self.name}' inline")
                json_element['InlineBinary'] = encoded_value
    elif self.VR == 'SQ':
        # recursive call to get sequence item JSON dicts
        value = [
            ds.to_json(bulk_data_element_handler=bulk_data_element_handler,
                       bulk_data_threshold=bulk_data_threshold,
                       dump_handler=lambda d: d)
            for ds in self.value
        ]
        json_element['Value'] = value
    elif self.VR == 'PN':
        if not self.is_empty:
            elem_value = []
            if self.VM > 1:
                value = self.value
            else:
                value = [self.value]
            for v in value:
                comps = {'Alphabetic': v.components[0]}
                if len(v.components) > 1:
                    comps['Ideographic'] = v.components[1]
                if len(v.components) > 2:
                    comps['Phonetic'] = v.components[2]
                elem_value.append(comps)
            json_element['Value'] = elem_value
    elif self.VR == 'AT':
        if not self.is_empty:
            value = self.value
            if self.VM == 1:
                value = [value]
            json_element['Value'] = [format(v, '08X') for v in value]
    else:
        if not self.is_empty:
            if self.VM > 1:
                value = self.value
            else:
                value = [self.value]
            json_element['Value'] = [v for v in value]
    # membership test: json_element is a dict, so check for the 'Value' key
    if 'Value' in json_element:
        json_element['Value'] = jsonrep.convert_to_python_number(
            json_element['Value'], self.VR)
    return json_element
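
# Usage sketch (illustrative, not part of the original source): serializing a
# single element to the DICOM JSON Model. The tag and value are arbitrary.
def _example_to_json_dict():
    from pydicom.dataelem import DataElement
    elem = DataElement(0x00100010, 'PN', 'Doe^John')
    # No bulk data handler, so any binary value under the threshold would be
    # inlined; PN values become a list of component-group dicts
    return elem.to_json_dict(bulk_data_element_handler=None,
                             bulk_data_threshold=1024)
    # expected result: {'vr': 'PN', 'Value': [{'Alphabetic': 'Doe^John'}]}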
def _read_file_meta_info(fp):
    """Return the file meta information.
    fp must be set after the 128 byte preamble and 'DICM' marker
    """
    # File meta info always LittleEndian, Explicit VR. After will change these
    #   to the transfer syntax values set in the meta info

    # Get group length data element, whose value is the length of the meta_info
    fp_save = fp.tell()  # in case need to rewind
    debugging = config.debugging
    if debugging:
        logger.debug("Try to read group length info...")
    bytes_read = fp.read(8)
    group, elem, VR, length = unpack("<HH2sH", bytes_read)
    if debugging:
        debug_msg = "{0:08x}: {1}".format(fp.tell() - 8, bytes2hex(bytes_read))
    if not in_py2:
        VR = VR.decode(default_encoding)
    if VR in extra_length_VRs:
        bytes_read = fp.read(4)
        length = unpack("<L", bytes_read)[0]
        if debugging:
            debug_msg += " " + bytes2hex(bytes_read)
    if debugging:
        debug_msg = "{0:<47s} ({1:04x}, {2:04x}) {3:2s} Length: {4:d}".format(
            debug_msg, group, elem, VR, length)
        logger.debug(debug_msg)

    # Store meta group length if it exists, then read until not group 2
    if group == 2 and elem == 0:
        bytes_read = fp.read(length)
        if debugging:
            logger.debug("{0:08x}: {1}".format(fp.tell() - length,
                                               bytes2hex(bytes_read)))
        group_length = unpack("<L", bytes_read)[0]
        expected_ds_start = fp.tell() + group_length
        if debugging:
            msg = "value (group length) = {0:d}".format(group_length)
            msg += " regular dataset should start at {0:08x}".format(
                expected_ds_start)
            logger.debug(" " * 10 + msg)
    else:
        expected_ds_start = None
        if debugging:
            logger.debug(" " * 10 + "(0002,0000) Group length not found.")

    # Changed in pydicom 0.9.7 -- don't trust the group length, just read
    #   until no longer group 2 data elements. But check the length and
    #   give a warning if group 2 ends at different location.
    # Rewind to read the first data element as part of the file_meta dataset
    if debugging:
        logger.debug("Rewinding and reading whole dataset "
                     "including this first data element")
    fp.seek(fp_save)
    file_meta = read_dataset(fp, is_implicit_VR=False,
                             is_little_endian=True,
                             stop_when=not_group2)
    fp_now = fp.tell()
    if expected_ds_start and fp_now != expected_ds_start:
        logger.info("*** Group length for file meta dataset "
                    "did not match end of group 2 data ***")
    else:
        if debugging:
            logger.debug("--- End of file meta data found "
                         "as expected ---------")
    return file_meta
def to_json_dict(self, bulk_data_element_handler, bulk_data_threshold):
    """Return a dictionary representation of the :class:`DataElement`
    conforming to the DICOM JSON Model as described in the DICOM
    Standard, Part 18, :dcm:`Annex F<part18/chaptr_F.html>`.

    Parameters
    ----------
    bulk_data_element_handler: callable or None
        Callable that accepts a bulk data element and returns the
        "BulkDataURI" for retrieving the value of the data element
        via DICOMweb WADO-RS
    bulk_data_threshold: int
        Size of base64 encoded data element above which a value will be
        provided in form of a "BulkDataURI" rather than "InlineBinary"

    Returns
    -------
    dict
        Mapping representing a JSON encoded data element

    Raises
    ------
    TypeError
        When size of encoded data element exceeds `bulk_data_threshold`
        but `bulk_data_element_handler` is ``None`` and hence not callable
    """
    json_element = {'vr': self.VR, }
    if self.VR in jsonrep.BINARY_VR_VALUES:
        if not self.is_empty:
            binary_value = self.value
            encoded_value = base64.b64encode(binary_value).decode('utf-8')
            if len(encoded_value) > bulk_data_threshold:
                if bulk_data_element_handler is None:
                    raise TypeError(
                        'No bulk data element handler provided to '
                        'generate URL for value of data element "{}".'
                        .format(self.name)
                    )
                json_element['BulkDataURI'] = bulk_data_element_handler(
                    self
                )
            else:
                logger.info(
                    'encode bulk data element "{}" inline'.format(
                        self.name
                    )
                )
                json_element['InlineBinary'] = encoded_value
    elif self.VR == 'SQ':
        # recursive call to get sequence item JSON dicts
        value = [
            ds.to_json(
                bulk_data_element_handler=bulk_data_element_handler,
                bulk_data_threshold=bulk_data_threshold,
                dump_handler=lambda d: d
            )
            for ds in self
        ]
        json_element['Value'] = value
    elif self.VR == 'PN':
        if not self.is_empty:
            elem_value = []
            if self.VM > 1:
                value = self.value
            else:
                value = [self.value]
            for v in value:
                if compat.in_py2:
                    v = PersonNameUnicode(v, 'UTF8')
                comps = {'Alphabetic': v.components[0]}
                if len(v.components) > 1:
                    comps['Ideographic'] = v.components[1]
                if len(v.components) > 2:
                    comps['Phonetic'] = v.components[2]
                elem_value.append(comps)
            json_element['Value'] = elem_value
    elif self.VR == 'AT':
        if not self.is_empty:
            value = self.value
            if self.VM == 1:
                value = [value]
            json_element['Value'] = [format(v, '08X') for v in value]
    else:
        if not self.is_empty:
            if self.VM > 1:
                value = self.value
            else:
                value = [self.value]
            json_element['Value'] = [v for v in value]
    # membership test: json_element is a dict, so check for the 'Value' key
    if 'Value' in json_element:
        json_element['Value'] = jsonrep.convert_to_python_number(
            json_element['Value'], self.VR
        )
    return json_element
def _read_file_meta_info(fp):
    """Return a Dataset containing any File Meta (0002,eeee) elements in `fp`.

    File Meta elements are always Explicit VR Little Endian (as per PS3.10
    Section 7). Once any File Meta elements are read `fp` will be positioned
    at the start of the next group of elements.

    Parameters
    ----------
    fp : file-like
        The file-like positioned at the start of any File Meta Information
        group elements.

    Returns
    -------
    pydicom.dataset.Dataset
        The File Meta elements as a Dataset instance. May be empty if no
        File Meta are present.
    """
    def _not_group_0002(tag, VR, length):
        """Return True if the tag is not in group 0x0002, False otherwise."""
        return tag.group != 2

    start_file_meta = fp.tell()
    file_meta = read_dataset(fp, is_implicit_VR=False, is_little_endian=True,
                             stop_when=_not_group_0002)
    if not file_meta._dict:
        return file_meta

    # Test the file meta for correct interpretation by requesting the first
    #   data element: if it fails, retry loading the file meta with an
    #   implicit VR (issue #503)
    try:
        file_meta[list(file_meta.elements())[0].tag]
    except NotImplementedError:
        fp.seek(start_file_meta)
        file_meta = read_dataset(fp, is_implicit_VR=True,
                                 is_little_endian=True,
                                 stop_when=_not_group_0002)

    # Log if the Group Length doesn't match actual length
    if 'FileMetaInformationGroupLength' in file_meta:
        # FileMetaInformationGroupLength must be 12 bytes long and its value
        #   counts from the beginning of the next element to the end of the
        #   file meta elements
        length_file_meta = fp.tell() - (start_file_meta + 12)
        if file_meta.FileMetaInformationGroupLength != length_file_meta:
            logger.info("_read_file_meta_info: (0002,0000) 'File Meta "
                        "Information Group Length' value doesn't match the "
                        "actual File Meta Information length ({0} vs {1} "
                        "bytes)."
                        .format(file_meta.FileMetaInformationGroupLength,
                                length_file_meta))

    return file_meta