def _print_metadata(data, prettify=False, dicomize=False):
    """Print DICOM metadata to stdout.

    Args:
        data: DICOM JSON metadata — a dict, or a list of dicts.
        prettify: If True, pretty-print the JSON with 4-space indentation.
        dicomize: If True, convert the JSON to pydicom dataset(s) and print
            their DICOM text representation instead of raw JSON.
    """
    logger.info('print metadata')
    if dicomize:
        # Render each entry as a pydicom dataset rather than raw JSON.
        if isinstance(data, list):
            for item in data:
                print(load_json_dataset(item))
                print('\n')
        else:
            print(load_json_dataset(data))
    elif prettify:
        print(json.dumps(data, indent=4, sort_keys=True))
    else:
        print(json.dumps(data, sort_keys=True))
def _BuildSR(study_json, text, series_uid, instance_uid):
    # type: (Dict, str, str, str) -> pydicom.dataset.Dataset
    """Builds and returns a Basic Text DICOM Structured Report instance.

    Args:
      study_json: Dict of study level information to populate the SR.
      text: Text string to use for the Basic Text DICOM SR.
      series_uid: UID of the series to use for the SR.
      instance_uid: UID of the instance to use for the SR.

    Returns:
      pydicom.dataset.Dataset representing the Structured Report.
    """
    # Inherit study-level attributes, then stamp the SR-specific identifiers.
    sr = load_json_dataset(study_json)
    sr.SOPClassUID = tag_values.BASIC_TEXT_SR_CUID
    sr.SeriesInstanceUID = series_uid
    sr.SOPInstanceUID = instance_uid
    sr.Modality = tag_values.SR_MODALITY

    # Single CONTAINS/TEXT content item holding the report text.
    text_item = pydicom.dataset.Dataset()
    text_item.RelationshipType = 'CONTAINS'
    text_item.ValueType = 'TEXT'
    text_item.TextValue = text
    sr.ContentSequence = pydicom.sequence.Sequence([text_item])

    sr.fix_meta_info(enforce_standard=True)
    # Must be set but is overwritten later during `dcmwrite()`.
    sr.file_meta.FileMetaInformationGroupLength = 0
    return sr
def test_load_json_dataset_pn_vm2_empty(httpserver, client, cache_dir):
    """An empty PN value for a VM>1 attribute loads as an empty list."""
    dicom_json = {
        '0008009C': {
            'vr': 'PN',
            'Value': [{}],
        },
    }
    dataset = load_json_dataset(dicom_json)
    assert dataset.ConsultingPhysicianName == []
def test_load_json_dataset_tm(httpserver, client, cache_dir):
    """A TM (time) value round-trips through load_json_dataset."""
    time_value = '113924'
    dicom_json = {
        '00080030': {
            'vr': 'TM',
            'Value': [time_value],
        },
    }
    dataset = load_json_dataset(dicom_json)
    assert dataset.StudyTime == time_value
def test_load_json_dataset_da(httpserver, client, cache_dir):
    """A DA (date) value round-trips through load_json_dataset."""
    date_value = '2018-11-21'
    dicom_json = {
        '00080020': {
            'vr': 'DA',
            'Value': [date_value],
        },
    }
    dataset = load_json_dataset(dicom_json)
    assert dataset.StudyDate == date_value
def test_load_json_dataset_pn_vm1_empty(httpserver, client, cache_dir):
    """An empty PN value for a VM=1 attribute loads as empty/absent."""
    dicom_json = {
        '00080090': {
            'vr': 'PN',
            'Value': [{}],
        },
    }
    dataset = load_json_dataset(dicom_json)
    # This returns different results for Python2 (None) and Python3 ("")
    assert dataset.ReferringPhysicianName in (None, '')
def test_load_json_dataset_pn_vm2(httpserver, client, cache_dir):
    """A multi-valued PN attribute loads as a list of alphabetic names."""
    names = ['First^Person', 'Second^Person']
    dicom_json = {
        '0008009C': {
            'vr': 'PN',
            'Value': [{'Alphabetic': name} for name in names],
        },
    }
    dataset = load_json_dataset(dicom_json)
    assert dataset.ConsultingPhysicianName == names
def test_load_json_dataset_pn_vm1(httpserver, client, cache_dir):
    """A single-valued PN attribute loads as its alphabetic name string."""
    name = 'Only^Person'
    dicom_json = {
        '00080090': {
            'vr': 'PN',
            'Value': [{'Alphabetic': name}],
        },
    }
    dataset = load_json_dataset(dicom_json)
    assert dataset.ReferringPhysicianName == name
def _BuildComprehensiveSR(instance_metadata, prediction_class, series_uid,
                          instance_uid):
    # type: (Dict, str, str, str) -> pydicom.dataset.Dataset
    """Builds and returns a Comprehensive SR DICOM Structured Report instance.

    Args:
      instance_metadata: Dict of instance for which the prediction was made and
        from which the created SR document instance should inherit patient and
        study information.
      prediction_class: The BI-RADS breast density predicted class.
      series_uid: UID of the series to use for the SR.
      instance_uid: UID of the instance to use for the SR.

    Raises:
      ValueError if the prediction class is not in |_PREDICTION_CLASS_TO_CODE|.

    Returns:
      pydicom.dataset.Dataset representing the Structured Report.
    """
    # Get code from prediction class (membership test on the dict directly;
    # `.keys()` is redundant).
    if prediction_class not in _PREDICTION_CLASS_TO_CODE:
        raise ValueError('Prediction class: %s is invalid must be in: %s' %
                         (prediction_class, _PREDICTION_CLASS_TO_CODE.keys()))
    prediction_code = _PREDICTION_CLASS_TO_CODE[prediction_class]

    model_name = 'Breast Density Classification Model'
    # Description of the classification model.
    algorithm = templates.AlgorithmIdentification(
        name=model_name,
        version='v0.1.0',
    )

    # Add prediction_code to qualitative evaluations.
    measurements_group = templates.MeasurementsAndQualitativeEvaluations(
        tracking_identifier=templates.TrackingIdentifier(
            identifier='Classification of Breast Density'),
        algorithm_id=algorithm,
        qualitative_evaluations=[prediction_code])

    # The SR's observer is the model itself, identified as a device.
    device = templates.DeviceObserverIdentifyingAttributes(
        uid=pydicom.uid.generate_uid(prefix=None), model_name=model_name)
    observation_context = templates.ObservationContext(
        observer_device_context=templates.ObserverContext(
            observer_type=codes.DCM.Device,
            observer_identifying_attributes=device))

    measurement_report = templates.MeasurementReport(
        observation_context=observation_context,
        procedure_reported=codes.SCT.ImagingProcedure,
        imaging_measurements=[measurements_group])

    structured_report = sop.ComprehensiveSR(
        evidence=[load_json_dataset(instance_metadata)],
        content=measurement_report[0],
        series_number=1,
        series_instance_uid=series_uid,
        sop_instance_uid=instance_uid,
        instance_number=1,
        manufacturer='ML Codelab')
    return structured_report
def search_uids(dicomweb, uids):
    """Return sorted (StudyInstanceUID, SeriesInstanceUID) pairs matching UIDs.

    Each UID is tried both as a study UID and as a series UID filter;
    duplicates are collapsed via a set.
    """
    matches = set()
    for uid in uids:
        log.info(' Searching studies and series with UID %s', uid)
        for uid_field in ('StudyInstanceUID', 'SeriesInstanceUID'):
            results = dicomweb.search_for_series(
                search_filters={uid_field: uid})
            for series_json in results:
                ds = load_json_dataset(series_json)
                matches.add((ds.StudyInstanceUID, ds.SeriesInstanceUID))
    return sorted(matches)
def test_store_instance_error_with_no_retries(httpserver, client, cache_dir):
    """A store failure raises immediately when HTTP retries are disabled."""
    dataset = load_json_dataset({})
    dataset.is_little_endian = True
    dataset.is_implicit_VR = True

    client.set_http_retry_params(retry=False)
    httpserver.serve_content(
        content='', code=HTTPStatus.REQUEST_TIMEOUT, headers='')

    with pytest.raises(HTTPError):
        client.store_instances([dataset])

    # Exactly one request should have been made (no retries attempted).
    assert len(httpserver.requests) == 1
    sent = httpserver.requests[0]
    assert sent.headers['Content-Type'].startswith(
        'multipart/related; type="application/dicom"')
def dicom_web_download_series(study_id, series_id, save_dir,
                              client: DICOMwebClient, frame_fetch=False):
    """Download every instance of a DICOM series to `save_dir` via DICOMweb.

    Args:
        study_id: StudyInstanceUID of the series; if falsy, it is resolved by
            searching for the series UID.
        series_id: SeriesInstanceUID of the series to download.
        save_dir: Directory (created if missing) for `<SOPInstanceUID>.dcm`
            files.
        client: Connected DICOMwebClient used for all requests.
        frame_fetch: If True, fetch per-instance metadata and frame 1 pixel
            data separately instead of retrieving whole instances.
    """
    start = time.time()
    # Limitation for DICOMWeb Client as it needs StudyInstanceUID to fetch series
    if not study_id:
        matching = [
            series for series in client.search_for_series(
                search_filters={"SeriesInstanceUID": series_id})
            if series["0020000E"]["Value"] == [series_id]
        ]
        meta = load_json_dataset(matching[0])
        study_id = str(meta["StudyInstanceUID"].value)

    os.makedirs(save_dir, exist_ok=True)

    if not frame_fetch:
        instances = client.retrieve_series(study_id, series_id)
        for instance in instances:
            instance_id = str(instance["SOPInstanceUID"].value)
            file_name = os.path.join(save_dir, f"{instance_id}.dcm")
            instance.save_as(file_name)
    else:
        # TODO:: This logic (combining meta+pixeldata) needs improvement
        def save_from_frame(m):
            d = load_json_dataset(m)
            instance_id = str(d["SOPInstanceUID"].value)

            # Hack to merge Info + RawData
            d.is_little_endian = True
            d.is_implicit_VR = True
            d.PixelData = client.retrieve_instance_frames(
                study_instance_uid=study_id,
                series_instance_uid=series_id,
                sop_instance_uid=instance_id,
                frame_numbers=[1],
            )[0]

            file_name = os.path.join(save_dir, f"{instance_id}.dcm")
            logger.info(f"++ Saved {file_name}")
            d.save_as(file_name)

        meta_list = client.retrieve_series_metadata(study_id, series_id)
        with ThreadPoolExecutor(max_workers=2,
                                thread_name_prefix="DICOMFetch") as executor:
            # BUG FIX: Executor.map is lazy about results — exceptions raised
            # in worker threads are only re-raised when the result iterator is
            # consumed. The original never consumed it, silently dropping
            # download/save failures. list() forces full consumption so
            # errors propagate.
            list(executor.map(save_from_frame, meta_list))

    logger.info(f"Time to download: {time.time() - start} (sec)")
def _save_metadata(data, directory, sop_instance_uid, prettify=False,
                   dicomize=False):
    """Write instance metadata to a file named after its SOP Instance UID.

    Args:
        data: DICOM JSON metadata dict.
        directory: Target directory for the output file.
        sop_instance_uid: SOP Instance UID used as the file's base name.
        prettify: Pretty-print JSON output (ignored when dicomize is set).
        dicomize: Write a binary DICOM file (.dcm) instead of JSON.
    """
    extension = 'dcm' if dicomize else 'json'
    filepath = os.path.join(
        directory, '{}.{}'.format(sop_instance_uid, extension))
    logger.info('save metadata to file: {}'.format(filepath))

    if dicomize:
        load_json_dataset(data).save_as(filepath)
    else:
        # indent=None produces compact single-line JSON, same as omitting it.
        indent = 4 if prettify else None
        with open(filepath, 'w') as f:
            json.dump(data, f, indent=indent, sort_keys=True)
def test_search_for_studies_dicomize(parser, httpserver, cache_dir, capsys):
    """`search studies --dicomize` prints pydicom reprs of every result."""
    cache_filename = str(cache_dir.joinpath('search_for_studies.json'))
    with open(cache_filename, 'r') as f:
        content = f.read()

    # Expected stdout: repr of each dataset, separated and terminated by
    # blank lines.
    parsed_content = json.loads(content)
    reprs = [repr(load_json_dataset(instance)) for instance in parsed_content]
    expected = '\n\n\n'.join(reprs) + '\n\n\n'

    headers = {'content-type': 'application/dicom+json'}
    httpserver.serve_content(content=content, code=200, headers=headers)
    args = parser.parse_args(
        ['--url', httpserver.url, 'search', 'studies', '--dicomize'])
    with pytest.raises(SystemExit) as exit:
        main(args)
    assert exit.value.code == 0

    stdout, stderr = capsys.readouterr()
    assert stdout == expected
def save_from_frame(m):
    """Rebuild one instance from its metadata plus frame-1 pixel data and save it.

    NOTE(review): relies on `client`, `study_id`, `series_id`, `save_dir`
    and `logger` from the enclosing scope.
    """
    ds = load_json_dataset(m)
    sop_uid = str(ds["SOPInstanceUID"].value)

    # Hack to merge Info + RawData
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    frames = client.retrieve_instance_frames(
        study_instance_uid=study_id,
        series_instance_uid=series_id,
        sop_instance_uid=sop_uid,
        frame_numbers=[1],
    )
    ds.PixelData = frames[0]

    out_path = os.path.join(save_dir, f"{sop_uid}.dcm")
    logger.info(f"++ Saved {out_path}")
    ds.save_as(out_path)
def _IsMammoInstance(self, input_client, instance_path):
    # type: (dicomweb_client.DICOMwebClient, dicom_path.Path) -> bool
    """Returns whether the DICOM instance is of type MG modality.

    Args:
      input_client: dicomweb_client.DICOMwebClient used to perform search
        query.
      instance_path: dicom_path.Path of DICOM instance.

    Returns:
      True if the instance's Modality equals the MG modality, False
      otherwise (including when the instance is not found).
    """
    # BUG FIX (docs): the previous Returns section claimed
    # "ParsedMessage or None", apparently copy-pasted from a message parser;
    # the function returns a bool.
    # Query only the single instance and request only the Modality field.
    instance_json_list = input_client.search_for_instances(
        instance_path.study_uid,
        instance_path.series_uid,
        fields=[_MODALITY_KEY_WORD],
        search_filters={'SOPInstanceUID': instance_path.instance_uid})
    if not instance_json_list:
        return False
    dataset = load_json_dataset(instance_json_list[0])
    return dataset.get(_MODALITY_KEY_WORD) == tag_values.MG_MODALITY