def get_all_gradients(session_label, dti_stack, decimals=None):
    """
    Parses a list of dti sidecar files for a subject.

    Returns
    =======
    np.array of per-frame gradient tables
    """
    gradients_per_frame = list()
    gradients_as_array = np.asanyarray([])
    error_xml_path_list = []
    error_msg = ""
    for xml_path in dti_stack:
        xml_sidecar = read_xml_sidecar(xml_path)
        try:
            gradients_per_frame.append(
                get_gradient_table(xml_sidecar, decimals=decimals))
            gradients_as_array = np.asanyarray(gradients_per_frame)
        except Exception as e:
            error_xml_path_list.append(xml_path)
            error_msg = str(e)

    if error_xml_path_list != []:
        slog.info(session_label,
                  'ERROR: Could not get gradient table from xml sidecar',
                  script='xnat/check_gradient_tables.py',
                  sidecar=str(xml_sidecar),
                  error_xml_path_list=str(error_xml_path_list),
                  error_msg=error_msg)
    return gradients_as_array
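# Illustrative sketch only (kept as a comment): given a sorted list of
# *.nii.xml sidecar paths, the returned array can be compared frame-by-frame
# against a ground-truth table, as check_diffusion() does below.  The paths
# and scanner names here are hypothetical placeholders.
#
#   stack = sorted(glob.glob('/tmp/example_case/image*.nii.xml'))
#   grads = get_all_gradients('NCANDA_EXAMPLE', stack, decimals=2)
#   truth = np.array(get_ground_truth_gradients('SIEMENS', 'TrioTim', 2))
#   if len(grads) == len(truth):
#       mismatched = [idx for idx, frame in enumerate(grads)
#                     if not (truth[idx] == frame).all()]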
def verify_image_count(session, session_label, scan, scantype, manufacturer,
                       images_created):
    if manufacturer in expected_images:
        if scantype in expected_images[manufacturer]:
            imgrange = expected_images[manufacturer][scantype]
            if images_created not in imgrange:
                error = ('WARNING: Number of frames in archive differs '
                         'from the standard')
                slog.info(session_label, error,
                          session=session,
                          scan_number=scan,
                          scan_type=scantype,
                          actual_frame_number=images_created,
                          manufacturer=manufacturer,
                          expected_frame_number=str(imgrange))
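# `expected_images` is assumed to be a module-level lookup table of
# manufacturer -> scan type -> acceptable frame counts; it is referenced
# above but defined elsewhere.  A minimal sketch with made-up placeholder
# values only:
#
#   expected_images = {
#       'SIEMENS': {'ncanda-dti60b1000-v1': range(62, 66)},
#       'GE': {'ncanda-dti60b1000-v1': range(61, 65)},
#   }
#
# verify_image_count() only logs a warning via slog when the actual frame
# count falls outside the configured range; it never raises.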
def export_to_nifti(interface, project, subject, session, session_label,
                    manufacturer, scanner_model, scan, scantype,
                    verbose=False):
    if verbose:
        print("Starting export of nifti files for %s %s %s %s %s %s"
              % (project, subject, session, session_label, scan, scantype))

    error_msg = []

    # logfile_resource = '%s_%s/dcm2image.log' % (scan, scantype)
    # xnat_log = interface.select.project(project).subject(subject).experiment(session).resource('nifti').file(logfile_resource)
    # To test gradient directions without having to delete nifti files in
    # xnat, just uncomment this line and comment out the following one
    # if not xnat_log.exists() or 'dti60b1000' in scantype:
    # if not xnat_log.exists():

    match = re.match('.*(/fs/storage/XNAT/.*)scan_.*_catalog.xml.*',
                     interface.select.experiment(session).scan(scan).get(),
                     re.DOTALL)
    if match:
        dicom_path = match.group(1)
        if not os.path.exists(dicom_path):
            dicom_path = re.sub('storage/XNAT', 'ncanda-xnat', dicom_path)
    else:
        error_msg.append(
            "Could not determine DICOM path from XNAT catalog - "
            "export_to_nifti failed for SID:%s EID:%s Label: %s!"
            % (subject, session, session_label))
        return error_msg, 0

    if not os.path.exists(dicom_path):
        error_msg.append(
            "Path %s does not exist - export_to_nifti failed for SID:"
            "%s EID:%s Label: %s!"
            % (dicom_path, subject, session, session_label))
        return error_msg, 0

    # If nifti files were already created, make sure that they are newer
    # than the dicom files; otherwise recreate them.
    nifti_log_search = glob.glob(
        re.sub('/DICOM/', '_%s/dcm2image.log' % scantype,
               re.sub('/SCANS/', '/RESOURCES/nifti/', dicom_path)))
    if nifti_log_search != []:
        # We changed the pattern here from '*.*' to avoid errors of not
        # finding dicom files for sessions such as
        # NCANDA_E01386 / B-00454-M-9-20140214 - scan 1 / ncanda-localizer-v1
        dicom_file_pattern = dicom_path + '*'
        dicom_file_list = glob.glob(dicom_file_pattern)
        # Omit the xml file so that only dicom files are left - the xml file
        # is updated every time somebody changes something in the GUI for
        # that session, which is irrelevant for the image data.
        dicom_file_list = [x for x in dicom_file_list if '.xml' not in x]

        # If no dicom file is there, something odd is going on.
        if dicom_file_list == []:
            slog.info(session_label,
                      "Error: could not find dicom files",
                      session=session,
                      subject=subject,
                      scan_number=scan,
                      scan_type=scantype,
                      dicom_log_file=dicom_file_pattern)
            return error_msg, 0

        # Check time stamps - if the nifti files are newer, there is nothing
        # to do.
        nifti_time = time.strftime(
            '%Y-%m-%d %H:%M:%S',
            time.gmtime(os.path.getmtime(nifti_log_search[0])))
        dicom_time = time.strftime(
            '%Y-%m-%d %H:%M:%S',
            time.gmtime(os.path.getmtime(dicom_file_list[0])))

        if nifti_time > dicom_time:
            if verbose:
                print("... nothing to do as nifti files are up to date")
            return error_msg, 0

        slog.info(session_label + "_" + scan,
                  "Warning: nifti files seem outdated (dicom newer than "
                  "nifti) so they are recreated!",
                  session=session,
                  subject=subject,
                  check_nifti=str(nifti_time) + " " + str(nifti_log_search[0]),
                  check_dicom=str(dicom_time) + " " + str(dicom_file_list[0]),
                  info="If the issue reappears then simply open up the "
                       "session in XNAT, go to 'Manage Files', delete the "
                       "directory 'Resources/nifti/" + scantype + "', and "
                       "run the script again. If it still reappears then "
                       "repeat the previous procedure and afterwards delete "
                       "the directory that the log file in check_nifti is "
                       "located in!")

    temp_dir = tempfile.mkdtemp()
    zip_path = '%s/%s_%s.zip' % (temp_dir, scan, scantype)
    log_filename = '%s/%s_%s/dcm2image.log' % (temp_dir, scan, scantype)

    dcm2image_command = 'cmtk dcm2image --tolerance 1e-3 ' \
                        '--write-single-slices --no-progress ' \
                        '-rvxO %s/%s_%s/image%%n.nii %s 2>&1' % (
                            temp_dir, scan, scantype, dicom_path)

    output = ""
    try:
        output = subprocess.check_output(dcm2image_command, shell=True)
    except Exception:
        error_msg.append(
            "The following command failed: %s with the following output: %s"
            % (dcm2image_command, output))

    if len(error_msg) == 0:
        output_file = open(log_filename, 'w')
        try:
            output_file.writelines(output)
        finally:
            output_file.close()

        try:
            fzip = zipfile.ZipFile(zip_path, 'w')
            for src in sorted(glob.glob('%s/*/*' % temp_dir)):
                fzip.write(src, re.sub('%s/' % temp_dir, '', src))
            fzip.close()
        except Exception:
            error_msg.append("Could not zip %s" % zip_path)

    if os.path.exists(zip_path):
        try:
            interface.select.project(project).subject(subject).experiment(
                session).resource('nifti').put_zip(zip_path, overwrite=True,
                                                   extract=True)
        except Exception:
            error_msg.append(
                "Unable to upload ZIP file %s to experiment %s"
                % (zip_path, session))

    # Verify image counts for the various series.
    images_created = len(glob.glob('%s/*/*.nii.gz' % temp_dir))
    if images_created > 0:
        # Only use the first part of the manufacturer string so that GE's
        # longer manufacturer name is turned into 'GE'.
        manufacturer_u = manufacturer.split(' ', 1)[0].upper()
        verify_image_count(session, session_label, scan, scantype,
                           manufacturer_u, images_created)

        if 'dti60b1000' in scantype:
            xml_search_string = os.path.join(temp_dir,
                                             '%s_%s' % (scan, scantype),
                                             'image*.nii.xml')
            xml_file_list = glob.glob(xml_search_string)
            cgt.check_diffusion(session_label, session, xml_file_list,
                                manufacturer_u, scanner_model, decimals=2)

    # Clean up - remove the temp directory.
    shutil.rmtree(temp_dir)

    return error_msg, 1
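# Illustrative sketch only: a caller is expected to invoke export_to_nifti()
# once per scan of an experiment and collect the returned error messages.
# `interface` is assumed to be an authenticated XNAT interface as used above;
# all identifiers below (project, subject/experiment IDs, scan number, scan
# type, scanner model) are hypothetical placeholders.
#
#   errors, success = export_to_nifti(interface, 'example_project',
#                                     'NCANDA_S00001', 'NCANDA_E00001',
#                                     'B-00000-X-0-20140101', 'SIEMENS',
#                                     'TrioTim', '5', 'ncanda-dti60b1000-v1',
#                                     verbose=True)
#   for msg in errors:
#       slog.info('B-00000-X-0-20140101', msg)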
def check_diffusion(session_label, session, xml_file_list, manufacturer,
                    scanner_model, decimals):
    if len(xml_file_list) == 0:
        slog.info(session_label,
                  "Error: check_diffusion: xml_file_list is empty",
                  session=session)
        return

    truth_gradient = np.array(
        get_ground_truth_gradients(manufacturer, scanner_model, decimals))
    if len(truth_gradient) == 0:
        slog.info(session_label,
                  'ERROR: check_diffusion: scanner is unknown',
                  session=session,
                  scanner=manufacturer)
        return

    xml_file_list.sort()
    errorsFrame = list()
    errorsExpected = list()
    errorsActual = list()
    try:
        evaluated_gradients = get_all_gradients(session_label, xml_file_list,
                                                decimals)
        if len(evaluated_gradients) == len(truth_gradient):
            for idx, frame in enumerate(evaluated_gradients):
                # If there is a frame that doesn't match, report it.
                if not (truth_gradient[idx] == frame).all():
                    errorsFrame.append(idx)
                    errorsActual.append(frame)
                    errorsExpected.append(truth_gradient[idx])
        else:
            slog.info(session_label,
                      "ERROR: Incorrect number of frames.",
                      case_gradients=str(evaluated_gradients),
                      expected=str(truth_gradient),
                      session=session)
    except AttributeError as error:
        slog.info(session_label,
                  "Error: parsing XML files failed.",
                  xml_file_list=str(xml_file_list),
                  error_msg=str(error),
                  session=session)
        return

    if errorsFrame:
        slog.info(session_label,
                  "Errors in dti60b1000 gradients for new sessions after "
                  "comparing with ground truth.",
                  frames=str(errorsFrame),
                  actualGradients=str(errorsActual),
                  expectedGradients=str(errorsExpected),
                  session=session)

    xml_file = open(xml_file_list[0], 'r')
    try:
        for line in xml_file:
            match = re.match('.*<phaseEncodeDirectionSign>(.+)'
                             '</phaseEncodeDirectionSign>.*', line)
            if match and match.group(1).upper() != 'NEG':
                slog.info(session_label,
                          "dti60b1000 has wrong PE sign (expected NEG).",
                          actual_sign=str(match.group(1).upper()),
                          session=session)
    except AttributeError as error:
        slog.info(session_label,
                  "Error: parsing XML files failed.",
                  xml_file=xml_file_list[0],
                  error=str(error),
                  session=session)
    finally:
        xml_file.close()
    return
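# For reference, the phase-encode check above scans the first sidecar for a
# line of the (assumed) form below and flags any value other than 'NEG':
#
#   <phaseEncodeDirectionSign>NEG</phaseEncodeDirectionSign>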