def recalculate_total_mu(*custom_condition):
    """Recompute and store the total MU for every plan in the 'Plans' table.

    Total MU per plan is the sum over its beams of beam_mu * fraction count.

    :param custom_condition: optional SQL condition (first positional arg only
        is used), ANDed onto the base "mrn != ''" filter
    """
    if custom_condition:
        custom_condition = " AND " + custom_condition[0]
    else:
        custom_condition = ''

    # Get entire Beams table (optionally filtered)
    beam_data = QuerySQL('Beams', "mrn != ''" + custom_condition)

    cnx = DVH_SQL()

    # Accumulate beam_mu * fx_count per study instance uid.
    # FIX: original tested `uid not in list(plan_mus)`, rebuilding a list on
    # every iteration (accidental O(n^2)); dict.get does a single O(1) lookup.
    plan_mus = {}
    for uid, beam_mu, fx_count in zip(beam_data.study_instance_uid,
                                      beam_data.beam_mu,
                                      beam_data.fx_count):
        plan_mus[uid] = plan_mus.get(uid, 0.) + beam_mu * float(fx_count)

    for uid, total_mu in plan_mus.items():
        cnx.update('Plans', 'total_mu', str(round(total_mu, 1)),
                   "study_instance_uid = '%s'" % uid)

    cnx.close()
def validate_sql_connection(*config, **kwargs):
    """Check that a connection to the SQL database can be established.

    NOTE(review): this varargs version is shadowed by the keyword-argument
    ``validate_sql_connection(config=None, verbose=False)`` defined later in
    this file; consider removing one of the two definitions.

    :param config: optional SQL connection settings dict (first positional
        arg only is used); omitted -> use the stored configuration
    :param kwargs: 'verbose' (bool) controls command-line printing; printing
        also occurs when no kwargs are passed at all (legacy behavior)
    :return: True if a connection was established, else False
    :rtype: bool
    """
    # FIX: collapsed two duplicated try blocks into one, and narrowed the
    # bare `except:` (which also swallowed SystemExit/KeyboardInterrupt)
    # to `except Exception`.
    try:
        cnx = DVH_SQL(config[0]) if config else DVH_SQL()
        cnx.close()
        valid = True
    except Exception:
        valid = False

    # Legacy quirk preserved: verbose output when no kwargs were supplied
    if not kwargs or ('verbose' in kwargs and kwargs['verbose']):
        if valid:
            print("SQL DB is alive!")
        else:
            print("Connection to SQL DB could not be established.")
            if not is_sql_connection_defined():
                print("ERROR: SQL settings are not yet defined. Please run:\n",
                      " $ dvh settings --sql", sep="")

    return valid
def validate_sql_connection(config=None, verbose=False):
    """Check that a connection to the SQL database can be established.

    :param config: a dict with keys 'host', 'dbname', 'port' and optionally
        'user' and 'password'; None -> use the stored configuration
    :param verbose: boolean indicating if cmd line printing should be performed
    :return: True if a connection was established, else False
    :rtype: bool
    """
    # FIX: collapsed two duplicated try blocks into one, and narrowed the
    # bare `except:` (which also swallowed SystemExit/KeyboardInterrupt)
    # to `except Exception`.
    valid = True
    try:
        cnx = DVH_SQL(config) if config else DVH_SQL()
        cnx.close()
    except Exception:
        valid = False

    if verbose:
        if valid:
            print("SQL DB is alive!")
        else:
            print("Connection to SQL DB could not be established.")
            if not is_sql_connection_defined():
                print("ERROR: SQL settings are not yet defined. Please run:\n",
                      " $ dvh settings --sql", sep="")

    return valid
def reinitialize_roi_categories_in_database():
    """Remap physician and institutional ROI categories for every DVH row
    using the current ROI map (DatabaseROIs)."""
    roi_map = DatabaseROIs()
    dvh_data = QuerySQL('DVHs', "mrn != ''")
    cnx = DVH_SQL()

    for i, roi_name in enumerate(dvh_data.roi_name):
        uid = dvh_data.study_instance_uid[i]
        physician = get_physician_from_uid(uid)
        new_physician_roi = roi_map.get_physician_roi(physician, roi_name)
        new_institutional_roi = roi_map.get_institutional_roi(physician,
                                                              roi_name)

        print(i, physician, new_institutional_roi, new_physician_roi,
              roi_name, sep=' ')

        # FIX: original concatenation omitted the space before 'and',
        # emitting "...'and roi_name..." in the WHERE clause
        condition = "study_instance_uid = '%s' and roi_name = '%s'" \
                    % (uid, roi_name)
        cnx.update('DVHs', 'physician_roi', new_physician_roi, condition)
        cnx.update('DVHs', 'institutional_roi', new_institutional_roi,
                   condition)

    cnx.close()
def beam_complexities(*condition):
    """Run beam_complexity() for every study instance uid in the 'Beams'
    table, optionally restricted by a SQL condition.

    :param condition: optional SQL condition string (first positional arg)
    """
    # Unwrap the varargs; with no args the empty tuple is passed through
    # unchanged (legacy behavior).
    sql_condition = condition[0] if condition else condition

    cnx = DVH_SQL()
    study_uids = cnx.get_unique_values('Beams',
                                       'study_instance_uid',
                                       sql_condition,
                                       return_empty=True)
    for study_uid in study_uids:
        beam_complexity(cnx, study_uid)
    cnx.close()
def update_all_plan_toxicity_grades(*condition):
    """Run update_plan_toxicity_grades() for every study instance uid in the
    'Plans' table, optionally restricted by a SQL condition.

    :param condition: optional SQL condition string (first positional arg)
    """
    # Unwrap the varargs; with no args the empty tuple is passed through
    # unchanged (legacy behavior).
    sql_condition = condition[0] if condition else condition

    cnx = DVH_SQL()
    study_uids = cnx.get_unique_values('Plans',
                                       'study_instance_uid',
                                       sql_condition,
                                       return_empty=True)
    for study_uid in study_uids:
        update_plan_toxicity_grades(study_uid)
    cnx.close()
def recalculate_ages(*custom_condition):
    """Recompute patient age at sim study date for every plan and store it in
    the 'Plans' table.

    :param custom_condition: optional SQL condition (first positional arg),
        ANDed onto the base "mrn != ''" filter
    """
    if custom_condition:
        custom_condition = " AND " + custom_condition[0]
    else:
        custom_condition = ''

    dvh_data = QuerySQL('Plans', "mrn != ''" + custom_condition)
    cnx = DVH_SQL()

    for i, mrn in enumerate(dvh_data.mrn):
        uid = dvh_data.study_instance_uid[i]
        sim_study_date = dvh_data.sim_study_date[i]
        birth_date = dvh_data.birth_date[i]
        try:
            birth_year, birth_month, birth_day = \
                (int(x) for x in birth_date.split('-'))
            birth_date_obj = datetime(birth_year, birth_month, birth_day)

            sim_year, sim_month, sim_day = \
                (int(x) for x in sim_study_date.split('-'))
            sim_study_date_obj = datetime(sim_year, sim_month, sim_day)

            # FIX: original compared the *split list* to '1800-01-01', which
            # is always False, so placeholder sim dates were never nulled.
            if sim_study_date == '1800-01-01':
                age = '(NULL)'
            else:
                age = relativedelta(sim_study_date_obj, birth_date_obj).years

            cnx.update('Plans', 'age', str(age),
                       "study_instance_uid = '%s'" % uid)
        except Exception:
            # Best-effort: a malformed date should not abort the whole pass
            print("Update Failed for", mrn, "sim date:", sim_study_date,
                  "birthdate", birth_date, sep=' ')

    cnx.close()
def update_uncategorized_rois_in_database():
    """Re-run ROI mapping for DVH rows currently marked 'uncategorized' and
    update any that now resolve to a known physician ROI."""
    roi_map = DatabaseROIs()
    dvh_data = QuerySQL('DVHs', "physician_roi = 'uncategorized'")
    cnx = DVH_SQL()

    for i, roi_name in enumerate(dvh_data.roi_name):
        uid = dvh_data.study_instance_uid[i]
        mrn = dvh_data.mrn[i]
        physician = get_physician_from_uid(uid)
        new_physician_roi = roi_map.get_physician_roi(physician, roi_name)
        new_institutional_roi = roi_map.get_institutional_roi(physician,
                                                              roi_name)

        if new_physician_roi != 'uncategorized':
            print(mrn, physician, new_institutional_roi, new_physician_roi,
                  roi_name, sep=' ')
            # FIX: original concatenation omitted the space before 'and',
            # emitting "...'and roi_name..." in the WHERE clause
            condition = "study_instance_uid = '%s' and roi_name = '%s'" \
                        % (uid, roi_name)
            cnx.update('DVHs', 'physician_roi', new_physician_roi, condition)
            cnx.update('DVHs', 'institutional_roi', new_institutional_roi,
                       condition)

    cnx.close()
def dicom_to_sql(start_path=None,
                 force_update=False,
                 move_files=True,
                 update_dicom_catalogue_table=True):
    """Import DICOM plan/struct/dose files from the inbox into the SQL DB.

    :param start_path: optional path (joined to SCRIPT_DIR) overriding the
        configured import inbox
    :param force_update: if True, proceed even when the UID is already
        imported or the plan/struct/dose set is incomplete
    :param move_files: if True, move processed files into the 'imported'
        folder and clean out the inbox afterwards
    :param update_dicom_catalogue_table: if True, record the imported file
        names via update_dicom_catalogue()
    """
    start_time = datetime.now()
    print(str(start_time), 'Beginning import', sep=' ')

    dicom_catalogue_update = []

    # Read SQL configuration file
    abs_file_path = get_settings('import')
    import_settings = parse_settings_file(abs_file_path)

    if start_path:
        abs_file_path = os.path.join(SCRIPT_DIR, start_path)
        import_settings['inbox'] = abs_file_path

    sqlcnx = DVH_SQL()

    # file_paths: mapping of study uid -> per-file-type path info
    file_paths = get_file_paths(import_settings['inbox'])

    for uid in list(file_paths):
        if is_uid_imported(uid):
            print("The UID from the following files is already imported.")
            if not force_update:
                # Skip re-import unless explicitly forced
                print(
                    "Must delete content associated with this UID from database before reimporting."
                )
                print(
                    "These files have been moved into the 'misc' folder within your 'imported' folder."
                )
                for file_type in FILE_TYPES:
                    print(file_paths[uid][file_type]['file_path'])
                print("The UID is %s" % uid)
                continue
            else:
                print("Force Update set to True. Processing with import.")
                print(
                    "WARNING: This import may contain duplicate data already in the database."
                )

        dicom_catalogue_update.append(uid)

        # Collect and print the file paths
        plan_file = file_paths[uid]['rtplan']['latest_file']
        struct_file = file_paths[uid]['rtstruct']['latest_file']
        dose_file = file_paths[uid]['rtdose']['latest_file']
        if IMPORT_LATEST_PLAN_ONLY:
            print("plan file: %s" % plan_file)
        else:
            for f in file_paths[uid]['rtplan']['file_path']:
                print("plan file: %s" % f)
        print("struct file: %s" % struct_file)
        print("dose file: %s" % dose_file)

        # Process DICOM files into Python objects
        plan, beams, dvhs, rxs = [], [], [], []
        # Manufacturer model names, used only for the Gamma Plan check below
        mp, ms, md = [], [], []
        if plan_file:
            mp = dicom.read_file(plan_file).ManufacturerModelName.lower()
        if struct_file:
            ms = dicom.read_file(struct_file).ManufacturerModelName.lower()
        if dose_file:
            md = dicom.read_file(dose_file).ManufacturerModelName.lower()

        if 'gammaplan' in "%s %s %s" % (mp, ms, md):
            print(
                "Leksell Gamma Plan is not currently supported. Skipping import."
            )
            continue

        # Insert Plans rows only when the full plan/struct/dose set exists
        if plan_file and struct_file and dose_file:
            if IMPORT_LATEST_PLAN_ONLY:
                plan = PlanRow(plan_file, struct_file, dose_file)
                sqlcnx.insert_plan(plan)
            else:
                for f in file_paths[uid]['rtplan']['file_path']:
                    plan = PlanRow(f, struct_file, dose_file)
                    sqlcnx.insert_plan(plan)
        else:
            print(
                'WARNING: Missing complete set of plan, struct, and dose files for uid %s'
                % uid)
            if not force_update:
                print(
                    'WARNING: Skipping this import. If you wish to import an incomplete DICOM set, use Force Update'
                )
                print(
                    'WARNING: The current file will be moved to the misc folder with in your imported folder'
                )
                continue

        # Beams are skipped for brachytherapy plans (BrachyTreatmentType set)
        if plan_file:
            if not hasattr(dicom.read_file(plan_file), 'BrachyTreatmentType'):
                if IMPORT_LATEST_PLAN_ONLY:
                    beams = BeamTable(plan_file)
                    sqlcnx.insert_beams(beams)
                else:
                    for f in file_paths[uid]['rtplan']['file_path']:
                        sqlcnx.insert_beams(BeamTable(f))

        if struct_file and dose_file:
            dvhs = DVHTable(struct_file, dose_file)
            setattr(dvhs, 'ptv_number', rank_ptvs_by_D95(dvhs))
            sqlcnx.insert_dvhs(dvhs)

        if plan_file and struct_file:
            if IMPORT_LATEST_PLAN_ONLY:
                rxs = RxTable(plan_file, struct_file)
                sqlcnx.insert_rxs(rxs)
            else:
                for f in file_paths[uid]['rtplan']['file_path']:
                    sqlcnx.insert_rxs(RxTable(f, struct_file))

        # get mrn for folder name, can't assume a complete set of dose, plan, struct files
        mrn = []
        if dose_file:
            mrn = dicom.read_file(dose_file).PatientID
        elif plan_file:
            mrn = dicom.read_file(plan_file).PatientID
        elif struct_file:
            mrn = dicom.read_file(struct_file).PatientID
        if mrn:
            mrn = "".join(x for x in mrn
                          if x.isalnum())  # remove any special characters
        else:
            mrn = 'NoMRN'

        # convert file_paths[uid] into a list of file paths
        if move_files:
            files_to_move = []
            move_types = list(FILE_TYPES) + ['other']
            for file_type in move_types:
                files_to_move.extend(file_paths[uid][file_type]['file_path'])
            new_folder = os.path.join(import_settings['imported'], mrn)
            move_files_to_new_path(files_to_move, new_folder)

        # From here on, the *_file variables hold basenames, not full paths
        if plan_file:
            plan_file = os.path.basename(plan_file)
        if struct_file:
            struct_file = os.path.basename(struct_file)
        if dose_file:
            dose_file = os.path.basename(dose_file)

        if update_dicom_catalogue_table:
            if not IMPORT_LATEST_PLAN_ONLY:
                plan_file = ', '.join([
                    os.path.basename(fp)
                    for fp in file_paths[uid]['rtplan']['file_path']
                ])
            # NOTE(review): new_folder is only assigned inside the
            # `if move_files:` branch above; calling with move_files=False and
            # update_dicom_catalogue_table=True looks like it would raise
            # NameError here — confirm against callers.
            update_dicom_catalogue(mrn, uid, new_folder, plan_file,
                                   struct_file, dose_file)

    # Move remaining files, if any
    if move_files:
        move_all_files(import_settings['imported'], import_settings['inbox'])
        remove_empty_folders(import_settings['inbox'])

    sqlcnx.close()

    end_time = datetime.now()
    print(str(end_time), 'Import complete', sep=' ')

    # Report elapsed wall-clock time in the largest applicable unit
    total_time = end_time - start_time
    seconds = total_time.seconds
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    if h:
        print("This import took %dhrs %02dmin %02dsec to complete" % (h, m, s))
    elif m:
        print("This import took %02dmin %02dsec to complete" % (m, s))
    else:
        print("This import took %02dsec to complete" % s)