Example #1
def load_directories():
    global directories
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Get Import settings
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    if is_import_settings_defined():
        directories = parse_settings_file(get_settings('import'))
    else:
        directories = {'inbox': '', 'imported': '', 'review': ''}
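Usage sketch (not part of the original example): the function is assumed to be called once at startup, after which the module-level directories dict holds the folders used by the importer. The keys below come from the fallback branch above; the call site itself is an assumption.

load_directories()  # populates the module-level 'directories' dict

# Keys guaranteed by the fallback branch above
for key in ('inbox', 'imported', 'review'):
    print(key, directories.get(key, ''))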
Example #2
    def __init__(self, *config):
        if config:
            config = config[0]
        else:
            # Read SQL configuration file
            abs_file_path = get_settings('sql')
            config = parse_settings_file(abs_file_path)

        self.dbname = config['dbname']

        cnx = psycopg2.connect(**config)

        self.cnx = cnx
        self.cursor = cnx.cursor()
        self.tables = ['DVHs', 'Plans', 'Rxs', 'Beams', 'DICOM_Files']
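Instantiation sketch, assuming this __init__ belongs to the DVH_SQL class used in Example #4 and that a close() method exists as called there. The explicit connection values are placeholders taken from the defaults in Example #3, not verified project settings.

# With no arguments, the settings file located by get_settings('sql') is parsed
db = DVH_SQL()

# Or pass an explicit psycopg2-style configuration dict (placeholder values)
db = DVH_SQL({'host': 'localhost', 'port': '5432', 'dbname': 'dvh',
              'user': 'postgres', 'password': 'secret'})
print(db.dbname, db.tables)
db.close()  # assumed to exist; Example #4 calls sqlcnx.close()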
Example #3
def load_sql_settings():
    global config
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Get SQL settings
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    if is_sql_connection_defined():
        config = parse_settings_file(get_settings('sql'))

        if 'user' not in config:
            config['user'] = ''
            config['password'] = ''

        if 'password' not in config:
            config['password'] = ''

    else:
        config = {'host': 'localhost',
                  'dbname': 'dvh',
                  'port': '5432',
                  'user': '',
                  'password': ''}
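For illustration only: when no SQL connection is defined, the fallback branch leaves the module-level config with the defaults below, which is exactly the dict Example #2 would forward to psycopg2.connect.

load_sql_settings()  # populates the module-level 'config' dict

# Holds only when is_sql_connection_defined() returned False
assert config == {'host': 'localhost', 'dbname': 'dvh', 'port': '5432',
                  'user': '', 'password': ''}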
Example #4
def dicom_to_sql(start_path=None,
                 force_update=False,
                 move_files=True,
                 update_dicom_catalogue_table=True):

    start_time = datetime.now()
    print(str(start_time), 'Beginning import', sep=' ')

    dicom_catalogue_update = []

    # Read the DICOM import settings file
    abs_file_path = get_settings('import')
    import_settings = parse_settings_file(abs_file_path)

    if start_path:
        abs_file_path = os.path.join(SCRIPT_DIR, start_path)
        import_settings['inbox'] = abs_file_path

    sqlcnx = DVH_SQL()

    file_paths = get_file_paths(import_settings['inbox'])

    for uid in list(file_paths):

        if is_uid_imported(uid):
            print("The UID from the following files is already imported.")
            if not force_update:
                print(
                    "Must delete content associated with this UID from database before reimporting."
                )
                print(
                    "These files have been moved into the 'misc' folder within your 'imported' folder."
                )
                for file_type in FILE_TYPES:
                    print(file_paths[uid][file_type]['file_path'])
                print("The UID is %s" % uid)
                continue

            else:
                print("Force Update set to True. Processing with import.")
                print(
                    "WARNING: This import may contain duplicate data already in the database."
                )

        dicom_catalogue_update.append(uid)

        # Collect and print the file paths
        plan_file = file_paths[uid]['rtplan']['latest_file']
        struct_file = file_paths[uid]['rtstruct']['latest_file']
        dose_file = file_paths[uid]['rtdose']['latest_file']
        if IMPORT_LATEST_PLAN_ONLY:
            print("plan file: %s" % plan_file)
        else:
            for f in file_paths[uid]['rtplan']['file_path']:
                print("plan file: %s" % f)
        print("struct file: %s" % struct_file)
        print("dose file: %s" % dose_file)

        # Process DICOM files into Python objects
        plan, beams, dvhs, rxs = [], [], [], []
        mp, ms, md = [], [], []
        if plan_file:
            mp = dicom.read_file(plan_file).ManufacturerModelName.lower()
        if struct_file:
            ms = dicom.read_file(struct_file).ManufacturerModelName.lower()
        if dose_file:
            md = dicom.read_file(dose_file).ManufacturerModelName.lower()

        if 'gammaplan' in "%s %s %s" % (mp, ms, md):
            print(
                "Leksell Gamma Plan is not currently supported. Skipping import."
            )
            continue

        if plan_file and struct_file and dose_file:
            if IMPORT_LATEST_PLAN_ONLY:
                plan = PlanRow(plan_file, struct_file, dose_file)
                sqlcnx.insert_plan(plan)
            else:
                for f in file_paths[uid]['rtplan']['file_path']:
                    plan = PlanRow(f, struct_file, dose_file)
                    sqlcnx.insert_plan(plan)
        else:
            print(
                'WARNING: Missing complete set of plan, struct, and dose files for uid %s'
                % uid)
            if not force_update:
                print(
                    'WARNING: Skipping this import. If you wish to import an incomplete DICOM set, use Force Update'
                )
                print(
                    'WARNING: The current file will be moved to the misc folder within your imported folder'
                )
                continue

        if plan_file:
            # Brachy plans (which define BrachyTreatmentType) have no external beams to record
            if not hasattr(dicom.read_file(plan_file), 'BrachyTreatmentType'):
                if IMPORT_LATEST_PLAN_ONLY:
                    beams = BeamTable(plan_file)
                    sqlcnx.insert_beams(beams)
                else:
                    for f in file_paths[uid]['rtplan']['file_path']:
                        sqlcnx.insert_beams(BeamTable(f))
        if struct_file and dose_file:
            dvhs = DVHTable(struct_file, dose_file)
            setattr(dvhs, 'ptv_number', rank_ptvs_by_D95(dvhs))
            sqlcnx.insert_dvhs(dvhs)
        if plan_file and struct_file:
            if IMPORT_LATEST_PLAN_ONLY:
                rxs = RxTable(plan_file, struct_file)
                sqlcnx.insert_rxs(rxs)
            else:
                for f in file_paths[uid]['rtplan']['file_path']:
                    sqlcnx.insert_rxs(RxTable(f, struct_file))

        # get mrn for folder name, can't assume a complete set of dose, plan, struct files
        mrn = []
        if dose_file:
            mrn = dicom.read_file(dose_file).PatientID
        elif plan_file:
            mrn = dicom.read_file(plan_file).PatientID
        elif struct_file:
            mrn = dicom.read_file(struct_file).PatientID
        if mrn:
            mrn = "".join(x for x in mrn
                          if x.isalnum())  # remove any special characters
        else:
            mrn = 'NoMRN'

        # Destination folder for this patient's files; defined outside the
        # move_files block because update_dicom_catalogue below also needs it
        new_folder = os.path.join(import_settings['imported'], mrn)

        # convert file_paths[uid] into a list of file paths
        if move_files:
            files_to_move = []
            move_types = list(FILE_TYPES) + ['other']
            for file_type in move_types:
                files_to_move.extend(file_paths[uid][file_type]['file_path'])

            move_files_to_new_path(files_to_move, new_folder)

        if plan_file:
            plan_file = os.path.basename(plan_file)
        if struct_file:
            struct_file = os.path.basename(struct_file)
        if dose_file:
            dose_file = os.path.basename(dose_file)

        if update_dicom_catalogue_table:
            if not IMPORT_LATEST_PLAN_ONLY:
                plan_file = ', '.join([
                    os.path.basename(fp)
                    for fp in file_paths[uid]['rtplan']['file_path']
                ])
            update_dicom_catalogue(mrn, uid, new_folder, plan_file,
                                   struct_file, dose_file)

    # Move remaining files, if any
    if move_files:
        move_all_files(import_settings['imported'], import_settings['inbox'])
        remove_empty_folders(import_settings['inbox'])

    sqlcnx.close()

    end_time = datetime.now()
    print(str(end_time), 'Import complete', sep=' ')

    total_time = end_time - start_time
    seconds = total_time.seconds
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    if h:
        print("This import took %dhrs %02dmin %02dsec to complete" % (h, m, s))
    elif m:
        print("This import took %02dmin %02dsec to complete" % (m, s))
    else:
        print("This import took %02dsec to complete" % s)