Example No. 1
def main():
    # Silence Streamlit's file-uploader encoding deprecation warning.
    st.set_option("deprecation.showfileUploaderEncoding", False)

    uploaded_file_buffer_list = st.file_uploader(
        "Files to pseudonymise, refresh page after downloading zip(s)",
        ["dcm"],
        accept_multiple_files=True,
    )

    if st.button("Pseudonymise", key="PseudonymiseButton"):
        pseudonymise_buffer_list(uploaded_file_buffer_list)
        uploaded_file_buffer_list.clear()
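
Taken on its own, the upload-and-iterate pattern above needs nothing beyond Streamlit. The following is a minimal, self-contained sketch under that assumption; the project-specific pseudonymise_buffer_list step is replaced by a hypothetical loop that simply reads each uploaded buffer.

import streamlit as st


def main():
    # With accept_multiple_files=True the uploader returns a list of
    # in-memory UploadedFile objects (file-like, readable as bytes).
    buffers = st.file_uploader(
        "Select one or more .dcm files", ["dcm"], accept_multiple_files=True
    )

    if st.button("Process") and buffers:
        for buffer in buffers:
            buffer.seek(0)  # rewind in case the buffer was read earlier
            raw_bytes = buffer.read()  # hand these bytes to your own processing
            st.write(f"{buffer.name}: {len(raw_bytes)} bytes read")


if __name__ == "__main__":
    main()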
Example No. 2
def dicom_input_method(  # pylint: disable = too-many-return-statements
        key_namespace="",
        patient_id="",
        **_):
    FILE_UPLOAD = "File upload"
    MONACO_SEARCH = "Search Monaco file export location"

    dicom_export_locations = get_dicom_export_locations()

    import_method = st.radio(
        "DICOM import method",
        [FILE_UPLOAD, MONACO_SEARCH],
        key=f"{key_namespace}_dicom_file_import_method",
    )

    if import_method == FILE_UPLOAD:
        dicom_plan_bytes = st.file_uploader(
            "Upload DICOM RT Plan File",
            key=f"{key_namespace}_dicom_plan_uploader")

        if dicom_plan_bytes is None:
            return {}

        try:
            # pydicom.dcmread is the current name for the deprecated read_file.
            dicom_plan = pydicom.dcmread(dicom_plan_bytes, force=True)
        except:  # pylint: disable = bare-except
            st.write(WrongFileType("Does not appear to be a DICOM file"))
            return {}

        if dicom_plan.SOPClassUID != DICOM_PLAN_UID:
            st.write(
                WrongFileType(
                    "The DICOM type needs to be an RT DICOM Plan file"))
            return {}

        data_paths = ["Uploaded DICOM file"]

    if import_method == MONACO_SEARCH:
        site_options = list(dicom_export_locations.keys())
        monaco_site = st.radio("Monaco Export Location",
                               site_options,
                               key=f"{key_namespace}_monaco_site")
        monaco_export_directory = dicom_export_locations[monaco_site]
        st.write(monaco_export_directory.resolve())

        patient_id = st.text_input("Patient ID",
                                   patient_id,
                                   key=f"{key_namespace}_patient_id")

        found_dicom_files = list(
            monaco_export_directory.glob(f"{patient_id}_*.dcm"))

        dicom_plans = {}

        for path in found_dicom_files:
            dcm = load_dicom_file_if_plan(path)
            if dcm is not None:
                dicom_plans[path.name] = dcm

        dicom_plan_options = list(dicom_plans.keys())

        if len(dicom_plan_options) == 0:
            if patient_id != "":
                st.write(
                    NoRecordsFound(
                        f"No exported DICOM RT plans found for Patient ID {patient_id} "
                        f"within the directory {monaco_export_directory}"))
            return {"patient_id": patient_id}

        if len(dicom_plan_options) == 1:
            selected_plan = dicom_plan_options[0]
        else:
            selected_plan = st.radio(
                "Select DICOM Plan",
                dicom_plan_options,
                key=f"{key_namespace}_select_monaco_export_plan",
            )

        f"DICOM file being used: `{selected_plan}`"

        dicom_plan = dicom_plans[selected_plan]
        data_paths = [monaco_export_directory.joinpath(selected_plan)]

    patient_id = str(dicom_plan.PatientID)
    f"Patient ID: `{patient_id}`"

    patient_name = str(dicom_plan.PatientName)
    patient_name = utl_patient.convert_patient_name(patient_name)

    f"Patient Name: `{patient_name}`"

    rt_plan_name = str(dicom_plan.RTPlanName)
    f"Plan Name: `{rt_plan_name}`"

    try:
        deliveries_all_fractions = pymedphys.Delivery.from_dicom(
            dicom_plan, fraction_number="all")
    except AttributeError:
        st.write(WrongFileType("Does not appear to be a photon DICOM plan"))
        return {}

    fractions = list(deliveries_all_fractions.keys())
    if len(fractions) == 1:
        delivery = deliveries_all_fractions[fractions[0]]
    else:
        fraction_choices = {}

        for fraction, delivery in deliveries_all_fractions.items():
            rounded_mu = round(delivery.mu[-1], 1)

            fraction_choices[
                f"Perscription {fraction} with {rounded_mu} MU"] = fraction

        fraction_selection = st.radio(
            "Select relevant perscription",
            list(fraction_choices.keys()),
            key=f"{key_namespace}_dicom_perscription_chooser",
        )

        fraction_number = fraction_choices[fraction_selection]
        delivery = deliveries_all_fractions[fraction_number]

    deliveries = [delivery]

    identifier = f"DICOM ({rt_plan_name})"

    return {
        "patient_id": patient_id,
        "patient_name": patient_name,
        "data_paths": data_paths,
        "identifier": identifier,
        "deliveries": deliveries,
    }
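
The file-upload branch above can be exercised in isolation with the minimal sketch below. It assumes that the example's DICOM_PLAN_UID constant corresponds to the standard "RT Plan Storage" SOP Class UID, and it reports problems with st.error rather than the project's WrongFileType wrapper.

import pydicom
import streamlit as st

# Standard "RT Plan Storage" SOP Class UID, assumed here to be what the
# example's DICOM_PLAN_UID constant refers to.
RT_PLAN_STORAGE_UID = "1.2.840.10008.5.1.4.1.1.481.5"

uploaded = st.file_uploader("Upload DICOM RT Plan File")

if uploaded is not None:
    try:
        # UploadedFile objects are file-like, so pydicom can read them directly.
        plan = pydicom.dcmread(uploaded, force=True)
    except Exception:
        st.error("Does not appear to be a DICOM file")
    else:
        if getattr(plan, "SOPClassUID", None) != RT_PLAN_STORAGE_UID:
            st.error("The DICOM type needs to be an RT DICOM Plan file")
        else:
            st.write(f"Patient ID: `{plan.PatientID}`")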
Example No. 3
def trf_input_method(patient_id="", key_namespace="", **_):
    """Streamlit GUI method to facilitate TRF data provision.

    Notes
    -----
    TRF files themselves have no innate patient alignment. One option
    for TRF collection is the CLI tool ``pymedphys trf orchestrate``.
    This connects to the SAMBA server hosted on the Elekta NSS and
    downloads the diagnostic backup zips. It then takes the contained
    TRF files and queries the Mosaiq database by time of delivery to
    match each file with a patient ID (Ident.Pat_ID1) and name.

    As such, all references to patient ID and name within this
    ``trf_input_method`` are actually references to their Mosaiq
    database counterparts.
    """
    FILE_UPLOAD = "File upload"
    INDEXED_TRF_SEARCH = "Search indexed TRF directory"

    import_method = st.radio(
        "TRF import method",
        [FILE_UPLOAD, INDEXED_TRF_SEARCH],
        key=f"{key_namespace}_trf_file_import_method",
    )

    if import_method == FILE_UPLOAD:
        selected_files = st.file_uploader(
            "Upload TRF files",
            key=f"{key_namespace}_trf_file_uploader",
            accept_multiple_files=True,
        )

        if not selected_files:
            return {}

        data_paths = []
        individual_identifiers = ["Uploaded TRF file(s)"]

    if import_method == INDEXED_TRF_SEARCH:
        try:
            indexed_trf_directory = _config.get_indexed_trf_directory()
        except KeyError:
            st.write(
                _exceptions.ConfigMissing(
                    "No indexed TRF directory is configured. Please use "
                    f"'{FILE_UPLOAD}' instead."))
            return {}

        patient_id = st.text_input("Patient ID",
                                   patient_id,
                                   key=f"{key_namespace}_patient_id")
        st.write(patient_id)

        filepaths = list(
            indexed_trf_directory.glob(f"*/{patient_id}_*/*/*/*/*.trf"))

        raw_timestamps = [
            "_".join(path.parent.name.split("_")[0:2]) for path in filepaths
        ]
        timestamps = list(
            pd.to_datetime(raw_timestamps,
                           format="%Y-%m-%d_%H%M%S").astype(str))

        timestamp_filepath_map = dict(zip(timestamps, filepaths))

        timestamps = sorted(timestamps, reverse=True)

        if len(timestamps) == 0:
            if patient_id != "":
                st.write(
                    _exceptions.NoRecordsFound(
                        f"No TRF log file found for patient ID {patient_id}"))
            return {"patient_id": patient_id}

        if len(timestamps) == 1:
            default_timestamp = timestamps[0]
        else:
            default_timestamp = []

        selected_trf_deliveries = st.multiselect(
            "Select TRF delivery timestamp(s)",
            timestamps,
            default=default_timestamp,
            key=f"{key_namespace}_trf_deliveries",
        )

        if not selected_trf_deliveries:
            return {}

        st.write("""
            #### TRF filepath(s)
            """)

        selected_files = [
            timestamp_filepath_map[timestamp]
            for timestamp in selected_trf_deliveries
        ]
        st.write([str(path.resolve()) for path in selected_files])

        individual_identifiers = [
            f"{path.parent.parent.parent.parent.name} {path.parent.name}"
            for path in selected_files
        ]

        data_paths = selected_files

    st.write("""
        #### Log file header(s)
        """)

    headers = []
    tables = []
    for path_or_binary in selected_files:
        try:
            # Rewind uploaded buffers that may already have been read;
            # plain Path objects have no seek() and are handled below.
            path_or_binary.seek(0)
        except AttributeError:
            pass

        header, table = read_trf(path_or_binary)
        headers.append(header)
        tables.append(table)

    headers = pd.concat(headers)
    headers.reset_index(inplace=True)
    headers.drop("index", axis=1, inplace=True)

    st.write(headers)

    deliveries = _deliveries.cached_deliveries_loading(
        tables, _deliveries.delivery_from_trf)

    identifier = f"TRF ({individual_identifiers[0]})"

    patient_name = _attempt_patient_name_from_mosaiq(headers)

    return {
        "site": None,
        "patient_id": patient_id,
        "patient_name": patient_name,
        "data_paths": data_paths,
        "identifier": identifier,
        "deliveries": deliveries,
    }
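
The timestamp handling in the indexed-search branch is worth isolating: each TRF's parent directory name carries the delivery date and time, which pandas turns into sortable, human-readable labels. The sketch below reproduces that mapping with hypothetical paths laid out to match the */<patient_id>_*/*/*/*/*.trf glob used above.

from pathlib import Path

import pandas as pd

# Hypothetical paths following the assumed "<YYYY-MM-DD>_<HHMMSS>_..."
# parent-directory naming convention.
filepaths = [
    Path("index/123456_DOE/clinical/2_2/2023-05-17_101523_7001/delivery.trf"),
    Path("index/123456_DOE/clinical/2_2/2023-06-02_143210_7001/delivery.trf"),
]

raw_timestamps = ["_".join(path.parent.name.split("_")[0:2]) for path in filepaths]
timestamps = list(
    pd.to_datetime(raw_timestamps, format="%Y-%m-%d_%H%M%S").astype(str)
)

# Map each readable timestamp back to its file and list them newest first.
timestamp_filepath_map = dict(zip(timestamps, filepaths))
for stamp in sorted(timestamps, reverse=True):
    print(stamp, "->", timestamp_filepath_map[stamp])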