Example #1
 def test_recursive_load():
     path = full_file_path(__file__)
     file_list = recursive_list(path)
     print(file_list)
     assert (
         len(file_list) > 5
     )  # the PythonUtils source tree contains at least 6 files.
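
All of the examples on this page call recursive_list without showing its definition. A minimal sketch of what such a helper presumably does, assuming it simply walks a directory tree and returns the full path of every file:

import os
from typing import List

def recursive_list(path: str) -> List[str]:
    # Walk the tree and collect the full path of every file found.
    file_list = []
    for root, _dirs, files in os.walk(path):
        for name in files:
            file_list.append(os.path.join(root, name))
    return file_list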
Example #2
 def decompress_folder(input_folder):
     """
     A wrapper around DICOM_decompress.filelist that decompresses an entire folder. @todo: it does not check for DICOM file presence; it processes all files.
     :param input_folder:
     :return:
     """
     files_list = recursive_list(input_folder)
     DICOM_decompress.filelist(files_list)
Example #3
 def folder(input_folder: str, new_ID: str) -> List[str]:
     """
     Iterate through a folder and anonymize everything in the folder that is DICOM.
     DICOM file check happens at the lowest DICOM_element level.
     :param input_folder:
     :param new_ID:
     :return:
     """
     files_list = recursive_list(input_folder)
     list_bad_files = DICOM_anonymize.filelist(files_list, new_ID)
     return list_bad_files
Example #4
    def test_element_retrieval():
        # List every file in the local test folder.
        file_names = recursive_list(r"C:\Users\Yang Ding\Desktop\test")

        # files = get_testdata_files("[Jj][Pp][Ee][Gg]")
        for file in file_names:
            A, _ = DICOM_elements.retrieve(file,
                                           "StudyDescription")  # can be ''
            B, _ = DICOM_elements.retrieve(
                file, "PatientBirthDate")  # can be 1995-12-19
            C, _ = DICOM_elements.retrieve(file, "PatientSex")  # can be M or F
            assert A and B and C
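
DICOM_elements.retrieve is not shown on this page; judging from the calls, it returns a (success, value) pair. A hedged sketch of such a helper using pydicom (the real implementation may differ):

import pydicom

def retrieve(file_path: str, element_name: str):
    # Return (True, value) when the tag can be read, (False, None) otherwise.
    try:
        dataset = pydicom.dcmread(file_path)
        return True, getattr(dataset, element_name)
    except Exception:
        return False, None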
Example #5
def predict_folder(path_model_weights: str, input_folder: str):
    """
    Folder version of the prediction function provided above.
    :param path_model_weights:
    :param input_folder:
    :return:
    """

    # Load model
    assert os.path.exists(path_model_weights)

    # Load files that predictions will be run upon.
    assert os.path.exists(input_folder)

    # with CustomObjectScope({'BilinearUpsampling': BilinearUpsampling}):
    input_model = deeplabv3_plus(input_shape=(240, 320, 3), num_classes=1)
    input_model.load_weights(path_model_weights)

    list_files = recursive_list(input_folder)

    for file in tqdm(list_files):

        # Skip the file if it is not an image.
        if (
            "JPEG" not in file.upper()
            and "PNG" not in file.upper()
            and "JPG" not in file.upper()
        ):
            continue

        # Load the image and prepare it as a single-image batch.
        img = image.load_img(file)

        x = image.img_to_array(img)
        x = np.expand_dims(
            x, axis=0
        )  # expand the array to the 4D batch tensor the model expects.

        # np.vstack also accepts a list of multiple images at once.
        images = np.vstack([x])
        output = input_model.predict(images)

        gray_scale_matrix = np.squeeze(output).astype(np.uint8)
        img = Image.fromarray(gray_scale_matrix)
        # Path(input_image).Parent.join("Output_" + Path(input_image).name)
        img.save(file + "_Output.jpg")

    write_JSON_records(path_model_weights, list_files, input_folder)

    print("Test folder prediction completed. ")
Example #6
def upload_retrospective_study(path_study_root_folder_path):
    """
    Sample script to recursively import in Orthanc all the DICOM files
    that are stored in some path. Please make sure that Orthanc is running
    before starting this script. The files are uploaded through the REST
    API.

    Usage: %s [hostname] [HTTP port] [path]
    Usage: %s [hostname] [HTTP port] [path] [username] [password]
    For instance: %s 127.0.0.1 8042 .

    :param path_study_root_folder_path:
    :return:
    """

    success_count = 0
    total_file_count = 0

    if os.path.isfile(path_study_root_folder_path):
        # Upload a single file
        total_file_count = 1
        success_count = read_upload_file(path_study_root_folder_path)
    else:
        # Recursively upload a directory
        list_files = recursive_list(path_study_root_folder_path)

        total_file_count = len(list_files)

        # Serial process them:
        # for file in tqdm(list_files):
        #    if read_upload_file(file):
        #        success_count += 1

        # Parallel process them
        num_cores = multiprocessing.cpu_count()

        # Store the output in a list
        results = Parallel(n_jobs=num_cores)(delayed(read_upload_file)(i)
                                             for i in list_files)

        # Count success.
        success_count = results.count(True)

    if success_count == total_file_count:
        logger.info(
            "\nSummary: all %d DICOM file(s) have been imported successfully" %
            success_count)
    else:
        logger.warning(
            "\nSummary: %d out of %d files have been imported successfully as DICOM instances"
            % (success_count, total_file_count))
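
read_upload_file is defined elsewhere; Orthanc accepts raw DICOM instances via POST to its /instances REST endpoint, so a minimal sketch of such an uploader could look like this (the server address and the boolean return convention are assumptions):

import requests

def read_upload_file(file_path: str, server: str = "http://127.0.0.1:8042") -> bool:
    # POST the raw DICOM bytes to Orthanc's REST API; report success on HTTP 200.
    with open(file_path, "rb") as f:
        response = requests.post(server + "/instances", data=f.read())
    return response.status_code == 200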
Example #7
 def raw(input_folder, output_folder):
     """
     FLATCOPY (with timestamp to avoid naming duplications) to an output folder with a "raw" prefix.
     :param input_folder:
     :param output_folder:
     :return:
     """
     os.chdir(output_folder)
     os.mkdir("raw")
     logger.info("Backup up untouched raw files:")
     path_raw = os.path.join(output_folder, "raw")
     file_list = recursive_list(input_folder)
     flatcopy(file_list, path_raw, DICOM_validate.file)
     logger.info("Raw backup completed!")
Example #8
    def load_images(self, image_path):
        """
        Load all images from image_path as X_train.
        :param image_path: path to the folder which contains all images.
        :return: np array of X_train
        """
        X_train = []

        # Load all files from the image path using Image.open.
        for i in recursive_list(image_path):
            # Open the image with PIL.
            img = Image.open(i)
            # Convert to NP array.
            img = np.asarray(img)
            # Append them into higher order array.
            if img.shape == (128, 128, 3):
                X_train.append(img)

        # return all the images concatenated as a 4D array
        return np.asarray(X_train)
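
A brief usage sketch; `loader` stands in for an instance of whatever class defines load_images, and the shape assumes a folder of 128x128 RGB images (anything with a different shape is silently dropped):

# `loader` is a hypothetical instance of the class defining load_images.
X_train = loader.load_images("training_images/")
print(X_train.shape)  # e.g. (n_images, 128, 128, 3)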
Example #9
def BatchDateCalculation(path):
    file_list = recursive_list(path)
    output = []  # accumulate one record per valid DICOM file
    for file in file_list:

        if DICOM_validate.file(file):

            # Try to extract this information from the file.
            success1, StudyDate = DICOM_elements.retrieve(file, "StudyDate")
            success2, PatientBirthDate = DICOM_elements.retrieve(
                file, "PatientBirthDate")
            success3, age = DICOM_elements.compute_age(file)

            # Skip this file if it is not successful.
            if not success1 or not success2 or not success3:
                continue

            # Print, store and append the information acquired.
            A = [file, StudyDate, PatientBirthDate, str(age)]
            print(A)
            output.append(A)

    with open("output.csv", "w") as resultFile:
        wr = csv.writer(resultFile, dialect="excel")
        wr.writerow(output)
Example #10
    def into_folder(input_folder, output_folder):
        """
        This function sorts an input folder (with or without sub-layers), automatically flattening everything into the output folder before MOVING the files into protocol-based folders sorted by acquisition series.
        :param input_folder: can be a root folder or flat.
        :param output_folder: destination folder for the sorted series folders.
        :return:
        """

        # Element to check: Series number.

        from PythonUtils.PUFile import flatcopy
        from PythonUtils.PUFolder import recursive_list

        # Get files
        file_list = recursive_list(input_folder)

        if not os.path.isdir(output_folder):
            os.mkdir(output_folder)

        # copy them to a flat structure to the output folder.
        flatcopy(file_list, output_folder, DICOM_validate.file)

        # decompress them if necessary.
        # oshelper_files.decompress_folder(output_folder)

        # Get files list again.
        file_list = recursive_list(output_folder)

        exception_encountered = 0

        logger.info("Sorting files into folders:")

        # File here should be the FULL path.
        for file in tqdm(file_list, position=0):

            success1, SeriesNumber = DICOM_elements.retrieve(
                file, "SeriesNumber")

            success2, SeriesDescription = DICOM_elements.retrieve(
                file, "SeriesDescription")

            if not success1 or not success2:
                logger.info(
                    f"Skipped file with no acquisition series information: {file}"
                )
                exception_encountered = exception_encountered + 1
                continue

            # Check MRI Series folder exists
            DestinationFolder = str(SeriesNumber) + "_" + SeriesDescription
            DestinationFolder = DestinationFolder.replace(" ", "_")
            DestinationFolder = DestinationFolder.replace(":", "_")
            DestinationFolder = DestinationFolder.replace(r"/", "_")
            DestinationFolder = DestinationFolder.replace(r"\\", "_")

            # Make destination folder if not exist.
            os.chdir(output_folder)
            if not os.path.exists(DestinationFolder):
                os.mkdir(DestinationFolder)

            # Get file name.
            _, filename = os.path.split(file)

            shutil.move(file, os.path.join(DestinationFolder, filename))
        logger.info(f"Total error encountered: {str(exception_encountered)}")
Example #11
    def traversal(dir_path: str, consistency_check: bool = True):
        """
        Some basic information about the participant must be consistent across the files, such as the SCAN DATE (assuming they are not scanning across the MIDNIGHT POINT). Birth date, subject name, etc. MUST BE CONSISTENT across a SINGLE subject's folder.

        :param dir_path:
        :param consistency_check:
        :returns: 1) whether the path and its files are valid, 2) a list of ONLY the valid DICOM files, 3) a list of the unique SeriesInstanceUIDs encountered.
        """
        from DICOMTransit.DICOM.validate import DICOM_validate

        # Reject bad input check
        if not os.path.exists(dir_path) or not os.path.isdir(dir_path):
            logger.error("Bad data folder path")
            return False, None

        # Get all possible files from there.
        files = recursive_list(dir_path)

        # Used to record the first encountered patientID and name, and will check against subsequent folder for same matching information.
        PatientID = ""
        PatientName = ""

        # List to store all validated DICOM files.
        validated_DICOM_files = []

        from DICOMTransit.DICOM.elements import DICOM_elements

        logger.info(
            "Traversing individual dicom file for validation information.")

        list_unique_sUID = []
        previous_sUID = None  # a shorthand to bypass the list check.
        # Check individual DICOM file for consistencies.
        for file in tqdm(files, position=0):

            # Skip current file if they are not DICOM files.
            is_DICOM, dicom_obj = DICOM_validate.file(file)

            if not is_DICOM:
                logger.error(
                    f"Bad DICOM files detected: {file}. They are not returned in the validated list!"
                )

                continue

            # The following section checks individual files and determines whether all files have consistent name/patient information.
            # Useful for unanticipated ZIP files which can be contaminated.
            # Not useful when dealing with ORTHANC output files.

            if consistency_check:
                # @todo: what if one of them is NONE?
                # @todo: what if the date and other things are inconsistent?
                # Record first instance of patient ID and patient name.
                if PatientID == "" and PatientName == "":
                    Success, PatientID = DICOM_elements.retrieve_fast(
                        dicom_obj, "PatientID")
                    Success, PatientName = DICOM_elements.retrieve_fast(
                        dicom_obj, "PatientName")

                    # raise issue if not successful
                    if not Success:
                        logger.error(
                            "DICOM meta data retrieval failure EVEN for the first DICOM FILE?! Checking next one."
                        )
                    else:
                        name = PatientName.original_string.decode("latin_1")
                        logger.debug(
                            f"DICOM meta data retrieval success: {PatientID} {name}"
                        )

                    # Regardless of success or failure, continue to the next file.
                    continue

                # Check consistencies across folders in terms of patient ID, NAME.
                Success1, CurrentPatientID = DICOM_elements.retrieve_fast(
                    dicom_obj, "PatientID")
                Success2, CurrentPatientName = DICOM_elements.retrieve_fast(
                    dicom_obj, "PatientName")

                if not Success1 or not Success2:
                    logger.error(
                        "Could not retrieve fields for comparison. At least ONE DICOM file has inconsistent Patient ID/NAME field."
                    )
                    return False, None

                if not (PatientID == CurrentPatientID) or not (
                        PatientName == CurrentPatientName):
                    logger.info(
                        "PatientID or Name mismatch within the DICOM archive.")
                    return False, None

            success, UID = DICOM_elements.retrieve_fast(
                dicom_obj, "SeriesInstanceUID")

            # A quick UID check before the HEAVY list operation.
            if not UID == previous_sUID and UID not in list_unique_sUID:
                list_unique_sUID.append(UID)

            validated_DICOM_files.append(file)
            previous_sUID = UID

        return True, validated_DICOM_files, list_unique_sUID
    """

    for specific_model in tqdm(path_models):
        # Skip model if WEIGHT is not in the file name.
        if not "WEIGHT" in specific_model.upper():
            continue

        # Pathout + Timestamp + ModelFileName.
        path_output_model = path_output / os.path.basename(specific_model)
        create(path_output_model)
        # Copy all test data to the destination.
        flatcopy(path_input, path_output_model)

        # Run the prediction of the model against content from that folder.
        predict_folder(specific_model, str(path_output_model))

    print("Batch modelS predictions completed!")


if __name__ == "__main__":
    models = recursive_list(
        r"C:\Users\Yang Ding\Desktop\Stockwell\model_test_folder\models_v2")
    path_output = Path(
        r"C:\Users\Yang Ding\Desktop\Stockwell\model_test_folder\Test_Automated\model_v2"
    )
    predict_folder_batch(
        models,
        r"C:\Users\Yang Ding\Desktop\Stockwell\model_test_folder\Image_TestSet",
        path_output,
    )