Example 1
    def default_patient_mask(self):
        self.patient_mask.StudyDate = ''  # (0008,0020)
        self.patient_mask.PatientName = ''  # (0010,0010)
        self.patient_mask.PatientBirthDate = ''  # (0010,0030)
        self.patient_mask.PatientID = ''  # (0010,0020)
        self.patient_mask.StudyDescription = ''  # (0008,1030)
        self.patient_mask.AccessionNumber = ''  # (0008,0050)
        self.patient_mask.ReferringPhysicianName = ''  # (0008,0090)
        self.patient_mask.add(DataElement(0x00081060, 'PN', ''))  # NameOfPhysiciansReadingStudy (0008,1060)
        self.patient_mask.add(DataElement(0x00080080, 'LO',
                                          ''))  # InstitutionName
        self.patient_mask.add(DataElement(0x00081050, 'PN',
                                          ''))  # PerformingPhysicianName

        self.patient_mask.StudyInstanceUID = ''
        self.patient_mask.QueryRetrieveLevel = 'STUDY'
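An empty value on each of these keys means universal matching in DICOM Query/Retrieve, so the mask doubles as a C-FIND identifier. A minimal usage sketch, assuming pynetdicom and a hypothetical PACS host/port (not part of the example above):

# Hypothetical sketch: send a STUDY-level C-FIND with empty (universal-match) keys.
from pydicom.dataset import Dataset
from pynetdicom import AE
from pynetdicom.sop_class import StudyRootQueryRetrieveInformationModelFind

query = Dataset()
query.QueryRetrieveLevel = 'STUDY'
query.PatientID = ''                 # empty value = universal match
query.PatientName = ''
query.StudyInstanceUID = ''

ae = AE(ae_title='TEST_SCU')
ae.add_requested_context(StudyRootQueryRetrieveInformationModelFind)
assoc = ae.associate('127.0.0.1', 11112)   # hypothetical PACS address
if assoc.is_established:
    responses = assoc.send_c_find(query,
                                  StudyRootQueryRetrieveInformationModelFind)
    for status, identifier in responses:
        if status and status.Status in (0xFF00, 0xFF01):  # pending = one match
            print(identifier.PatientID, identifier.StudyInstanceUID)
    assoc.release()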
Example 2
def create_attribute_list(attrs):
    ds = Dataset()
    ds.PatientID = '12345'
    ds.PatientName = 'Test^User'
    for attr in attrs:
        didic = DicomDictionary.get(attr)  # (VR, VM, Name, Retired, Keyword)
        print('(VR, VM, Name, Retired, Keyword):', didic)
        elem = DataElement(attr, didic[0], '')
        ds.add(elem)
    return ds
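The DicomDictionary lookup above can also be done with pydicom's datadict helpers, which avoid indexing the raw dictionary tuple. A small sketch of the same idea (an alternative with a hypothetical function name, not the original code):

from pydicom.dataset import Dataset
from pydicom.dataelem import DataElement
from pydicom.datadict import dictionary_VR, keyword_for_tag

def create_attribute_list_via_datadict(tags):
    """Build a dataset holding empty elements for the given tags."""
    ds = Dataset()
    for tag in tags:
        vr = dictionary_VR(tag)              # e.g. 'DA' for 0x00080020
        ds.add(DataElement(tag, vr, ''))
        print(hex(tag), vr, keyword_for_tag(tag))
    return ds

# e.g. StudyDate and PatientBirthDate as empty elements
ds = create_attribute_list_via_datadict([0x00080020, 0x00100030])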
Example 3
    def convert(self):
        # create file meta information
        file_meta = Dataset()
        file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.77.1.2'  # VL Microscopic Image Storage
        file_meta.MediaStorageSOPInstanceUID = "1.2.276.0.7230010.3.1.4.296485376.1.1484917438.721089"
        file_meta.ImplementationClassUID = "1.2.3.4"
        file_meta.FileMetaInformationVersion = b'\x00\x01'
        file_meta.FileMetaInformationGroupLength = len(file_meta)  # placeholder; pydicom recomputes it on save_as(write_like_original=False)
        if self.JPEG_COMPRESS:
            # file_meta.TransferSyntaxUID = '1.2.840.10008.1.2.4.80'  # JPEG-LS Lossless
            # file_meta.TransferSyntaxUID = '1.2.840.10008.1.2.4.70'  # JPEG Lossless (Process 14, SV1)
            file_meta.TransferSyntaxUID = '1.2.840.10008.1.2.4.50'  # JPEG Baseline (Process 1)
        else:
            file_meta.TransferSyntaxUID = '1.2.840.10008.1.2'  # Implicit VR Little Endian (default, uncompressed)

        # write data into Dicom instances
        self.instance_cnt = 0
        for frame_items_info in self.frame_items_info_list:
            print("Saving to instance %d/%d" %
                  (self.instance_cnt, len(self.frame_items_info_list)))
            # update relevant tags
            self.dcm_instance.InstanceNumber = self.instance_cnt
            self.dcm_instance.SeriesInstanceUID = '1.2.276.0.7230010.3.1.3.296485376.1.1484917433.721085.' + str(
                frame_items_info.img_level)
            self.dcm_instance.SeriesNumber = frame_items_info.img_level
            print(frame_items_info.img_level)
            # self.dcm_instance.SOPInstanceUID = self.dcm_instance.SOPInstanceUID + str(self.instance_cnt)
            self.dcm_instance.SOPInstanceUID = '1.2.276.0.7230010.3.1.4.296485376.1.1484917438.721089.' + str(
                self.instance_cnt)
            self.dcm_instance.NumberOfFrames = len(frame_items_info.locations)
            self.dcm_instance.TotalPixelMatrixColumns, self.dcm_instance.TotalPixelMatrixRows = self.wsi_obj.level_dimensions[
                frame_items_info.img_level]
            self.add_Frame_Sequence_data(frame_items_info)
            # create encoded pixel data
            PixelData_encoded = self.add_PixelData(frame_items_info)
            if self.JPEG_COMPRESS:
                filename = os.path.join(
                    self.save_to_dir,
                    "compressed_instance_" + str(self.instance_cnt) + ".dcm")
                data_elem_tag = pydicom.tag.TupleTag((0x7FE0, 0x0010))
                enc_frames = encapsulate(PixelData_encoded, has_bot=True)
                pd_ele = DataElement(data_elem_tag,
                                     'OB',
                                     enc_frames,
                                     is_undefined_length=True)
                self.dcm_instance.add(pd_ele)
            else:
                filename = os.path.join(
                    self.save_to_dir,
                    "instance_" + str(self.instance_cnt) + ".dcm")
                self.dcm_instance.PixelData = PixelData_encoded

            self.dcm_instance.file_meta = file_meta
            self.dcm_instance.save_as(filename, write_like_original=False)
            self.instance_cnt += 1
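To sanity-check the output, the written instances can be read back with pydicom; decoding the JPEG Baseline frames additionally requires a pixel-data handler such as Pillow. A read-back sketch, assuming one of the compressed filenames produced by the loop above:

import pydicom

# Hypothetical read-back of the first compressed instance written above
ds = pydicom.dcmread("compressed_instance_0.dcm")
print(ds.file_meta.TransferSyntaxUID)      # 1.2.840.10008.1.2.4.50 (JPEG Baseline)
print(ds.NumberOfFrames, ds.TotalPixelMatrixRows, ds.TotalPixelMatrixColumns)

# Decoding the encapsulated frames needs a pixel-data handler such as Pillow
frames = ds.pixel_array                    # one decoded array per frame
print(frames.shape, frames.dtype)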
Example 4
def add_graphic_annotation(dicom, group_number, layer, type, origin, rows,
                           columns, data):
    dicom[group_number, 0x1001] = DataElement(Tag(group_number, 0x1001), "CS",
                                              layer)  # OverlayActivationLayer
    dicom[group_number, 0x40] = DataElement(Tag(group_number, 0x40), "CS",
                                            type)  # OverlayType
    dicom[group_number, 0x50] = DataElement(Tag(group_number, 0x50), "SS",
                                            origin)  # OverlayOrigin
    dicom[group_number, 0x10] = DataElement(Tag(group_number, 0x10), "US",
                                            rows)  # OverlayRows
    dicom[group_number, 0x11] = DataElement(Tag(group_number, 0x11), "US",
                                            columns)  # OverlayColumns
    dicom[group_number, 0x100] = DataElement(Tag(group_number, 0x100), "US",
                                             1)  # OverlayBitsAllocated
    dicom[group_number, 0x102] = DataElement(Tag(group_number, 0x102), "US",
                                             0)  # OverlayBitPosition
    dicom[group_number, 0x3000] = DataElement(Tag(group_number, 0x3000), "OW",
                                              data)  # OverlayData
    return dicom
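Overlay Data (60xx,3000) with OverlayBitsAllocated = 1 is packed one bit per pixel, least significant bit first. A hedged usage sketch of the function above, assuming NumPy >= 1.17 (for bitorder) and using a stand-in Dataset as the target image:

import numpy as np
from pydicom.dataset import Dataset

# Hypothetical binary mask with the same geometry as the image
rows, columns = 480, 640
mask = np.zeros((rows, columns), dtype=np.uint8)
mask[100:200, 150:300] = 1                        # region to highlight

# Pack 1 bit per pixel, least significant bit first
packed = np.packbits(mask.reshape(-1), bitorder='little').tobytes()

ds = Dataset()                                    # stand-in for the target image dataset
ds = add_graphic_annotation(ds, 0x6000, 'LAYER1', 'G', [1, 1],
                            rows, columns, packed)
print(ds[0x6000, 0x3000].VR, len(ds[0x6000, 0x3000].value))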
Example 5
def writeDicom(dataset, outfilename):

    datasetObj = json.loads(dataset.encode('GB18030').decode('iso8859'))

    # Read the template DICOM file
    filename = get_testdata_files('color-px.dcm')[0]
    ds = pydicom.dcmread(filename)

    # Current time
    dt = datetime.datetime.now()

    # print("Setting file meta information...")
    # Populate required values for file meta information
    # file_meta = ds.file_meta
    # file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'
    # file_meta.MediaStorageSOPInstanceUID = "1.2.3"
    # file_meta.ImplementationClassUID = "1.2.3.4"
    # file_meta.TransferSyntaxUID = "1.2.840.10008.1.2.5"

    print("Setting dataset values...")
    # Create the FileDataset instance (initially no data elements, but file_meta supplied)
    # ds = FileDataset(filename, {}, file_meta=file_meta, preamble=b"\0" * 128)

    img = Image.open(datasetObj['ImagePixel']['PixelData'])
    nparr = np.asarray(img)

    # Pixel Data
    # ds[0x7FE0, 0x0010] = DataElement(0x7FE00010, 'OW', img.tobytes())
    ds.PixelData = nparr.tobytes()
    # Patient's Name
    ds[0x0010, 0x0010] = DataElement(0x00100010, 'PN',
                                     datasetObj['Patient']['PatientName'])
    # Patient ID
    ds[0x0010, 0x0020] = DataElement(0x00100020, 'LO',
                                     datasetObj['Patient']['PatientID'])
    # Patient's Birth Date
    ds[0x0010, 0x0030] = DataElement(0x00100030, 'DA',
                                     datasetObj['Patient']['PatientBirthDate'])
    # Patient's Sex
    ds[0x0010, 0x0040] = DataElement(0x00100040, 'CS',
                                     datasetObj['Patient']['PatientSex'])
    # Study Date
    ds[0x0008, 0x0020] = DataElement(0x00080020, 'DA',
                                     datasetObj['GeneralStudy']['StudyDate'])
    # Study Time
    ds[0x0008, 0x0030] = DataElement(0x00080030, 'TM',
                                     datasetObj['GeneralStudy']['StudyTime'])
    # Accession Number
    ds[0x0008,
       0x0050] = DataElement(0x00080050, 'SH',
                             datasetObj['GeneralStudy']['AccessionNumber'])
    # Study Instance UID
    # ds[0x0020, 0x000D] = DataElement(0x0020000D, 'UI', '1.2.840.1.2.8.236.511020181107')
    # Study ID
    ds[0x0020, 0x0010] = DataElement(0x00200010, 'SH',
                                     datasetObj['GeneralStudy']['StudyID'])
    # Patient's Age
    ds[0x0010, 0x1010] = DataElement(0x00101010, 'AS',
                                     datasetObj['PatientStudy']['PatientAge'])
    # Patient's Size
    ds[0x0010, 0x1020] = DataElement(0x00101020, 'DS',
                                     datasetObj['PatientStudy']['PatientSize'])
    # Patient's Weight
    ds[0x0010,
       0x1030] = DataElement(0x00101030, 'DS',
                             datasetObj['PatientStudy']['PatientWeight'])
    # Series Date
    ds[0x0008, 0x0021] = DataElement(0x00080021, 'DA', dt.strftime('%Y%m%d'))
    # Series Time
    ds[0x0008, 0x0031] = DataElement(0x00080031, 'TM',
                                     dt.strftime('%H%M%S.%f'))
    # Modality
    ds[0x0008, 0x0060] = DataElement(0x00080060, 'CS',
                                     datasetObj['GeneralSeries']['Modality'])
    # Series Description
    ds[0x0008, 0x103E] = DataElement(0x0008103E, 'LO', '')
    # Performing Physician's Name
    ds[0x0008, 0x1050] = DataElement(0x00081050, 'PN', '')
    # Body Part Examined
    ds[0x0018, 0x0015] = DataElement(0x00180015, 'CS', '')
    # Series Instance UID
    # ds[0x0020, 0x000E] = DataElement(0x0020000E, 'UI', '1.2.840.1.2.8.236.51102018110715')
    # Series Number
    ds[0x0020,
       0x0011] = DataElement(0x00200011, 'IS',
                             datasetObj['GeneralSeries']['SeriesNumber'])
    # Manufacturer
    ds[0x0008,
       0x0070] = DataElement(0x00080070, 'LO',
                             datasetObj['GeneralEquipment']['Manufacturer'])
    # Institution Name
    ds[0x0008,
       0x0080] = DataElement(0x00080080, 'LO',
                             datasetObj['GeneralEquipment']['InstitutionName'])
    # Image Type
    # ds[0x0008, 0x0008] = DataElement(0x00080008, 'CS', ['DERIVED', 'SECONDARY'])   # SECONDARY
    # Instance Number
    ds[0x0020,
       0x0013] = DataElement(0x00200013, 'IS',
                             datasetObj['GeneralImage']['InstanceNumber'])
    # Window Center
    ds[0x0028, 0x1050] = DataElement(0x00281050, 'DS',
                                     datasetObj['VOILUT']['WindowCenter'])
    # Window Width
    ds[0x0028, 0x1051] = DataElement(0x00281051, 'DS',
                                     datasetObj['VOILUT']['WindowWidth'])
    # # SOP Class UID
    # ds[0x0008, 0x0016] = DataElement(0x00080016, 'UI', '1.2.840.10008.5.1.4.1.1.7')
    # # SOP Instance UID
    # ds[0x0008, 0x0018] = DataElement(0x00080018, 'UI', '1.2.840.1.2.8.236.511020181107154038')
    # Bits Allocated
    ds[0x0028, 0x0100] = DataElement(0x00280100, 'US', 8)
    # Rows
    ds[0x0028, 0x0010] = DataElement(0x00280010, 'US', 480)
    # Columns
    ds[0x0028, 0x0011] = DataElement(0x00280011, 'US', 640)
    # Photometric Interpretation
    ds[0x0028, 0x0004] = DataElement(0x00280004, 'CS', 'RGB')
    # Samples Per Pixel
    ds[0x0028, 0x0002] = DataElement(0x00280002, 'US', 3)  # 3 samples per pixel for RGB
    # Pixel Aspect Ratio
    ds[0x0028, 0x0034] = DataElement(0x00280034, 'IS', ['1', '1'])
    # Bits Stored
    ds[0x0028, 0x0101] = DataElement(0x00280101, 'US', 8)
    # High Bit
    ds[0x0028, 0x0102] = DataElement(0x00280102, 'US', 7)

    ds.ContentDate = dt.strftime('%Y%m%d')
    ds.ContentTime = dt.strftime('%H%M%S.%f')  # long format with micro seconds

    ds.save_as(outfilename)

    # pydicom.write_file(filename, ds)

    # print('Load file {} ...'.format(outfilename))
    # ds = pydicom.dcmread(outfilename)
    # print(str(ds).encode('iso8859').decode('iso8859'))

    print("Dicom文件:{} 生成成功!".format(outfilename))
Example 6
def interpolate_and_wrapup_rs(input_mrcnn_out, input_ct_filelist,
                              output_rs_filepath_after_ai, model_name):
    print('Use RS template with model_name = {}'.format(model_name))
    if model_name == "MRCNN_Brachy":
        #RS_TEMPLATE_FILEPATH = r"./ModelsAndRSTemplates/Brachy/RS_Template/RS.1.2.246.352.71.4.417454940236.267194.20190411111011.dcm"
        RS_TEMPLATE_FILEPATH = r"./ModelsAndRSTemplates/NewBrachy/RS_Template/RS.1.2.246.352.71.4.417454940236.267194.20190411111011.dcm"
    elif model_name == "MRCNN_Breast":
        RS_TEMPLATE_FILEPATH = r"./ModelsAndRSTemplates/Breast/RS_Template/RS.1.2.246.352.71.4.417454940236.244247.20190418132000.dcm"

    print("call interpolate_and_wrapup_rs()")
    print("with arg input_ct_filelist = {}".format(input_ct_filelist))
    print("with arg output_rs_filepath_after_ai = {}".format(
        output_rs_filepath_after_ai))
    import datetime
    import traceback
    from pydicom.dataset import Dataset, DataElement
    global CT_table
    global Thickness_table
    CT_table = {}
    Thickness_table = {}
    rs_fp = pydicom_read_rs_template(input_ct_filelist, RS_TEMPLATE_FILEPATH)
    ct_FrameOfReferenceUID = None
    ct_StudyInstanceUID = None
    for filepath in input_ct_filelist:
        ct_fp = pydicom.read_file(filepath)
        ct_FrameOfReferenceUID = ct_fp.FrameOfReferenceUID
        ct_StudyInstanceUID = ct_fp.StudyInstanceUID
        break

    # ================== process_rs() start ===========================
    global SOPInstanceUID_list_of_CTs
    label_id_mask = input_mrcnn_out
    labels = list(label_id_mask.keys())

    # =================== Generate Color Mapping ===========================
    print("print(labels)=============================================")
    print(labels)
    # ['bowel', 'Uterus', 'HR-CTV', 'Sigmoid_colon', 'Rectum', 'Bladder', 'Foley']
    print("print(labels)=============================================")
    # For Brachy template, labels is like ['bowel', 'Uterus', 'HR-CTV', 'Sigmoid_colon', 'Rectum', 'Bladder', 'Foley']
    colors = []
    # disable code by milochen
    #rs_fp = pydicom.read_file(Env.RS_TEMPLATE_FILEPATH)

    # Build the labels -> colors mapping table from the original template (added by milochen).
    # The output should look like:
    # ['bowel', 'Uterus', 'HR-CTV', 'Sigmoid_colon', 'Rectum', 'Bladder', 'Foley']
    #
    # [None, ['255', '255', '128'], ['0', '255', '0'], ['187', '255', '187'], ['128', '64', '64'], ['0', '255', '255'], ['0', '150', '0']]

    for lbl_idx in range(len(labels)):
        lblName = labels[lbl_idx]
        lblName = lblName.split('RS_')[-1]
        # Find the ROI Number whose ROIName equals the label name lblName
        isFindMatchedROINumber = False
        matchedROINumber = None
        for idx in range(len(rs_fp.StructureSetROISequence)):
            item = rs_fp.StructureSetROISequence[idx]
            if item.ROIName == lblName:
                isFindMatchedROINumber = True
                matchedROINumber = item.ROINumber
                break
        if not isFindMatchedROINumber:
            # no match, so skip this label
            print("isFindMatchedROINumber is False when labels[", lbl_idx,
                  "] = ", lblName)
            colors.append(None)
            continue

        # Find the ROI Display Color whose ReferencedROINumber equals matchedROINumber
        isFindMatchedROIDisplayColor = False
        matchedROIDisplayColor = None
        #for idx in range(len(rs_fp.ROIContourSequence)):
        #    item = rs_fp.ROIContourSequence[idx]
        for idx, item in enumerate(rs_fp.ROIContourSequence):
            if item.ReferencedROINumber == matchedROINumber:
                isFindMatchedROIDisplayColor = True
                matchedROIDisplayColor = item.ROIDisplayColor
                break
        if isFindMatchedROIDisplayColor:
            print('found matching color = {}'.format(matchedROIDisplayColor))
        else:
            print('color is not found')

        if not isFindMatchedROIDisplayColor:
            print("isFindMatchedROIDisplayColor is False when labels[",
                  lbl_idx, "] = ", lblName)
            colors.append(None)
            continue
        colors.append(matchedROIDisplayColor)
    print('show colors')
    for idx, color in enumerate(colors):
        print('idx={} -> {}'.format(idx, color))

    # Make color mapping
    print('Make color mapping')
    colorMapping = {}
    for idx in range(len(labels)):
        lblName = labels[idx]
        lblName = lblName.split('RS_')[-1]  # strip the RS_ prefix if present

        #colorMapping[ labels[idx] ] = colors[idx]
        #colorMapping[lblName_ignore_RS_Head] = colors[idx]
        colorMapping[lblName] = colors[idx]
        #print(labels[idx], "->", colors[idx])
        print(lblName, "->", colors[idx])

    print("colorMapping Research")
    print(labels)
    print(colorMapping)
    # The output table may look like this
    # bowel -> None
    # Uterus -> ['255', '255', '128']
    # HR - CTV -> ['0', '255', '0']
    # Sigmoid_colon -> ['187', '255', '187']
    # Rectum -> ['128', '64', '64']
    # Bladder -> ['0', '255', '255']
    # Foley -> ['0', '150', '0']
    # Check keys with "if key in colorMapping".
    # Known issue: ROI Numbers are regenerated, so the ordering in the original RS file differs from the output RS file.
    # The strategy of Sac's original code is to clear everything and rewrite it from scratch.

    # =================== Generate Color Mapping ===========================

    ct_id = set()
    for ct_ids in label_id_mask.values():
        ct_id.update(ct_ids.keys())
    ct_id = list(ct_id)
    ct_id.sort()

    # The SeriesInstanceUID must be unique, e.g.
    # 1.2.246.352.71.2.417454940236.3986270.2019041110054111
    now = datetime.datetime.now()
    theDateForSeriesInstanceUID = now.strftime("%Y%m%d%H%M%S%f")
    rs_fp.SeriesInstanceUID = "1.2.246.352.71.2.417454940236.3986270." + theDateForSeriesInstanceUID
    rs_fp.ReferencedFrameOfReferenceSequence[
        0].FrameOfReferenceUID = ct_FrameOfReferenceUID
    rs_fp.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0].ReferencedSOPInstanceUID = rs_fp.StudyInstanceUID
    rs_fp.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0].RTReferencedSeriesSequence[0].ContourImageSequence.clear()

    print(
        "print_ct_id for check RTReferencedSeriesSequence ContourImageSequence"
    )
    print(ct_id)
    global SOPInstanceUID_list_of_CTs
    make_SOPInstnaceUID_list_of_CTs(input_ct_filelist)
    # Query here https://imagej.nih.gov/nih-image/download/nih-image_spin-offs/NucMed_Image/DICOM%20Dictionary
    #for _id in ct_id:
    for _id in SOPInstanceUID_list_of_CTs:
        #print("_id = ", _id)
        ds = Dataset()
        ds.ReferencedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'  # CT Image Storage
        ds[0x0008, 0x1155] = DataElement(0x00081155, 'UI',
                                         _id)  # Referenced SOP Instance UID
        rs_fp.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
            0].RTReferencedSeriesSequence[0].ContourImageSequence.append(ds)

    rs_fp.StructureSetROISequence.clear()

    for i in range(len(labels)):
        ds = Dataset()
        ds[0x3006, 0x0022] = DataElement(0x30060022, 'IS',
                                         str(i + 1))  # ROI Number
        ds[0x3006, 0x0024] = DataElement(
            0x30060024, 'UI',
            ct_FrameOfReferenceUID)  # Referenced Frame of Reference UID
        #ds[0x3006, 0x0026] = DataElement(0x30060026, 'LO', labels[i]) # ROI Name
        ds[0x3006, 0x0026] = DataElement(0x30060026, 'LO', labels[i].split(
            'RS_')[-1])  # ROI Name; strip the RS_ prefix if present
        ds[0x3006, 0x0036] = DataElement(0x30060036, 'CS',
                                         'MANUAL')  # ROI Generation Algorithm
        rs_fp.StructureSetROISequence.append(ds)

    rs_fp.ROIContourSequence.clear()

    for i in range(len(labels)):
        dsss = Dataset()
        # changed by milochen
        # dsss[0x3006, 0x002a] = DataElement(0x3006002a, 'IS', color)
        # Draw color
        lblName = labels[i]
        if lblName.split('RS_')[-1] in colorMapping:
            lblName_ignore_RS_Head = lblName.split('RS_')[-1]
            drawColor = colorMapping[lblName_ignore_RS_Head]
            print('drawColor = {}'.format(drawColor))
            if drawColor is None:
                #dsss[0x3006, 0x002a] = DataElement(0x3006002a, 'IS', [0, 0, 0]) # ROI Display Color
                the_color = [255, 255, 0]
                if lblName == "RS_PTV":
                    the_color = [255, 0, 0]  # Red
                elif lblName == "RS_CTV":
                    the_color = [0, 255, 0]  # Green
                dsss[0x3006,
                     0x002a] = DataElement(0x3006002a, 'IS',
                                           the_color)  # ROI Display Color
            else:
                dsss[0x3006,
                     0x002a] = DataElement(0x3006002a, 'IS',
                                           drawColor)  # ROI Display Color
        else:
            dsss[0x3006,
                 0x002a] = DataElement(0x3006002a, 'IS',
                                       [255, 255, 255])  # ROI Display Color

        dsss.ContourSequence = []
        dsss[0x3006, 0x0084] = DataElement(0x30060084, 'IS',
                                           str(i + 1))  # Referenced ROI Number
        rs_fp.ROIContourSequence.append(dsss)

        for _id in label_id_mask[labels[i]].keys():
            ds = Dataset()
            ds.ReferencedSOPClassUID = '1.2.840.10008.5.1.4.1.1.2'  # CT Image Storage
            ds[0x0008, 0x1155] = DataElement(0x00081155, 'UI',
                                             _id)  # SOP Instance UID

            NumberOfContourPoints = str(
                int(len(label_id_mask[labels[i]][_id]) / 3))

            dss = Dataset()
            dss.ContourImageSequence = [ds]
            dss[0x3006, 0x0042] = DataElement(
                0x30060042, 'CS', 'CLOSED_PLANAR')  # Contour Geometric Type
            # dss[0x3006, 0x0046] = DataElement(0x30060046, 'IS', '664')
            dss[0x3006, 0x0046] = DataElement(
                0x30060046, 'IS',
                NumberOfContourPoints)  # Number Of Contour Points
            dss[0x3006, 0x0050] = DataElement(
                0x30060050, 'DS',
                label_id_mask[labels[i]][_id])  # Contour Data
            rs_fp.ROIContourSequence[i].ContourSequence.append(dss)

    rs_fp.RTROIObservationsSequence.clear()

    for i in range(len(labels)):
        dss = Dataset()
        dss[0x3006, 0x0082] = DataElement(0x30060082, 'IS', str(i + 1))  # Observation Number
        dss[0x3006, 0x0084] = DataElement(0x30060084, 'IS', str(i + 1))  # Referenced ROI Number
        dss[0x3006, 0x0085] = DataElement(0x30060085, 'SH', labels[i])  # ROI Observation Label
        dss[0x3006, 0x00a4] = DataElement(0x300600a4, 'CS', '')  # RT ROI Interpreted Type
        dss[0x3006, 0x00a6] = DataElement(0x300600a6, 'PN', '')  # ROI Interpreter

        rs_fp.RTROIObservationsSequence.append(dss)

    #rs_fp.SOPInstanceUID = "{}.{}".format(uid, 1)
    rs_fp.SOPInstanceUID = "{}.{}".format(ct_StudyInstanceUID, 1)
    rs_fp.ApprovalStatus = "UNAPPROVED"

    # ========

    #Start to Interpolate
    make_some_tables(input_ct_filelist)
    for i in range(len(rs_fp.ROIContourSequence)):
        try:
            item = rs_fp.ROIContourSequence[i].ContourSequence
            organ_exist_z = []
            # organ_exist_item = []
            last_z = 0
            for n in range(len(item)):
                get_z = float(item[n].ContourData[2])
                organ_exist_z.append(get_z)
                last_z = get_z
                # organ_exist_item.append(n)
            # organ_exist_z = np.array(organ_exist_z)
            max_Z = max(organ_exist_z)
            min_Z = min(organ_exist_z)
            print(organ_exist_z)

            thickness = Thickness_table[
                last_z]  # the slice thickness is the same for every existing z-slice

            print('thickness = ', thickness)
            organ_table = list(np.arange(min_Z, max_Z + thickness, thickness))
            print(organ_table)

            for p in range(len(organ_table)):
                z = organ_table[p]
                # print(z)
                # if organ_exist_z.index(z):    #z in organ_exist_z
                if z not in organ_exist_z:  # z in organ_exist_z
                    # print(z)
                    to_fill_z = find_nearest(organ_exist_z, z)
                    to_fill = organ_exist_z.index(to_fill_z)

                    print(z, '___', to_fill_z, to_fill)

                    # cpyItem = copy.deepcopy(rs_fp.ROIContourSequence[i].ContourSequence[to_fill])
                    to_fill_data = rs_fp.ROIContourSequence[i].ContourSequence[
                        to_fill]
                    deepcopy_data = copy.deepcopy(to_fill_data)
                    # rs_fp.ROIContourSequence[i].ContourSequence.append(to_fill_data)
                    rs_fp.ROIContourSequence[i].ContourSequence.append(
                        deepcopy_data)

                    # item.ContourSequence.....append(fill_z)
                    # edit

                    #rs_fp.ROIContourSequence[i].ContourSequence[-1].ContourImageSequence[0].ReferencedSOPInstanceUID = CT_table[to_fill_z]
                    rs_fp.ROIContourSequence[i].ContourSequence[
                        -1].ContourImageSequence[
                            0].ReferencedSOPInstanceUID = CT_table[z]

                    rs_fp.ROIContourSequence[i].ContourSequence[
                        -1].ContourData[2::3] = [z] * (int(
                            len(rs_fp.ROIContourSequence[i].
                                ContourSequence[-1].ContourData) / 3))

        except Exception as e:
            print("Exception")
            traceback.print_tb(e.__traceback__)
            #thickness = Thickness_table[last_z]
            print("last_z = ", last_z)
            print("Thickness_table = ", Thickness_table)

            #print(e)
    # Set StructureSetLabel, e.g. from RALCT_20190411 to RALCT_20190614_AI; the date comes from the CT
    studyDate = lastGetDateFromCT
    # Keep the label to 16 bytes because Varian does not allow 17
    # rs_fp.StructureSetLabel = "RALCT_{}_AI".format(studyDate)
    rs_fp.StructureSetLabel = "RALCT_{}_A".format(studyDate)

    print("write_file for ", output_rs_filepath_after_ai)
    pydicom.write_file(output_rs_filepath_after_ai, rs_fp)

    return None
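The interpolation step relies on a find_nearest helper that is not shown here. A minimal sketch of what such a helper might look like (an assumption about the missing code, not the original implementation):

import numpy as np

def find_nearest(values, target):
    """Return the element of values that is closest to target."""
    arr = np.asarray(values, dtype=float)
    return float(arr[np.abs(arr - target).argmin()])

# e.g. find_nearest([-10.0, -7.5, -5.0], -6.8) -> -7.5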
Example 7
def test_anonymise_dataset_and_all_is_anonymised_functions():

    # Create dict with one instance of every identifying keyword and
    # run basic anonymisation tests
    non_anon_dict = dict.fromkeys(IDENTIFYING_KEYWORDS)

    for key in non_anon_dict:
        non_anon_dict[key] = _get_non_anonymous_replacement_value(key)

    ds = dicom_dataset_from_dict(non_anon_dict)
    _check_is_anonymised_dataset_file_and_dir(ds, anon_is_expected=False)

    ds_anon = anonymise_dataset(ds)
    _check_is_anonymised_dataset_file_and_dir(ds_anon, anon_is_expected=True)

    # Test anonymisation (and check thereof) for each identifying
    # element individually.
    for elem in ds_anon.iterall():
        ds_single_non_anon_value = deepcopy(ds_anon)
        setattr(ds_single_non_anon_value, elem.keyword,
                _get_non_anonymous_replacement_value(elem.keyword))
        _check_is_anonymised_dataset_file_and_dir(ds_single_non_anon_value,
                                                  anon_is_expected=False)

        ds_single_anon = anonymise_dataset(ds_single_non_anon_value)
        _check_is_anonymised_dataset_file_and_dir(ds_single_anon,
                                                  anon_is_expected=True)

    # Test correct handling of private tags
    ds_anon.add(DataElement(0x0043102b, 'SS', [4, 4, 0, 0]))
    _check_is_anonymised_dataset_file_and_dir(ds_anon,
                                              anon_is_expected=False,
                                              ignore_private_tags=False)
    _check_is_anonymised_dataset_file_and_dir(ds_anon,
                                              anon_is_expected=True,
                                              ignore_private_tags=True)

    ds_anon.remove_private_tags()
    _check_is_anonymised_dataset_file_and_dir(ds_anon,
                                              anon_is_expected=True,
                                              ignore_private_tags=False)

    # Test blank anonymisation
    # Sanity check
    _check_is_anonymised_dataset_file_and_dir(ds, anon_is_expected=False)

    ds_anon_blank = anonymise_dataset(ds, replace_values=False)
    _check_is_anonymised_dataset_file_and_dir(ds_anon_blank,
                                              anon_is_expected=True)

    # Test handling of unknown tags by removing PatientName from
    # baseline dict
    patient_name_tag = tag_for_keyword('PatientName')

    try:
        patient_name = BaselineDicomDictionary.pop(patient_name_tag)

        with pytest.raises(ValueError) as e_info:
            anonymise_dataset(ds)
        assert str(e_info).count("At least one of the non-private tags "
                                 "within your DICOM file is not within "
                                 "PyMedPhys's copy of the DICOM dictionary.")

        ds_anon_delete_unknown = anonymise_dataset(ds,
                                                   delete_unknown_tags=True)
        _check_is_anonymised_dataset_file_and_dir(ds_anon_delete_unknown,
                                                  anon_is_expected=True)
        with pytest.raises(AttributeError) as e_info:
            ds_anon_delete_unknown.PatientName
        assert str(e_info).count("'Dataset' object has no attribute "
                                 "'PatientName'")

        ds_anon_ignore_unknown = anonymise_dataset(ds,
                                                   delete_unknown_tags=False)
        _check_is_anonymised_dataset_file_and_dir(ds_anon_ignore_unknown,
                                                  anon_is_expected=True)
        assert patient_name_tag in ds_anon_ignore_unknown

    finally:
        BaselineDicomDictionary.setdefault(patient_name_tag, patient_name)

    # Test copy_dataset=False:
    anonymise_dataset(ds, copy_dataset=False)
    assert is_anonymised_dataset(ds)
Example 8
def test_anonymise_dataset_and_all_is_anonymised_functions(tmp_path):

    # Create dataset with one instance of every identifying keyword and
    # run basic anonymisation tests
    ds = Dataset()
    for keyword in IDENTIFYING_KEYWORDS:
        # Ignore file meta elements for now
        if Tag(tag_for_keyword(keyword)).group == 0x0002:
            continue

        value = _get_non_anonymous_replacement_value(keyword)
        setattr(ds, keyword, value)

    _check_is_anonymised_dataset_file_and_dir(ds, tmp_path, anon_is_expected=False)

    ds_anon = anonymise_dataset(ds)
    _check_is_anonymised_dataset_file_and_dir(ds_anon, tmp_path, anon_is_expected=True)

    # Test the anonymisation and check functions for each identifying
    # element individually.
    for elem in ds_anon.iterall():

        # TODO: AffectedSOPInstanceUID and RequestedSOPInstanceUID
        # are not writing to file. Investigate when UID anonymisation is
        # implemented.
        if elem.keyword in ("AffectedSOPInstanceUID", "RequestedSOPInstanceUID"):
            continue

        ds_single_non_anon_value = deepcopy(ds_anon)
        setattr(
            ds_single_non_anon_value,
            elem.keyword,
            _get_non_anonymous_replacement_value(elem.keyword),
        )
        _check_is_anonymised_dataset_file_and_dir(
            ds_single_non_anon_value, tmp_path, anon_is_expected=False
        )
        ds_single_anon = anonymise_dataset(ds_single_non_anon_value)
        _check_is_anonymised_dataset_file_and_dir(
            ds_single_anon, tmp_path, anon_is_expected=True
        )

    # Test correct handling of private tags
    ds_anon.add(DataElement(0x0043102B, "SS", [4, 4, 0, 0]))
    _check_is_anonymised_dataset_file_and_dir(
        ds_anon, tmp_path, anon_is_expected=False, ignore_private_tags=False
    )
    _check_is_anonymised_dataset_file_and_dir(
        ds_anon, tmp_path, anon_is_expected=True, ignore_private_tags=True
    )

    ds_anon.remove_private_tags()
    _check_is_anonymised_dataset_file_and_dir(
        ds_anon, tmp_path, anon_is_expected=True, ignore_private_tags=False
    )

    # Test blank anonymisation
    # Sanity check
    _check_is_anonymised_dataset_file_and_dir(ds, tmp_path, anon_is_expected=False)

    ds_anon_blank = anonymise_dataset(ds, replace_values=False)
    _check_is_anonymised_dataset_file_and_dir(
        ds_anon_blank, tmp_path, anon_is_expected=True
    )

    # Test handling of unknown tags by removing PatientName from
    # baseline dict
    patient_name_tag = tag_for_keyword("PatientName")

    try:
        patient_name = BASELINE_DICOM_DICT.pop(patient_name_tag)

        with pytest.raises(ValueError) as e_info:
            anonymise_dataset(ds)
        assert str(e_info.value).count(
            "At least one of the non-private tags "
            "within your DICOM file is not within "
            "PyMedPhys's copy of the DICOM dictionary."
        )

        ds_anon_delete_unknown = anonymise_dataset(ds, delete_unknown_tags=True)
        _check_is_anonymised_dataset_file_and_dir(
            ds_anon_delete_unknown, tmp_path, anon_is_expected=True
        )
        with pytest.raises(AttributeError) as e_info:
            ds_anon_delete_unknown.PatientName
        assert str(e_info.value).count(
            "'Dataset' object has no attribute " "'PatientName'"
        )

        ds_anon_ignore_unknown = anonymise_dataset(ds, delete_unknown_tags=False)
        _check_is_anonymised_dataset_file_and_dir(
            ds_anon_ignore_unknown, tmp_path, anon_is_expected=True
        )
        assert patient_name_tag in ds_anon_ignore_unknown

    finally:
        BASELINE_DICOM_DICT.setdefault(patient_name_tag, patient_name)

    # Test copy_dataset=False:
    anonymise_dataset(ds, copy_dataset=False)
    assert is_anonymised_dataset(ds)
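Both tests rely on pydicom treating odd-group tags such as (0043,102B) as private. A short sketch that makes this check explicit (illustration only, not part of the test suite):

from pydicom.dataset import Dataset
from pydicom.dataelem import DataElement

ds = Dataset()
ds.add(DataElement(0x0043102B, "SS", [4, 4, 0, 0]))
print([(elem.tag, elem.tag.is_private) for elem in ds])  # group 0x0043 is odd -> private
ds.remove_private_tags()
print(len(ds))                                           # 0 after removing private tags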