Example #1
def store(host, port, calling_ae_title, called_ae_title, filenames):
    transfer_syntaxes = [
        odil.registry.ExplicitVRLittleEndian,
        odil.registry.ImplicitVRLittleEndian,
    ]

    # Find all SOP classes to negotiate at association time. We don't need to
    # read the whole data set for this
    sop_classes = set()
    for filename in filenames:
        with odil.open(filename) as stream:
            _, data_set = odil.Reader.read_file(
                stream,
                halt_condition=lambda tag: tag > odil.registry.SOPClassUID)
        sop_classes.update(data_set.as_string("SOPClassUID"))

    presentation_contexts = [
        odil.AssociationParameters.PresentationContext(
            2 * i + 1, sop_class, transfer_syntaxes,
            odil.AssociationParameters.PresentationContext.Role.SCU)
        for i, sop_class in enumerate(sop_classes)
    ]

    # Create the association and the Store SCU
    association = odil.Association()
    association.set_peer_host(host)
    association.set_peer_port(port)
    association.update_parameters()\
        .set_calling_ae_title(calling_ae_title)\
        .set_called_ae_title(called_ae_title)\
        .set_presentation_contexts(presentation_contexts)
    association.associate()

    # The negotiated presentation contexts can be inspected here to check
    # which SOP classes and transfer syntaxes the peer accepted
    negotiated_parameters = association.get_negotiated_parameters()
    negotiated_pc = negotiated_parameters.get_presentation_contexts()

    store = odil.StoreSCU(association)

    for filename in filenames:
        with odil.open(filename) as stream:
            _, data_set = odil.Reader.read_file(stream)

        try:
            store.set_affected_sop_class(data_set)
            store.store(data_set)
        except Exception as e:
            print("Could not store {}: {}".format(filename, e))

    association.release()
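A minimal way to exercise this function, assuming store() above is defined in the same module; the host, port, AE titles and file pattern below are placeholders, not values from the original project:

import glob

if __name__ == "__main__":
    store(
        "pacs.example.com", 11112,      # placeholder peer host and port
        "ODIL_SCU", "ANY_SCP",          # placeholder calling/called AE titles
        sorted(glob.glob("*.dcm")))     # placeholder file selection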
Example #2
File: store.py, Project: lamyj/odil
def store(host, port, calling_ae_title, called_ae_title, filenames):
    transfer_syntaxes = [
        odil.registry.ExplicitVRLittleEndian,
        odil.registry.ImplicitVRLittleEndian,
    ]
    
    # Find all SOP classes to negotiate at association time. We don't need to
    # read the whole data set for this
    sop_classes = set()
    for filename in filenames:
        with odil.open(filename) as stream:
            _, data_set = odil.Reader.read_file(
                stream,
                halt_condition=lambda tag: tag>odil.registry.SOPClassUID)
        sop_classes.update(data_set.as_string("SOPClassUID"))

    presentation_contexts = [
        odil.AssociationParameters.PresentationContext(
            2*i+1, sop_class, transfer_syntaxes, True, False)
        for i, sop_class in enumerate(sop_classes)
    ]
    
    # Create the association and the Store SCU
    association = odil.Association()
    association.set_peer_host(host)
    association.set_peer_port(port)
    association.update_parameters()\
        .set_calling_ae_title(calling_ae_title)\
        .set_called_ae_title(called_ae_title)\
        .set_presentation_contexts(presentation_contexts)
    association.associate()
    
    negotiated_parameters = association.get_negotiated_parameters()
    negotiated_pc = negotiated_parameters.get_presentation_contexts()

    store = odil.StoreSCU(association)
    
    for filename in filenames:
        with odil.open(filename) as stream:
            _, data_set = odil.Reader.read_file(stream)
        
        try:
            store.set_affected_sop_class(data_set)
            store.store(data_set)
        except Exception as e:
            print("Could not store {}: {}".format(filename, e))
    
    association.release()
Example #3
File: convert.py, Project: lamyj/odil
def convert(input, output, transfer_syntax, item_length, use_group_length):
    with odil.open(input) as stream:
        header, data_set = odil.Reader.read_file(stream)

    to_remove = [
        "FileMetaInformationVersion",
        "MediaStorageSOPClassUID", "MediaStorageSOPInstanceUID", 
        "TransferSyntaxUID", 
        "ImplementationClassUID", "ImplementationVersionName"
    ]
    for name in to_remove:
        header.remove(getattr(odil.registry, name))

    odil.write(data_set, output, header, transfer_syntax, item_length, use_group_length)
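A hypothetical call of the function above; the file names are placeholders, and the item_length value assumes that the Python bindings expose odil.Writer.ItemEncoding as in the C++ API:

import odil

convert(
    "input.dcm", "output.dcm",                 # placeholder file names
    odil.registry.ExplicitVRLittleEndian,      # target transfer syntax
    odil.Writer.ItemEncoding.ExplicitLength,   # assumed enum for item_length
    False)                                     # do not write group lengths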
Example #4
    def __call__(self, data_set):
        if not os.path.isdir(self.root):
            os.makedirs(self.root)

        if self.iso_9660:
            filename = "{:08d}.dcm".format(1 + len(os.listdir(self.root)))
        else:
            filename = data_set.as_string("SOPInstanceUID")[0].decode()

        destination = os.path.join(self.root, filename)
        with odil.open(destination, "wb") as fd:
            odil.Writer.write_file(data_set, fd, odil.DataSet(),
                                   self.transfer_syntax)
        self.files.append(destination)
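The surrounding class is not part of the excerpt; a minimal stand-in holding the attributes that __call__ uses might look like the sketch below (the class and attribute defaults are assumptions, and __call__ from the example above is assumed to be pasted into it):

import odil

class DirectoryWriter(object):
    def __init__(self, root, iso_9660=False,
                 transfer_syntax=odil.registry.ExplicitVRLittleEndian):
        self.root = root                          # destination directory
        self.iso_9660 = iso_9660                  # use numbered file names
        self.transfer_syntax = transfer_syntax    # encoding of written files
        self.files = []                           # paths written so far

    # __call__ from the example above goes here.

writer = DirectoryWriter("./received", iso_9660=True)
with odil.open("input.dcm") as stream:            # placeholder input file
    _, data_set = odil.Reader.read_file(stream)
writer(data_set)                                  # e.g. ./received/00000001.dcm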
Example #5
def convert(input, output, transfer_syntax, item_length, use_group_length):
    with odil.open(input) as stream:
        header, data_set = odil.Reader.read_file(stream)

    to_remove = [
        "FileMetaInformationVersion", "MediaStorageSOPClassUID",
        "MediaStorageSOPInstanceUID", "TransferSyntaxUID",
        "ImplementationClassUID", "ImplementationVersionName"
    ]
    for name in to_remove:
        header.remove(getattr(odil.registry, name))

    odil.write(data_set, output, header, transfer_syntax, item_length,
               use_group_length)
Example #6
def print_(inputs, print_header, decode_uids):
    for input in inputs:
        logging.info("Printing {}".format(input))
        with odil.open(input) as stream:
            header, data_set = odil.Reader.read_file(stream)

        max_length = find_max_name_length(data_set)
        if print_header:
            max_length = max(max_length, find_max_name_length(header))

        if print_header:
            print_data_set(header, decode_uids, "", max_length,
                           odil.Value.Strings())
            print()
        print_data_set(data_set, decode_uids, "", max_length,
                       odil.Value.Strings())
Example #7
File: print_.py, Project: lamyj/odil
def print_(inputs, print_header, decode_uids):
    for input in inputs:
        logging.info("Printing {}".format(input))
        with odil.open(input) as stream:
            header, data_set = odil.Reader.read_file(stream)

        max_length = find_max_name_length(data_set)
        if print_header:
            max_length = max(max_length, find_max_name_length(header))

        if print_header:
            print_data_set(
                header, decode_uids, "", max_length, odil.Value.Strings())
            print()
        print_data_set(
            data_set, decode_uids, "", max_length, odil.Value.Strings())
Example #8
File: test_reader.py, Project: lamyj/odil
    def test_open_context(self):
        data = (128 * b"\x00" + b"DICM" + b"\x02\x00\x10\x00"
                b"UI"
                b"\x14\x00"
                b"1.2.840.10008.1.2.1\x00"
                b"\x10\x00\x10\x00"
                b"PN"
                b"\x08\x00"
                b"Foo^Bar ")

        fd, path = tempfile.mkstemp()
        os.write(fd, data)
        os.close(fd)

        try:
            with odil.open(path) as fd:
                header, data_set = odil.Reader.read_file(fd)
        finally:
            os.remove(path)
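The byte string above hand-encodes a minimal DICOM file: the 128-byte preamble, the "DICM" magic, a (0002,0010) TransferSyntaxUID element and a (0010,0010) PatientName element, both in the short Explicit VR Little Endian form. The following odil-independent sketch walks that exact layout, reusing the data variable from the test; it only handles short-form VRs, which is all this test uses:

import struct

offset = 132                                      # skip preamble + "DICM"
while offset < len(data):
    group, element = struct.unpack_from("<HH", data, offset)
    vr = data[offset + 4:offset + 6].decode("ascii")
    length, = struct.unpack_from("<H", data, offset + 6)   # short-form length
    value = data[offset + 8:offset + 8 + length]
    print("({:04x},{:04x}) {} {!r}".format(group, element, vr, value))
    offset += 8 + length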
Example #9
File: dump.py, Project: top501/odil
def main():
    parser = argparse.ArgumentParser(description="Print content of DICOM file")
    parser.add_argument("file")
    parser.add_argument("--header",
                        "-H",
                        action="store_true",
                        help="Print header")
    parser.add_argument("--decode-uids",
                        "-D",
                        action="store_true",
                        help="Display UID names")
    arguments = parser.parse_args()

    with odil.open(arguments.file) as stream:
        header, data_set = odil.Reader.read_file(stream)
    if arguments.header:
        print_data_set(header, arguments.decode_uids)
        print()
    print_data_set(data_set, arguments.decode_uids)
Example #10
    def run_client(self):
        command = [
            "getscu", "-ll", "error", "-P", "-k", "QueryRetrieveLevel=PATIENT",
            "-k", "PatientID=*", "-k", "PatientName", "+B", "localhost",
            "11113"
        ]

        retcode = subprocess.call(command)
        if retcode != 0:
            return []

        files = sorted(glob.glob("{}*".format(odil.uid_prefix)))
        data_sets = []
        for file_ in files:
            with odil.open(file_, "rb") as fd:
                data_sets.append(odil.Reader.read_file(fd)[1])
        for file_ in files:
            os.remove(file_)

        return data_sets
Example #11
File: test_find_scp.py, Project: lamyj/odil
    def run_client(self):
        command = [
            "findscu",
            "-P", "-k", "QueryRetrieveLevel=PATIENT",
            "-k", "PatientID=*", "-k", "PatientName",
            "-q", "-X",
            "localhost", "11113"]

        retcode = subprocess.call(command)
        if retcode != 0:
            return []

        files = sorted(glob.glob("rsp*"))
        data_sets = []
        for file_ in files:
            with odil.open(file_, "rb") as fd:
                data_sets.append(odil.Reader.read_file(fd)[1])
        for file_ in files:
            os.remove(file_)

        return data_sets
Example #12
    def test_respondDICOM(self):
        wado = odil.webservices.WADORSResponse()
        wado.set_data_sets(self.data_sets)
        wado.respond_dicom(odil.webservices.Utils.Representation.DICOM)
        self.assertEqual(wado.get_type(), odil.webservices.Utils.Type.DICOM)
        self.assertEqual(wado.get_representation(),
                         odil.webservices.Utils.Representation.DICOM)
        http = wado.get_http_response()

        msg = self._http_message_to_email_message(http)
        self.assertTrue(msg.is_multipart())

        i = 0
        for part in msg.walk():
            if part.get_content_type() == "application/dicom":
                fd, path = tempfile.mkstemp()
                # Close the raw descriptor: io.FileIO re-opens the path itself.
                os.close(fd)
                stream = io.FileIO(path, "wb")
                stream.write(part.get_payload(decode=True))
                stream.close()
                with odil.open(path, "rb") as stream:
                    ds = odil.Reader.read_file(stream)[1]
                os.remove(path)
                self.assertEqual(self.data_sets[i], ds)
                i = i + 1
        self.assertEqual(i, len(self.data_sets))
Example #13
def as_xml(input, output, pretty_print):
    with odil.open(input) as stream:
        _, data_set = odil.Reader.read_file(stream)
    with open(output, "w") as fd:
        xml = odil.as_xml(data_set, pretty_print)
        fd.write(xml)
Example #14
def as_binary(input, output, transfer_syntax):
    with odil.open(input) as stream:
        _, data_set = odil.Reader.read_file(stream)
    with odil.open(output, "wb") as fd:
        odil.Writer.write_file(data_set, fd, transfer_syntax=transfer_syntax)
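A hypothetical round trip using the two helpers above (file names are placeholders): dump a file to XML for inspection, then re-encode it with a different transfer syntax.

import odil

as_xml("input.dcm", "input.xml", pretty_print=True)
as_binary("input.dcm", "implicit.dcm", odil.registry.ImplicitVRLittleEndian)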
Example #15
        def store(self, data_set):
            specific_character_set = odil.Value.Strings()
            if "SpecificCharacterSet" in data_set:
                specific_character_set = data_set.as_string(
                    "SpecificCharacterSet")
            as_unicode = lambda x: odil.as_unicode(x, specific_character_set)

            if layout == "flat":
                directory = ""
            elif layout == "tree":
                # Patient directory: <PatientName> or <PatientID>.
                patient_directory = None
                if "PatientName" in data_set and data_set.as_string(
                        "PatientName"):
                    patient_directory = data_set.as_string("PatientName")[0]
                else:
                    patient_directory = data_set.as_string("PatientID")[0]
                patient_directory = as_unicode(patient_directory)

                # Study directory: <StudyID>_<StudyDescription>, both parts are
                # optional. If both tags are missing or empty, default to a
                # numeric index based on StudyInstanceUID.
                study_directory = []
                if "StudyID" in data_set and data_set.as_string("StudyID"):
                    study_directory.append(data_set.as_string("StudyID")[0])
                if ("StudyDescription" in data_set
                        and data_set.as_string("StudyDescription")):
                    study_directory.append(
                        data_set.as_string("StudyDescription")[0])

                if not study_directory:
                    study_instance_uid = data_set.as_string(
                        "StudyInstanceUID")[0]
                    study_directory.append(
                        self._study_ids.setdefault(study_instance_uid,
                                                   1 + len(self._study_ids)))

                study_directory = "_".join(
                    as_unicode(x) for x in study_directory)

                # Series directory: <SeriesNumber>_<SeriesDescription>, both
                # parts are optional. If both tags are missing or empty, default
                # to a numeric index based on SeriesInstanceUID.
                series_directory = []
                if "SeriesNumber" in data_set and data_set.as_int(
                        "SeriesNumber"):
                    series_directory.append(
                        str(data_set.as_int("SeriesNumber")[0]))
                if ("SeriesDescription" in data_set
                        and data_set.as_string("SeriesDescription")):
                    series_directory.append(
                        data_set.as_string("SeriesDescription")[0])

                if not series_directory:
                    series_instance_uid = data_set.as_string(
                        "SeriesInstanceUID")[0]
                    series_directory.append(
                        self._series_ids.setdefault(series_instance_uid,
                                                    1 + len(self._series_ids)))

                series_directory = "_".join(
                    as_unicode(x) for x in series_directory)

                if iso_9660:
                    patient_directory = to_iso_9660(patient_directory)
                    study_directory = to_iso_9660(study_directory)
                    series_directory = to_iso_9660(series_directory)
                directory = os.path.join(patient_directory, study_directory,
                                         series_directory)
                if not os.path.isdir(os.path.join(self.directory, directory)):
                    os.makedirs(os.path.join(self.directory, directory))
            else:
                raise NotImplementedError()

            self.stored.setdefault(directory, 0)

            if iso_9660:
                filename = "IM{:06d}".format(1 + self.stored[directory])
            else:
                filename = as_unicode(data_set.as_string("SOPInstanceUID")[0])

            with odil.open(os.path.join(self.directory, directory, filename),
                           "wb") as fd:
                odil.Writer.write_file(data_set, fd)

            self.stored[directory] += 1
            self.files.append(os.path.join(directory, filename))
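The to_iso_9660 helper is not included in the excerpt; a minimal sketch of what such a function could do, restricting names to the ISO 9660 level-1 character set and length, is:

import re

def to_iso_9660(value):
    # Keep at most 8 characters and replace anything outside A-Z, 0-9 and "_"
    # so the resulting name is valid on an ISO 9660 level-1 volume.
    value = value[:8].upper()
    return re.sub(r"[^A-Z0-9_]", "_", value)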
Example #16
File: transcode.py, Project: lamyj/odil
def as_xml(input, output, pretty_print):
    with odil.open(input) as stream:
        _, data_set = odil.Reader.read_file(stream)
    with open(output, "w") as fd:
        xml = odil.as_xml(data_set, pretty_print)
        fd.write(xml)
Example #17
File: transcode.py, Project: lamyj/odil
def as_binary(input, output, transfer_syntax):
    with odil.open(input) as stream:
        _, data_set = odil.Reader.read_file(stream)
    with odil.open(output, "wb") as fd:
        odil.Writer.write_file(data_set, fd, transfer_syntax=transfer_syntax)
Example #18
File: get.py, Project: lamyj/odil
        def store(self, data_set):
            specific_character_set = odil.Value.Strings()
            if "SpecificCharacterSet" in data_set:
                specific_character_set = data_set.as_string(
                    "SpecificCharacterSet")
            as_unicode = lambda x: odil.as_unicode(x, specific_character_set)
            
            if layout == "flat":
                directory = ""
            elif layout == "tree":
                # Patient directory: <PatientName> or <PatientID>. 
                patient_directory = None
                if "PatientName" in data_set and data_set.as_string("PatientName"):
                    patient_directory = data_set.as_string("PatientName")[0]
                else:
                    patient_directory = data_set.as_string("PatientID")[0]
                patient_directory = as_unicode(patient_directory)
                
                # Study directory: <StudyID>_<StudyDescription>, both parts are
                # optional. If both tags are missing or empty, default to a 
                # numeric index based on StudyInstanceUID.
                study_directory = []
                if "StudyID" in data_set and data_set.as_string("StudyID"):
                    study_directory.append(data_set.as_string("StudyID")[0])
                if ("StudyDescription" in data_set and
                        data_set.as_string("StudyDescription")):
                    study_directory.append(
                        data_set.as_string("StudyDescription")[0])
                
                if not study_directory:
                    study_instance_uid = data_set.as_string("StudyInstanceUID")[0]
                    study_directory.append(
                        self._study_ids.setdefault(
                            study_instance_uid, 1+len(self._study_ids)))
                    
                study_directory = "_".join(as_unicode(x) for x in study_directory)

                # Series directory: <SeriesNumber>_<SeriesDescription>, both
                # parts are optional. If both tags are missing or empty, default
                # to a numeric index based on SeriesInstanceUID.
                series_directory = []
                if "SeriesNumber" in data_set and data_set.as_int("SeriesNumber"):
                    series_directory.append(str(data_set.as_int("SeriesNumber")[0]))
                if ("SeriesDescription" in data_set and
                        data_set.as_string("SeriesDescription")):
                    series_directory.append(
                        data_set.as_string("SeriesDescription")[0])
                
                if not series_directory:
                    series_instance_uid = data_set.as_string("series_instance_uid")[0]
                    series_directory.append(
                        self._series_ids.setdefault(
                            series_instance_uid, 1+len(self._series_ids)))
                
                series_directory = "_".join(as_unicode(x) for x in series_directory)

                if iso_9660:
                    patient_directory = to_iso_9660(patient_directory)
                    study_directory = to_iso_9660(study_directory)
                    series_directory = to_iso_9660(series_directory)
                directory = os.path.join(
                    patient_directory, study_directory, series_directory)
                if not os.path.isdir(os.path.join(self.directory, directory)):
                    os.makedirs(os.path.join(self.directory, directory))
            else:
                raise NotImplementedError()

            self.stored.setdefault(directory, 0)

            if iso_9660:
                filename = "IM{:06d}".format(1+self.stored[directory])
            else:
                filename = as_unicode(data_set.as_string("SOPInstanceUID")[0])
            
            with odil.open(os.path.join(self.directory, directory, filename), "wb") as fd:
                odil.Writer.write_file(data_set, fd)

            self.stored[directory] += 1
            self.files.append(os.path.join(directory, filename))