Exemplo n.º 1
0
    def test_write_file_meta(self):
        """Verify the encoded File Meta Information without using pydicom."""
        req = C_STORE()
        req.AffectedSOPClassUID = '1.2'
        req.AffectedSOPInstanceUID = '1.3'
        req.DataSet = BytesIO(b'\x00\x01')

        event = Event(
            None,
            evt.EVT_C_STORE,
            {'request': req, 'context': self.context.as_tuple},
        )

        buffer = BytesIO()
        meta = event.file_meta
        write_file_meta_info(buffer, meta)
        encoded = buffer.getvalue()

        # (0002,0000) File Meta Information Group Length: UL, length 4, 126
        assert encoded[:12] == b'\x02\x00\x00\x00\x55\x4c\x04\x00\x7e\x00\x00\x00'
        # Remaining group 0x0002 elements: version, SOP Class UID '1.2',
        # SOP Instance UID '1.3', Transfer Syntax UID '1.2.840.10008.1.2'
        expected = (
            b'\x02\x00\x01\x00\x4f\x42\x00\x00\x02\x00\x00\x00\x00\x01'
            b'\x02\x00\x02\x00\x55\x49\x04\x00\x31\x2e\x32\x00'
            b'\x02\x00\x03\x00\x55\x49\x04\x00\x31\x2e\x33\x00'
            b'\x02\x00\x10\x00\x55\x49\x12\x00\x31\x2e\x32\x2e\x38\x34'
            b'\x30\x2e\x31\x30\x30\x30\x38\x2e\x31\x2e\x32\x00'
        )
        assert encoded[12:76] == expected

        # Note: may not be 126 if Implementation Class and Version change
        assert meta.FileMetaInformationGroupLength == 126
        assert len(encoded) == 12 + 126
Exemplo n.º 2
0
def save_dataset(event: events.Event) -> Path:
    """
    Save the dataset carried by a C-STORE request to a temporary location
    within MEDIA_ROOT.

    Parameters
    ----------
    event : events.Event
        C-STORE request event

    Returns
    -------
    Path
        Path of the written DICOM file
    """
    destination = get_temp_path(event.request.AffectedSOPInstanceUID)
    with open(destination, "wb") as stream:
        # 128-byte preamble followed by the "DICM" prefix (PS3.10 header)
        logging.debug(messages.WRITE_DICOM_PREFIX)
        stream.write(b"\x00" * 128)
        stream.write(b"DICM")

        # Encode and write the File Meta Information
        logging.debug(messages.WRITE_DICOM_METADATA)
        write_file_meta_info(stream, event.file_meta)

        # The request carries the dataset already encoded; write it raw
        logging.debug(messages.WRITE_DICOM_DATASET)
        stream.write(event.request.DataSet.getvalue())
    log_dataset_saved(destination.name)
    return destination
Exemplo n.º 3
0
    def handle(event):
        """Handle the event; `write_ds` (from the enclosing scope) selects
        how the dataset is written out."""
        if write_ds == 1:
            # Decode via pydicom and save with the event's file meta
            with tempfile.TemporaryFile('w+b') as tfile:
                dataset = event.dataset
                dataset.file_meta = event.file_meta
                dataset.save_as(tfile)
        elif write_ds in (2, 3):
            # Write the raw encoded bytes directly, no decode step:
            # preamble, prefix, file meta, then the dataset as received
            with tempfile.TemporaryFile('w+b') as tfile:
                tfile.write(b'\x00' * 128)
                tfile.write(b'DICM')
                write_file_meta_info(tfile, event.file_meta)
                tfile.write(event.request.DataSet.getvalue())

        return 0x0000
Exemplo n.º 4
0
File: scp.py — Project: qurit/raiven
def handle_store(event):
    """ Handles EVT_C_STORE """
    requestor_ae_title, called_ae_title = get_ae_titles(event)

    filename = event.request.AffectedSOPInstanceUID + '.dcm'
    path = CONNECTIONS[requestor_ae_title] / filename
    with open(path, 'wb') as f:
        # PS3.10 header: 128-byte preamble then the "DICM" prefix
        f.write(b'\x00' * 128)
        f.write(b'DICM')
        # TODO: check this is still needed
        # event.file_meta.TransferSyntaxUID = uid.ImplicitVRLittleEndian
        write_file_meta_info(f, event.file_meta)
        # Raw encoded dataset straight from the request
        f.write(event.request.DataSet.getvalue())

    return 0x0000
Exemplo n.º 5
0
def handle_store(event, storage_dir):
    """Handle EVT_C_STORE events.

    Parameters
    ----------
    event : pynetdicom.events.Event
        The C-STORE request event.
    storage_dir : str
        Directory in which to store the received instance.

    Returns
    -------
    int
        ``0x0000`` (Success), or ``0xC001`` (Failure) if the output
        directory could not be created.
    """
    try:
        os.makedirs(storage_dir, exist_ok=True)
    except OSError:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; makedirs failures raise OSError.
        # Unable to create output dir, return failure status
        return 0xC001

    # We rely on the UID from the C-STORE request instead of decoding
    fname = os.path.join(storage_dir, event.request.AffectedSOPInstanceUID)
    with open(fname, 'wb') as f:
        # Write the preamble, prefix and file meta information elements
        f.write(b'\x00' * 128)
        f.write(b'DICM')
        write_file_meta_info(f, event.file_meta)
        # Write the raw encoded dataset
        f.write(event.request.DataSet.getvalue())

    return 0x0000
Exemplo n.º 6
0
def handle_store(event, storage_dir):
    """Handle EVT_C_STORE events: store the instance to disk, forward it
    to a fixed peer with storescu, then delete the local copy.

    Parameters
    ----------
    event : pynetdicom.events.Event
        The C-STORE request event.
    storage_dir : str
        Directory used as temporary storage before forwarding.

    Returns
    -------
    int
        ``0x0000`` (Success), or ``0xC001`` (Failure) if the output
        directory could not be created.
    """
    try:
        os.makedirs(storage_dir, exist_ok=True)
    except OSError:
        # Narrowed from a bare `except:`; makedirs failures raise OSError.
        # Unable to create output dir, return failure status
        return 0xC001

    # We rely on the UID from the C-STORE request instead of decoding
    fname = os.path.join(storage_dir, str(event.request.AffectedSOPInstanceUID))
    with open(fname, 'wb') as f:
        # Write the preamble, prefix and file meta information elements
        f.write(b'\x00' * 128)
        f.write(b'DICM')
        write_file_meta_info(f, event.file_meta)
        # Write the raw encoded dataset
        f.write(event.request.DataSet.getvalue())

    # Forward the stored file to the configured peer via storescu.
    # NOTE(review): host/port are hard-coded - consider making these
    # configurable parameters.
    host = "192.168.8.177"
    port = "4242"
    arg0 = '-ds'  # -ds all files under dir, '' single file
    arg1 = '-q'  # --v verbose  q quick, d debug
    arg2 = '-cx'  # require context

    storescu.main([arg0, host, port, fname, arg1, arg2])
    print("file send!", fname)
    os.remove(fname)
    return 0x0000
Exemplo n.º 7
0
    def decode_msg(self, primitive, assoc=None):
        """Converts P-DATA primitives into a ``DIMSEMessage`` sub-class.

        Decodes the data from the P-DATA service primitive (which
        may contain the results of one or more P-DATA-TF PDUs) into the
        :attr:`~DIMSEMessage.command_set` and :attr:`~DIMSEMessage.data_set`
        attributes. Also sets the :attr:`~DIMSEMessage.context_id` and
        :attr:`~DIMSEMessage.encoded_command_set` attributes of the
        ``DIMSEMessage`` sub-class object.

        Parameters
        ----------
        primitive : pdu_primitives.P_DATA
            The P-DATA service primitive to be decoded into a DIMSE message.
        assoc : association.Association, optional
            The association processing the message. This is required when:

            * :attr:`~pynetdicom._config.STORE_RECV_CHUNKED_DATASET` is
              ``True``
            * The P-DATA primitive contains part of a C-STORE-RQ message

            In this case the association is consulted for its accepted
            transfer syntax, which is included in the File Meta Information
            of the stored dataset.

        Returns
        -------
        bool
            ``True`` when the DIMSE message is completely decoded, ``False``
            otherwise.

        References
        ----------

        * DICOM Standard, Part 8, :dcm:`Annex E<part08/chapter_E.html>`
        """
        # Make sure this is a P-DATA primitive
        # NOTE(review): the `primitive is None` test is unreachable - when
        # primitive is None the class check is already True and the `or`
        # short-circuits; left as-is since the result is the same.
        if primitive.__class__ != P_DATA or primitive is None:
            return False

        for (context_id, data) in primitive.presentation_data_value_list:

            # The first byte of the P-DATA is the Message Control Header
            #   See Part 8, Annex E.2
            # The standard says that only the significant bits (ie the last
            #   two) should be checked
            # xxxxxx00 - Message Dataset information, not the last fragment
            # xxxxxx01 - Command information, not the last fragment
            # xxxxxx10 - Message Dataset information, the last fragment
            # xxxxxx11 - Command information, the last fragment
            control_header_byte = data[0]

            # LOGGER.debug('Control header byte %s', control_header_byte)
            #print(f'Control header byte {control_header_byte}')

            # COMMAND SET
            # P-DATA fragment contains Command Set information
            #   (control_header_byte is xxxxxx01 or xxxxxx11)
            if control_header_byte & 1:
                # The command set may be spread out over a number
                #   of fragments and P-DATA primitives and we need to remember
                #   the elements from previous fragments, hence the
                #   encoded_command_set class attribute
                # This adds all the command set data to the class object
                self.encoded_command_set.write(data[1:])

                # The final command set fragment (xxxxxx11) has been added
                #   so decode the command set
                if control_header_byte & 2:
                    # Presentation Context ID
                    #   Set this now as must only be one final command set
                    #   fragment and command set must always be present
                    self.context_id = context_id

                    # Command Set is always encoded Implicit VR Little Endian
                    #   decode(dataset, is_implicit_VR, is_little_endian)
                    # pylint: disable=attribute-defined-outside-init
                    self.command_set = decode(self.encoded_command_set, True,
                                              True)

                    # Determine which DIMSE Message class to use
                    self.__class__ = (
                        _MESSAGE_TYPES[self.command_set.CommandField][1])

                    # Determine if a Data Set is present by checking for
                    #   (0000, 0800) CommandDataSetType US 1. If the value is
                    #   0x0101 no dataset present, otherwise one is.
                    if self.command_set.CommandDataSetType == 0x0101:
                        # By returning True we're indicating that the message
                        #   has been completely decoded
                        return True

                    # Data Set is present
                    if (_config.STORE_RECV_CHUNKED_DATASET
                            and isinstance(self, C_STORE_RQ)):
                        # delete=False is a workaround for Windows
                        # Setting delete=True prevents us from re-opening
                        # the file after it is opened by NamedTemporaryFile
                        # below.
                        self._data_set_file = NamedTemporaryFile(delete=False,
                                                                 mode="wb",
                                                                 suffix=".dcm")
                        self._data_set_path = Path(self._data_set_file.name)
                        # Write the File Meta
                        self._data_set_file.write(b'\x00' * 128)
                        self._data_set_file.write(b'DICM')

                        cs = self.command_set
                        cx = assoc._accepted_cx[context_id]
                        write_file_meta_info(
                            self._data_set_file,
                            create_file_meta(
                                sop_class_uid=cs.AffectedSOPClassUID,
                                sop_instance_uid=cs.AffectedSOPInstanceUID,
                                transfer_syntax=cx.transfer_syntax[0]))

            # DATA SET
            # P-DATA fragment contains Data Set information
            #   (control_header_byte is xxxxxx00 or xxxxxx10)
            else:
                # As with the command set, the data set may be spread over
                #   a number of fragments in each P-DATA primitive and a
                #   number of P-DATA primitives.
                # If chunked storage is active the fragments are streamed
                #   straight to the temp file instead of buffered in memory.
                if self._data_set_file:
                    self._data_set_file.write(data[1:])
                else:
                    self.data_set.write(data[1:])

                # The final data set fragment (xxxxxx10) has been added
                if control_header_byte & 2 != 0:
                    # By returning True we're indicating that the message
                    #   has been completely decoded
                    return True

        # We return False to indicate that the message isn't yet fully decoded
        return False
Exemplo n.º 8
0
    def __init__(self,
                 study_instance_uid: str,
                 series_instance_uid: str,
                 series_number: int,
                 sop_instance_uid: str,
                 sop_class_uid: str,
                 instance_number: int,
                 modality: str,
                 manufacturer: Optional[str] = None,
                 transfer_syntax_uid: Optional[str] = None,
                 patient_id: Optional[str] = None,
                 patient_name: Optional[str] = None,
                 patient_birth_date: Optional[str] = None,
                 patient_sex: Optional[str] = None,
                 accession_number: Optional[str] = None,
                 study_id: Optional[str] = None,
                 study_date: Optional[Union[str, datetime.date]] = None,
                 study_time: Optional[Union[str, datetime.time]] = None,
                 referring_physician_name: Optional[str] = None,
                 content_qualification: Optional[Union[
                     str, ContentQualificationValues]] = None,
                 coding_schemes: Optional[
                     Sequence[CodingSchemeIdentificationItem]] = None,
                 series_description: Optional[str] = None):
        """
        Parameters
        ----------
        study_instance_uid: str
            UID of the study
        series_instance_uid: str
            UID of the series
        series_number: int
            Number of the series within the study
        sop_instance_uid: str
            UID that should be assigned to the instance
        sop_class_uid: str
            UID of the SOP Class of the instance
        instance_number: int
            Number that should be assigned to the instance
        modality: str
            Name of the modality
        manufacturer: str, optional
            Name of the manufacturer (developer) of the device (software)
            that creates the instance
        transfer_syntax_uid: str, optional
            UID of transfer syntax that should be used for encoding of
            data elements. Defaults to Implicit VR Little Endian
            (UID ``"1.2.840.10008.1.2"``)
        patient_id: str, optional
           ID of the patient (medical record number)
        patient_name: str, optional
           Name of the patient
        patient_birth_date: str, optional
           Patient's birth date
        patient_sex: str, optional
           Patient's sex
        study_id: str, optional
           ID of the study
        accession_number: str, optional
           Accession number of the study
        study_date: Union[str, datetime.date], optional
           Date of study creation
        study_time: Union[str, datetime.time], optional
           Time of study creation
        referring_physician_name: str, optional
            Name of the referring physician
        content_qualification: Union[str, highdicom.enum.ContentQualificationValues], optional
            Indicator of content qualification
        coding_schemes: Sequence[highdicom.sr.coding.CodingSchemeIdentificationItem], optional
            private or public coding schemes that are not part of the
            DICOM standard
        series_description: str, optional
            Human readable description of the series

        Note
        ----
        The constructor only provides attributes that are required by the
        standard (type 1 and 2) as part of the Patient, General Study,
        Patient Study, General Series, General Equipment and SOP Common modules.
        Derived classes are responsible for providing additional attributes
        required by the corresponding Information Object Definition (IOD).
        Additional optional attributes can subsequently be added to the dataset.

        """  # noqa
        super().__init__()
        if transfer_syntax_uid is None:
            transfer_syntax_uid = ImplicitVRLittleEndian
        # Derive the encoding flags pydicom needs from the transfer syntax:
        # only Explicit VR Big Endian is big endian, only Implicit VR Little
        # Endian is implicit VR.
        if transfer_syntax_uid == ExplicitVRBigEndian:
            self.is_little_endian = False
        else:
            self.is_little_endian = True
        if transfer_syntax_uid == ImplicitVRLittleEndian:
            self.is_implicit_VR = True
        else:
            self.is_implicit_VR = False

        # Include all File Meta Information required for writing SOP instance
        # to a file in PS3.10 format.
        self.preamble = b'\x00' * 128
        self.file_meta = Dataset()
        self.file_meta.DICOMPrefix = 'DICM'
        self.file_meta.FilePreamble = self.preamble
        self.file_meta.TransferSyntaxUID = transfer_syntax_uid
        self.file_meta.MediaStorageSOPClassUID = str(sop_class_uid)
        self.file_meta.MediaStorageSOPInstanceUID = str(sop_instance_uid)
        self.file_meta.FileMetaInformationVersion = b'\x00\x01'
        self.file_meta.ImplementationClassUID = '1.2.826.0.1.3680043.9.7433.1.1'
        self.file_meta.ImplementationVersionName = '{} v{}'.format(
            __name__.split('.')[0], __version__)
        self.fix_meta_info(enforce_standard=True)
        # Encode the meta information once into a scratch buffer purely to
        # measure its encoded length for FileMetaInformationGroupLength.
        with BytesIO() as fp:
            write_file_meta_info(fp, self.file_meta, enforce_standard=True)
            self.file_meta.FileMetaInformationGroupLength = len(fp.getvalue())

        # Patient
        self.PatientID = patient_id
        self.PatientName = patient_name
        self.PatientBirthDate = patient_birth_date
        self.PatientSex = patient_sex

        # Study
        self.StudyInstanceUID = str(study_instance_uid)
        self.AccessionNumber = accession_number
        self.StudyID = study_id
        self.StudyDate = DA(study_date) if study_date is not None else None
        self.StudyTime = TM(study_time) if study_time is not None else None
        self.ReferringPhysicianName = referring_physician_name

        # Series
        self.SeriesInstanceUID = str(series_instance_uid)
        self.SeriesNumber = series_number
        self.Modality = modality
        if series_description is not None:
            self.SeriesDescription = series_description

        # Equipment
        self.Manufacturer = manufacturer

        # Instance
        self.SOPInstanceUID = str(sop_instance_uid)
        self.SOPClassUID = str(sop_class_uid)
        self.InstanceNumber = instance_number
        self.ContentDate = DA(datetime.datetime.now().date())
        self.ContentTime = TM(datetime.datetime.now().time())
        if content_qualification is not None:
            content_qualification = ContentQualificationValues(
                content_qualification)
            self.ContentQualification = content_qualification.value
        if coding_schemes is not None:
            self.CodingSchemeIdentificationSequence: List[Dataset] = []
            for item in coding_schemes:
                if not isinstance(item, CodingSchemeIdentificationItem):
                    raise TypeError(
                        'Coding scheme identification item must have type '
                        '"CodingSchemeIdentificationItem".')
                self.CodingSchemeIdentificationSequence.append(item)
Exemplo n.º 9
0
def handle_store(event, args, app_logger):
    """Handle a C-STORE request.

    Parameters
    ----------
    event : pynetdicom.event.event
        The event corresponding to a C-STORE request.
    args : argparse.Namespace
        The namespace containing the arguments to use. The namespace should
        contain ``args.ignore`` and ``args.output_directory`` attributes.
    app_logger : logging.Logger
        The application's logger.

    Returns
    -------
    status : pynetdicom.sop_class.Status or int
        A valid return status code, see PS3.4 Annex B.2.3 or the
        ``StorageServiceClass`` implementation for the available statuses
    """
    if args.ignore:
        return 0x0000

    try:
        ds = event.dataset
        # Remove any Group 0x0002 elements that may have been included
        ds = ds[0x00030000:]
    except Exception as exc:
        app_logger.error("Unable to decode the dataset")
        app_logger.exception(exc)
        # Unable to decode dataset
        # Fixed: was 0x210, which is not a valid failure status; use the
        # same 0xC210 (Cannot Understand) returned below.
        return 0xC210

    # Add the file meta information elements
    ds.file_meta = event.file_meta

    # Because pydicom uses deferred reads for its decoding, decoding errors
    #   are hidden until encountered by accessing a faulty element
    try:
        sop_class = ds.SOPClassUID
        sop_instance = ds.SOPInstanceUID
    except Exception as exc:
        app_logger.error(
            "Unable to decode the received dataset or missing 'SOP Class "
            "UID' and/or 'SOP Instance UID' elements")
        app_logger.exception(exc)
        # Unable to decode dataset
        return 0xC210

    try:
        # Get the elements we need
        mode_prefix = SOP_CLASS_PREFIXES[sop_class][0]
    except KeyError:
        mode_prefix = 'UN'

    filename = f'{mode_prefix}.{sop_instance}'
    # Fixed: the f-string had no placeholder, so the filename was never
    # actually logged.
    app_logger.info(f'Storing DICOM file: {filename}')

    status_ds = Dataset()
    status_ds.Status = 0x0000

    # Try to save to output-directory
    if args.output_directory is not None:
        filename = os.path.join(args.output_directory, filename)
        try:
            os.makedirs(args.output_directory, exist_ok=True)
        except Exception as exc:
            app_logger.error('Unable to create the output directory:')
            app_logger.error(f"    {args.output_directory}")
            app_logger.exception(exc)
            # Failed - Out of Resources - IOError
            status_ds.Status = 0xA700
            return status_ds

    if os.path.exists(filename):
        app_logger.warning('DICOM file already exists, overwriting')

    try:
        if event.context.transfer_syntax == DeflatedExplicitVRLittleEndian:
            # Workaround for pydicom issue #1086
            with open(filename, 'wb') as f:
                f.write(b'\x00' * 128)
                f.write(b'DICM')
                # Fixed: write_file_meta_info() writes directly into `f` and
                # returns None; wrapping it in f.write() raised TypeError.
                write_file_meta_info(f, event.file_meta)
                f.write(encode(ds, False, True, True))
        else:
            # We use `write_like_original=False` to ensure that a compliant
            #   File Meta Information Header is written
            ds.save_as(filename, write_like_original=False)

        status_ds.Status = 0x0000  # Success
    except IOError as exc:
        app_logger.error('Could not write file to specified directory:')
        app_logger.error(f"    {os.path.dirname(filename)}")
        app_logger.exception(exc)
        # Failed - Out of Resources - IOError
        status_ds.Status = 0xA700
    except Exception as exc:
        app_logger.error('Could not write file to specified directory:')
        app_logger.error(f"    {os.path.dirname(filename)}")
        app_logger.exception(exc)
        # Failed - Out of Resources - Miscellaneous error
        status_ds.Status = 0xA701

    return status_ds