def test_max_pdu_good(self):
    """Check AE maximum pdu size change produces good value"""
    ae = AE(scu_sop_class=['1.2.840.10008.1.1'])
    # Invalid assignments (negative, list, string) fall back to the
    # default of 16382.
    for invalid in (-10, ['a'], '10'):
        ae.maximum_pdu_size = invalid
        self.assertTrue(ae.maximum_pdu_size == 16382)
    # Valid assignments (0 = unlimited, or a positive int) are kept as-is.
    for valid in (0, 5000):
        ae.maximum_pdu_size = valid
        self.assertTrue(ae.maximum_pdu_size == valid)
def create_application_entity(self, allow_echo: bool = True, maximum_pdu_size: int = 0) -> AE:
    """Return a configured :class:`~pynetdicom.ae.ApplicationEntity` instance.

    Parameters
    ----------
    allow_echo : bool
        Whether to enable C-ECHO request handling or not, default is True
    maximum_pdu_size : int
        Maximal PDU size. By default, overrides pynetdicom's default
        setting to 0 (unlimited)

    Returns
    -------
    AE
        DICOM networking application entity
    """
    from django_dicom.models.networking import (
        messages as networking_messages,
    )

    log = logging.getLogger("data.dicom.networking")

    # Read the configured AE title and log start.
    title = get_application_entity_title()
    log.info(networking_messages.APPLICATION_ENTITY_START.format(title=title))

    # Create the application entity instance and log success.
    ae = AE(ae_title=title)
    log.info(networking_messages.APPLICATION_ENTITY_SUCCESS)

    # Optionally enable C-ECHO (verification) request handling.
    if allow_echo:
        ae.add_supported_context(VerificationSOPClass, ALL_TRANSFER_SYNTAXES[:])
        log.debug(networking_messages.C_ECHO_ENABLED)

    # Apply the maximal PDU size (0 means unlimited) to optimize throughput.
    ae.maximum_pdu_size = maximum_pdu_size
    if maximum_pdu_size != 0:
        log.debug(
            networking_messages.PDU_LIMIT_CONFIGURATION.format(
                maximum_pdu_size=maximum_pdu_size
            )
        )
    return ae
def main(args=None):
    """Run the application."""
    if args is not None:
        sys.argv = args

    args = _setup_argparser()

    if args.version:
        print('storescp.py v{}'.format(__version__))
        sys.exit()

    APP_LOGGER = setup_logging(args, 'storescp')
    APP_LOGGER.debug('storescp.py v{0!s}'.format(__version__))
    APP_LOGGER.debug('')

    # Order the supported transfer syntaxes according to the CLI flags;
    # the first matching preference wins.
    transfer_syntax = ALL_TRANSFER_SYNTAXES[:]

    def _prefer(syntax):
        # Move *syntax* to the front of the list.
        transfer_syntax.remove(syntax)
        transfer_syntax.insert(0, syntax)

    if args.prefer_uncompr:
        # Demote Implicit VR LE so the other syntaxes are preferred.
        transfer_syntax.remove(ImplicitVRLittleEndian)
        transfer_syntax.append(ImplicitVRLittleEndian)
    elif args.prefer_little:
        _prefer(ExplicitVRLittleEndian)
    elif args.prefer_big:
        _prefer(ExplicitVRBigEndian)
    elif args.implicit:
        transfer_syntax = [ImplicitVRLittleEndian]

    handlers = [(evt.EVT_C_STORE, handle_store, [args, APP_LOGGER])]

    # Create the application entity.
    ae = AE(ae_title=args.ae_title)

    # Accept every storage SOP class with the selected transfer syntaxes.
    for context in AllStoragePresentationContexts:
        ae.add_supported_context(context.abstract_syntax, transfer_syntax)

    # Optionally accept verification (C-ECHO) requests as well.
    if not args.no_echo:
        for context in VerificationPresentationContexts:
            ae.add_supported_context(context.abstract_syntax, transfer_syntax)

    ae.maximum_pdu_size = args.max_pdu

    # Apply the configured timeouts.
    ae.network_timeout = args.network_timeout
    ae.acse_timeout = args.acse_timeout
    ae.dimse_timeout = args.dimse_timeout

    ae.start_server((args.bind_address, args.port), evt_handlers=handlers)
def main(args=None):
    """Run the application."""
    if args is not None:
        sys.argv = args

    args = _setup_argparser()

    if args.version:
        print(f"echoscp.py v{__version__}")
        sys.exit()

    APP_LOGGER = setup_logging(args, "echoscp")
    APP_LOGGER.debug(f"echoscp.py v{__version__}")
    APP_LOGGER.debug("")

    # Order the supported transfer syntaxes according to the CLI flags;
    # the first matching preference wins.
    transfer_syntax = ALL_TRANSFER_SYNTAXES[:]

    def _prefer(uid):
        # Move *uid* to the front of the list (the list stores str UIDs).
        transfer_syntax.remove(str(uid))
        transfer_syntax.insert(0, uid)

    if args.prefer_uncompr:
        # Demote Implicit VR LE so the other syntaxes are preferred.
        transfer_syntax.remove(str(ImplicitVRLittleEndian))
        transfer_syntax.append(ImplicitVRLittleEndian)
    elif args.prefer_little:
        _prefer(ExplicitVRLittleEndian)
    elif args.prefer_big:
        _prefer(ExplicitVRBigEndian)
    elif args.implicit:
        transfer_syntax = [ImplicitVRLittleEndian]

    handlers = [(evt.EVT_C_ECHO, handle_echo)]

    # Create the application entity and accept verification requests.
    ae = AE(ae_title=args.ae_title)
    ae.add_supported_context(Verification, transfer_syntax)
    ae.maximum_pdu_size = args.max_pdu

    # Apply the configured timeouts.
    ae.network_timeout = args.network_timeout
    ae.acse_timeout = args.acse_timeout
    ae.dimse_timeout = args.dimse_timeout

    ae.start_server((args.bind_address, args.port), evt_handlers=handlers)
def main():
    # Shared state read by the event handlers / inference loop.
    global inference_queue
    inference_queue = deque()

    global dicom_store
    dicom_store = {}

    # Parent folder to all storage requests
    os_helpers.make_directory(config.SCP_STORAGE_PATH)

    ae = AE()
    # Disable all timeouts and remove the PDU size limit.
    ae.network_timeout = None
    ae.acse_timeout = None
    ae.dimse_timeout = None
    ae.maximum_pdu_size = 0
    ae.maximum_associations = 14  # Tested with 14 threads

    handlers = [
        (evt.EVT_ACCEPTED, handle_accepted),
        (evt.EVT_C_STORE, handle_store),
        (evt.EVT_RELEASED, handle_release),
    ]

    # Accept every storage SOP class pynetdicom knows about.
    for cx in AllStoragePresentationContexts:
        ae.add_supported_context(cx.abstract_syntax, ALL_TRANSFER_SYNTAXES)

    # Serve in the background; inference runs on the main thread.
    ae.start_server(
        (config.SCP_IP, config.SCP_PORT), block=False, evt_handlers=handlers
    )
    print_listening()
    inference_loop()
def main():
    handlers = [
        (evt.EVT_C_STORE, handle_store),
        (evt.EVT_RELEASED, handle_release),
    ]

    ae = AE()
    # Disable all timeouts and remove the PDU size limit.
    ae.network_timeout = None
    ae.acse_timeout = None
    ae.dimse_timeout = None
    ae.maximum_pdu_size = 0
    ae.maximum_associations = 1  # TODO Handle more than one

    # Accept every storage SOP class pynetdicom knows about.
    for cx in AllStoragePresentationContexts:
        ae.add_supported_context(cx.abstract_syntax, ALL_TRANSFER_SYNTAXES)

    print("\nListening for association request on port:", config.SCP_PORT)
    ae.start_server(
        (config.SCP_IP, config.SCP_PORT), block=True, evt_handlers=handlers
    )
def main(args=None):
    """Run the application."""
    if args is not None:
        sys.argv = args

    args = _setup_argparser()

    if args.version:
        print(f"qrscp.py v{__version__}")
        sys.exit()

    APP_LOGGER = setup_logging(args, "qrscp")
    APP_LOGGER.debug(f"qrscp.py v{__version__}")
    APP_LOGGER.debug("")
    APP_LOGGER.debug("Using configuration from:")
    APP_LOGGER.debug(f" {args.config}")
    APP_LOGGER.debug("")

    config = ConfigParser()
    config.read(args.config)

    # CLI arguments override the corresponding config-file values.
    if args.ae_title:
        config["DEFAULT"]["ae_title"] = args.ae_title
    if args.port:
        config["DEFAULT"]["port"] = args.port
    if args.max_pdu:
        config["DEFAULT"]["max_pdu"] = args.max_pdu
    if args.acse_timeout:
        config["DEFAULT"]["acse_timeout"] = args.acse_timeout
    if args.dimse_timeout:
        config["DEFAULT"]["dimse_timeout"] = args.dimse_timeout
    if args.network_timeout:
        config["DEFAULT"]["network_timeout"] = args.network_timeout
    if args.bind_address:
        config["DEFAULT"]["bind_address"] = args.bind_address
    if args.database_location:
        config["DEFAULT"]["database_location"] = args.database_location
    if args.instance_location:
        config["DEFAULT"]["instance_location"] = args.instance_location

    # Log configuration settings
    _log_config(config, APP_LOGGER)
    app_config = config["DEFAULT"]

    # Each non-DEFAULT config section describes a C-MOVE destination AE:
    # section name is the AE title, with "address" and "port" keys.
    dests = {}
    for ae_title in config.sections():
        dest = config[ae_title]
        # Convert to bytes and validate the AE title
        ae_title = set_ae(ae_title, "ae_title", False, False)
        dests[ae_title] = (dest["address"], dest.getint("port"))

    # Use default or specified configuration file
    current_dir = os.path.abspath(os.path.dirname(__file__))
    instance_dir = os.path.join(current_dir, app_config["instance_location"])
    db_path = os.path.join(current_dir, app_config["database_location"])

    # The path to the database
    db_path = f"sqlite:///{db_path}"
    db.create(db_path)

    # Clean up the database and storage directory
    if args.clean:
        response = input(
            "This will delete all instances from both the storage directory "
            "and the database. Are you sure you wish to continue? [yes/no]: "
        )
        if response != "yes":
            sys.exit()
        # Exit code reflects whether the clean-up succeeded.
        if clean(db_path, instance_dir, APP_LOGGER):
            sys.exit()
        else:
            sys.exit(1)

    # Try to create the instance storage directory
    os.makedirs(instance_dir, exist_ok=True)

    ae = AE(app_config["ae_title"])
    ae.maximum_pdu_size = app_config.getint("max_pdu")
    ae.acse_timeout = app_config.getfloat("acse_timeout")
    ae.dimse_timeout = app_config.getfloat("dimse_timeout")
    ae.network_timeout = app_config.getfloat("network_timeout")

    ## Add supported presentation contexts
    # Verification SCP
    ae.add_supported_context(Verification, ALL_TRANSFER_SYNTAXES)

    # Storage SCP - support all transfer syntaxes
    for cx in AllStoragePresentationContexts:
        ae.add_supported_context(
            cx.abstract_syntax, ALL_TRANSFER_SYNTAXES, scp_role=True, scu_role=False
        )

    # Query/Retrieve SCP
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
    ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
    ae.add_supported_context(StudyRootQueryRetrieveInformationModelFind)
    ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove)
    ae.add_supported_context(StudyRootQueryRetrieveInformationModelGet)

    # Set our handler bindings
    handlers = [
        (evt.EVT_C_ECHO, handle_echo, [args, APP_LOGGER]),
        (evt.EVT_C_FIND, handle_find, [db_path, args, APP_LOGGER]),
        (evt.EVT_C_GET, handle_get, [db_path, args, APP_LOGGER]),
        (evt.EVT_C_MOVE, handle_move, [dests, db_path, args, APP_LOGGER]),
        (evt.EVT_C_STORE, handle_store, [instance_dir, db_path, args, APP_LOGGER]),
    ]

    # Listen for incoming association requests
    ae.start_server(
        (app_config["bind_address"], app_config.getint("port")), evt_handlers=handlers
    )
    # NOTE(review): fragment — the matching ``try:`` block begins before this
    # chunk; the indentation below is reconstructed and should be confirmed
    # against the full file.
    except:
        # Best-effort: on any failure locating files, report zero matches.
        dcm_files = []
        yield 0

    # C-GET handler tail: yield one pending (0xFF00) status + dataset per
    # matching file.
    for dcm in dcm_files:
        ds = dcmread(dcm, force=True)
        yield 0xFF00, ds


handlers = [(evt.EVT_C_GET, handle_get)]

# Create application entity
ae = AE(ae_title=args.aetitle)

# Storage contexts are SCP-role only so datasets can be sent back on the
# same association during C-GET.
for context in StoragePresentationContexts:
    ae.add_supported_context(
        context.abstract_syntax, transfer_syntax, scp_role=True, scu_role=False
    )
for context in QueryRetrievePresentationContexts:
    ae.add_supported_context(context.abstract_syntax, transfer_syntax)

ae.maximum_pdu_size = args.max_pdu

# Set timeouts
ae.network_timeout = args.timeout
ae.acse_timeout = args.acse_timeout
ae.dimse_timeout = args.dimse_timeout

ae.start_server(('', args.port), evt_handlers=handlers)
    # NOTE(review): fragment — the enclosing ``def`` (presumably the
    # ``on_c_find`` handler assigned below) begins before this chunk; the
    # indentation is reconstructed.
    dcm_files = [os.path.join(basedir, x) for x in dcm_files]
    for dcm in dcm_files:
        data = read_file(dcm, force=True)
        # Build a minimal match response: echo the query level back and
        # identify matches by patient name.
        d = Dataset()
        d.QueryRetrieveLevel = dataset.QueryRetrieveLevel
        d.RetrieveAETitle = args.aetitle
        d.PatientName = data.PatientName
        yield d


# Create application entity
# NOTE(review): this is the legacy pynetdicom(3) AE constructor signature
# (port / scu_sop_class / scp_sop_class / transfer_syntax) — confirm the
# pinned pynetdicom version still supports it; modern releases do not.
ae = AE(
    ae_title=args.aetitle,
    port=args.port,
    scu_sop_class=[],
    scp_sop_class=QueryRetrieveSOPClassList,
    transfer_syntax=transfer_syntax,
)
ae.maximum_pdu_size = args.max_pdu

# Set timeouts
ae.network_timeout = args.timeout
ae.acse_timeout = args.acse_timeout
ae.dimse_timeout = args.dimse_timeout

# Legacy callback-style handler binding, then start listening.
ae.on_c_find = on_c_find
ae.start()
def receive_store_internal(nr_assoc, ds_per_assoc, write_ds=0, use_yappi=False):
    """Run a Storage SCP and transfer datasets with pynetdicom alone.

    Parameters
    ----------
    nr_assoc : int
        The total number of (sequential) associations that will be made.
    ds_per_assoc : int
        The number of C-STORE requests sent per successful association.
    write_ds : int, optional
        ``0`` to not write to file (default),
        ``1`` to write the received dataset to file using event.dataset,
        ``2`` to write using the raw ``bytes``,
        ``3`` to write raw bytes with unlimited PDU size.
    use_yappi : bool, optional
        True to use the yappi profiler, False otherwise (default).
    """
    if use_yappi:
        init_yappi()

    def handle(event):
        # C-STORE handler: optionally persist the received dataset.
        if write_ds == 1:
            # Slow path: decode the dataset via event.dataset first.
            with tempfile.TemporaryFile('w+b') as tfile:
                ds = event.dataset
                ds.file_meta = event.file_meta
                ds.save_as(tfile)
        elif write_ds in (2, 3):
            # Fast path: write preamble, magic, file meta and the raw
            # encoded dataset bytes without decoding.
            with tempfile.TemporaryFile('w+b') as tfile:
                tfile.write(b'\x00' * 128)
                tfile.write(b'DICM')
                write_file_meta_info(tfile, event.file_meta)
                tfile.write(event.request.DataSet.getvalue())

        return 0x0000

    ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    if write_ds == 3:
        # Unlimited PDU size for maximum throughput.
        ae.maximum_pdu_size = 0
    ae.add_supported_context(DATASET.SOPClassUID, ImplicitVRLittleEndian)
    ae.add_requested_context(DATASET.SOPClassUID, ImplicitVRLittleEndian)

    server = ae.start_server(
        ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle)]
    )

    time.sleep(0.5)
    start_time = time.time()
    is_successful = True

    for ii in range(nr_assoc):
        assoc = ae.associate('127.0.0.1', 11112)
        if assoc.is_established:
            for jj in range(ds_per_assoc):
                assoc.send_c_store(DATASET)
            assoc.release()
        else:
            # BUGFIX: the failure branch was missing, so a refused/failed
            # association still reported success; also removed the unused
            # ``run_times`` local.
            is_successful = False
            break

    if is_successful:
        print("C-STORE SCU/SCP transferred {} total datasets over {} "
              "association(s) in {:.2f} s".format(
                  nr_assoc * ds_per_assoc, nr_assoc, time.time() - start_time))
    else:
        print("C-STORE SCU/SCP benchmark failed")

    server.shutdown()
def receive_store_internal(
    test_ds, nr_assoc, ds_per_assoc, write_ds=0, use_yappi=False
):
    """Run a Storage SCP and transfer datasets with pynetdicom alone.

    Parameters
    ----------
    test_ds : pydicom.dataset.Dataset
        The test dataset to use
    nr_assoc : int
        The total number of (sequential) associations that will be made.
    ds_per_assoc : int
        The number of C-STORE requests sent per successful association.
    write_ds : int, optional
        ``0`` to not write to file (default),
        ``1`` to write the received dataset to file using event.dataset,
        ``2`` to write using the raw ``bytes``,
        ``3`` to write raw bytes with unlimited PDU size.
    use_yappi : bool, optional
        True to use the yappi profiler, False otherwise (default).
    """
    if use_yappi:
        init_yappi()

    def handle(event):
        # C-STORE handler: optionally persist the received dataset.
        if write_ds == 1:
            # Slow path: decode the dataset via event.dataset first.
            with tempfile.TemporaryFile("w+b") as tfile:
                ds = event.dataset
                ds.file_meta = event.file_meta
                ds.save_as(tfile)
        elif write_ds in (2, 3):
            # Fast path: write preamble, magic, file meta and the raw
            # encoded dataset bytes without decoding.
            with tempfile.TemporaryFile("w+b") as tfile:
                tfile.write(b"\x00" * 128)
                tfile.write(b"DICM")
                write_file_meta_info(tfile, event.file_meta)
                tfile.write(event.request.DataSet.getvalue())

        return 0x0000

    ae = AE()
    ae.acse_timeout = 5
    ae.dimse_timeout = 5
    ae.network_timeout = 5
    if write_ds == 3:
        # Unlimited PDU size for maximum throughput.
        ae.maximum_pdu_size = 0
    ae.add_supported_context(test_ds.SOPClassUID, ImplicitVRLittleEndian)
    ae.add_requested_context(test_ds.SOPClassUID, ImplicitVRLittleEndian)

    server = ae.start_server(
        ("localhost", 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle)]
    )

    time.sleep(0.5)
    start_time = time.time()
    is_successful = True

    for ii in range(nr_assoc):
        assoc = ae.associate("127.0.0.1", 11112)
        if assoc.is_established:
            for jj in range(ds_per_assoc):
                assoc.send_c_store(test_ds)
            assoc.release()
        else:
            # BUGFIX: the failure branch was missing, so a refused/failed
            # association still reported success.
            is_successful = False
            break

    if is_successful:
        write_msg = ["", " (write)", " (write fast)", " (write fastest)"][write_ds]
        print(
            f"C-STORE SCU/SCP transferred {nr_assoc * ds_per_assoc} total "
            f"{os.path.basename(test_ds.filename)} datasets over "
            f"{nr_assoc} association{'' if nr_assoc == 1 else 's'}{write_msg} "
            f"in {time.time() - start_time:.2f} s"
        )
    else:
        print("C-STORE SCU/SCP benchmark failed")

    server.shutdown()
    # NOTE(review): fragment — this ``return`` closes a C-STORE handler whose
    # ``def`` begins before this chunk; the indentation is reconstructed.
    return 0x0000


# List of event handlers
handlers = [(evt.EVT_C_STORE, handle_store, [Path('dcmstore/received')]),
            (evt.EVT_C_ECHO, handle_echo)]

ae = AE()

# Accept storage of all SOP classes that pynetdicom supports
storage_sop_classes = [
    cx.abstract_syntax for cx in AllStoragePresentationContexts
]
for uid in storage_sop_classes:
    ae.add_supported_context(uid, ALL_TRANSFER_SYNTAXES)
ae.add_supported_context(VerificationSOPClass)

# Supposedly increases transfer speed
# ref: https://pydicom.github.io/pynetdicom/dev/examples/storage.html#storage-scp
ae.maximum_pdu_size = 0

# Start server on localhost, port 11112 (this is the port internal
# to the docker container. The port that the network sees is the
# host port specified in the .env file.)
ae.start_server(
    ('', 11112),
    block=True,  # Socket operates in blocking mode
    ae_title=os.environ['AE_TITLE'],  # specified in the .env file
    evt_handlers=handlers)