def retrieve_task(task, data_object_id):
    """
    Fetch a Dicom Object from the dicom location using the retrieve_type GET or MOVE
    """

    do = DataObject.query.filter_by(id=data_object_id).first()

    dicom_connector = DicomConnector(
        host=do.dataset.from_dicom_location.host,
        port=do.dataset.from_dicom_location.port,
        ae_title=do.dataset.from_dicom_location.ae_title,
    )

    dicom_verify = dicom_connector.verify()

    if not dicom_verify:
        logger.error(
            "Unable to connect to Dicom Location: {0} {1} {2}".format(
                do.dataset.from_dicom_location.host,
                do.dataset.from_dicom_location.port,
                do.dataset.from_dicom_location.ae_title,
            )
        )
        return

    dicom_path = dicom_connector.download_series(do.series_instance_uid)

    do.is_fetched = True
    do.path = dicom_path
    db.session.commit()
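
# Usage sketch (hedged): retrieve_task is assumed to be registered as a bound
# Celery task (e.g. decorated with @celery.task(bind=True)), so callers queue it
# with only the DataObject ID and the worker supplies the `task` argument:
#
#     retrieve_task.apply_async([data_object.id])
#
# The POST handler below dispatches it exactly this way for the GET retrieve type.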
def move_task(task, endpoint, seriesUIDs, host, port, ae_title):
    """
    Background task that triggers the Dicom MOVE operation at the given endpoint
    for the given seriesUIDs
    """

    # For each series UID supplied, fetch the image series and run the algorithm
    total = len(seriesUIDs)
    count = 0

    dicom_connector = DicomConnector(host=host, port=port, ae_title=ae_title)

    task.update_state(
        state="PROGRESS",
        meta={"current": count, "total": total, "status": "Verifying dicom location"},
    )

    dicom_verify = dicom_connector.verify()

    if dicom_verify is None:
        return {
            "current": 100,
            "total": 100,
            "status": "Unable to connect to dicom location",
        }

    for suid in seriesUIDs:
        task.update_state(
            state="PROGRESS",
            meta={
                "current": count,
                "total": total,
                "status": "Moving series for UID: {0}".format(suid),
            },
        )

        logger.info("Moving Series with UID: {0}".format(suid))
        dicom_connector.move_series(suid)

        count = count + 1

    task.update_state(
        state="SUCCESS",
        meta={"current": total, "total": total, "status": "Move Complete"},
    )
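
# Usage sketch (hedged): assuming move_task is likewise a bound Celery task, a
# caller would dispatch it with the remaining arguments and poll progress via the
# returned AsyncResult. The endpoint, series UIDs, host, port and AE title below
# are placeholders, not values taken from this codebase:
#
#     result = move_task.apply_async(
#         args=[endpoint, ["1.2.840...1", "1.2.840...2"], "127.0.0.1", 11112, "PACS_AE"]
#     )
#     print(result.state, result.info)  # e.g. PROGRESS, {"current": 1, "total": 2, ...}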
def fetch_status():

    celery_running = False
    if celery.control.inspect().active():
        celery_running = True

    status_context = {"celery": celery_running}

    status_context["algorithms"] = []
    for a in app.algorithms:
        algorithm = app.algorithms[a]
        status_context["algorithms"].append(
            {"name": algorithm.name, "default_settings": algorithm.default_settings}
        )

    dicom_connector = DicomConnector(
        port=app.dicom_listener_port, ae_title=app.dicom_listener_aetitle
    )

    dicom_listening = False
    if dicom_connector.verify():
        dicom_listening = True

    status_context["dicom_listener"] = {
        "port": app.dicom_listener_port,
        "aetitle": app.dicom_listener_aetitle,
        "listening": dicom_listening,
    }

    status_context["ram_usage"] = psutil.virtual_memory()._asdict()
    status_context["disk_usage"] = psutil.disk_usage("/")._asdict()
    status_context["cpu_usage"] = psutil.cpu_percent()

    status_context["applications"] = []
    for ak in APIKey.query.all():
        status_context["applications"].append({"name": ak.name, "key": ak.key})

    return jsonify(status_context)
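
# For reference, the JSON payload returned by fetch_status has the following
# shape; the concrete values shown here are illustrative only:
#
#     {
#         "celery": true,
#         "algorithms": [{"name": "...", "default_settings": {...}}],
#         "dicom_listener": {"port": 7777, "aetitle": "...", "listening": true},
#         "ram_usage": {...},   # psutil.virtual_memory()._asdict()
#         "disk_usage": {...},  # psutil.disk_usage("/")._asdict()
#         "cpu_usage": 12.3,    # psutil.cpu_percent()
#         "applications": [{"name": "...", "key": "..."}]
#     }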
def post(self):

    key = request.headers["API_KEY"]

    args = self.parser.parse_args()
    dataset_id = args["dataset"]

    # Get the dataset to which this data object should be added
    ds = Dataset.query.filter_by(owner_key=key, id=dataset_id).first()
    if not ds:
        return {"Error": "Dataset not found"}, 404

    # Get the parent data object if one was given
    parent = None
    if args["parent"]:
        parent = DataObject.query.filter_by(dataset_id=ds.id, id=args["parent"]).first()

        if not parent:
            return {"Error": "Parent Data Object not found"}, 404

    meta_data = None
    if args["meta_data"]:
        meta_data = json.loads(args["meta_data"])

    # Create the DataObject
    do = DataObject(
        dataset=ds,
        is_input=True,
        type=args["type"],
        series_instance_uid=args["seriesUID"],
        meta_data=meta_data,
        parent=parent,
    )
    db.session.add(do)
    db.session.commit()

    if args["type"] == "DICOM":

        dicom_fetch = args["dicom_retrieve"]
        if not dicom_fetch:
            return (
                {
                    "message": {
                        "dicom_retrieve": "Set GET, MOVE or SEND to be able to retrieve Dicom objects."
                    }
                },
                400,
            )

        if not args["seriesUID"]:
            return (
                {
                    "message": {
                        "seriesUID": "SeriesUID is required to be able to retrieve DICOM objects"
                    }
                },
                400,
            )

        if dicom_fetch == "MOVE":

            if not ds.from_dicom_location:
                return (
                    {
                        "message": {
                            "from_dicom_location": "Dataset From Dicom Location not set, so unable to MOVE DICOM objects"
                        }
                    },
                    400,
                )

            # Fetch Dicom data using MOVE
            # Check whether or not we are listening for Dicom MOVE
            listening_connector = DicomConnector(
                host="127.0.0.1",
                port=app.dicom_listener_port,
                ae_title=app.dicom_listener_aetitle,
            )

            if not listening_connector.verify():

                # Verify Dicom Location is listening
                timeout_seconds = 20
                time_waited = 0

                # We are not listening, wait for 20 seconds and abort if still not listening
                while not listening_connector.verify():
                    logger.debug(
                        "Not listening for MOVE, sleeping for 1 second and will try again"
                    )
                    time.sleep(1)
                    time_waited += 1

                    if time_waited >= timeout_seconds:
                        msg = "Listener for MOVE timeout on port: {0}".format(
                            ds.from_dicom_location.move_port
                        )
                        logger.error(msg)
                        return {"message": {"from_dicom_location": msg}}, 400

                logger.info("Listening for MOVE OK")

            # Trigger MOVE
            logger.info(
                "Triggering MOVE at {0} for series UID: {1}".format(
                    app.dicom_listener_aetitle, do.series_instance_uid
                )
            )
            dicom_connector = DicomConnector(
                host=ds.from_dicom_location.host,
                port=ds.from_dicom_location.port,
                ae_title=ds.from_dicom_location.ae_title,
            )

            dicom_verify = dicom_connector.verify()

            if dicom_verify:
                dicom_connector.move_series(
                    do.series_instance_uid, move_aet=app.dicom_listener_aetitle
                )
            else:
                msg = "Unable to connect to Dicom Location: {0} {1} {2}".format(
                    ds.from_dicom_location.host,
                    ds.from_dicom_location.port,
                    ds.from_dicom_location.ae_title,
                )
                logger.error(msg)
                return {"message": {"from_dicom_location": msg}}, 400

        elif dicom_fetch == "GET":

            if not ds.from_dicom_location:
                return (
                    {
                        "message": {
                            "from_dicom_location": "Dataset From Dicom Location not set, so unable to GET DICOM objects"
                        }
                    },
                    400,
                )

            # Fetch Dicom data using GET
            task = retrieve_task.apply_async([do.id])

        # If dicom_fetch is SEND we don't do anything here, just wait for the client
        # to send to our Dicom Listener.

    elif args["type"] == "FILE":

        if not args["file_name"]:
            return {"message": {"file_name": "Provide the file name"}}, 400

        if not args["file_data"]:
            return {"message": {"file_data": "Provide the file data"}}, 400

        # Save the file
        file_path = os.path.join(tempfile.mkdtemp(), args["file_name"])
        args["file_data"].save(file_path)

        do.is_fetched = True
        do.path = file_path
        db.session.add(do)
        db.session.commit()

    return do
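
# Client-side sketch (hedged): the endpoint URL and API key below are placeholders;
# only the header name (API_KEY) and the form fields parsed above (dataset, parent,
# meta_data, type, seriesUID, dicom_retrieve, file_name, file_data) come from this
# handler. Registering a DICOM data object to be fetched via GET might look like:
#
#     import requests
#
#     requests.post(
#         "http://localhost:8000/api/dataobject",    # placeholder URL
#         headers={"API_KEY": "my-application-key"},  # placeholder key
#         data={
#             "dataset": dataset_id,
#             "type": "DICOM",
#             "seriesUID": "1.2.840...",              # series to retrieve
#             "dicom_retrieve": "GET",
#         },
#     )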
def run_task(task, algorithm_name, config, dataset_id):

    task_id = task.request.id

    start = time.time()

    # Commit to refresh session
    db.session.commit()

    algorithm = app.algorithms[algorithm_name]

    if not config:
        config = algorithm.default_settings

    ds = Dataset.query.filter_by(id=dataset_id).first()
    input_objects = ds.input_data_objects

    logger.info("Will run algorithm: " + algorithm_name)
    logger.info("Using settings: " + str(config))
    logger.info("Number of data objects in dataset: " + str(len(input_objects)))

    state_details = {
        "current": 0,
        "total": len(input_objects),
        "status": "Running Algorithm: {0}".format(algorithm_name),
    }

    task.update_state(state="RUNNING", meta=state_details)

    if config is None:
        output_data_objects = algorithm.function(input_objects, tempfile.mkdtemp())
    else:
        output_data_objects = algorithm.function(
            input_objects, tempfile.mkdtemp(), config
        )

    if not output_data_objects:
        logger.warning(
            "Algorithm ({0}) did not return any output objects".format(algorithm_name)
        )
        # Guard the loop and final count below against a None result
        output_data_objects = []

    # Save the data objects
    for do in output_data_objects:

        do.dataset_id = ds.id
        db.session.add(do)
        db.session.commit()

        if do.type == "DICOM":
            if ds.to_dicom_location:
                logger.info("Sending to Dicom To Location")
                dicom_connector = DicomConnector(
                    host=do.dataset.to_dicom_location.host,
                    port=do.dataset.to_dicom_location.port,
                    ae_title=do.dataset.to_dicom_location.ae_title,
                )

                dicom_verify = dicom_connector.verify()

                if not dicom_verify:
                    logger.error(
                        "Unable to connect to Dicom Location: {0} {1} {2}".format(
                            do.dataset.to_dicom_location.host,
                            do.dataset.to_dicom_location.port,
                            do.dataset.to_dicom_location.ae_title,
                        )
                    )
                    continue

                send_result = dicom_connector.send_dcm(do.path)

                if send_result:
                    do.is_sent = True
                    db.session.add(do)
                    db.session.commit()
            else:
                logger.warning(
                    "DICOM Data Object output but no Dicom To Location defined in Dataset"
                )

    end = time.time()
    time_taken = end - start
    logger.info("Dataset processing complete, took: " + str(time_taken))
    logger.info("Number of data objects generated: " + str(len(output_data_objects)))

    state_details = {
        "current": len(input_objects),
        "total": len(input_objects),
        "status": "Running Algorithm Complete: {0}".format(algorithm_name),
    }

    task.update_state(state="COMPLETE", meta=state_details)
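
# Usage sketch (hedged): assuming run_task is a bound Celery task, a run would be
# queued with the algorithm name, an optional settings override (None to use the
# algorithm's default_settings) and the dataset ID, then monitored through the
# custom RUNNING/COMPLETE states it reports:
#
#     result = run_task.apply_async(args=["my_algorithm", None, dataset_id])
#     print(result.state, result.info)  # e.g. RUNNING, {"current": 0, "total": 3, ...}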