Example #1
def is_target_json_valid(folder):
    """
    Checks if the task.json file exists and is also valid. Mandatory
    subkeys are target_ip, target_port and target_aet_target under the
    dispatch key
    """
    path = Path(folder) / mercure_names.TASKFILE
    if not path.exists():
        return None

    with open(path, "r") as f:
        target = json.load(f)

    if not all(
            key in target.get("dispatch", {})
            for key in ("target_ip", "target_port", "target_aet_target")
    ):
        send_series_event(
            s_events.ERROR,
            target.get("dispatch", {}).get("series_uid", "None"),
            0,
            target.get("dispatch", {}).get("target_name", "None"),
            f"task.json is missing a mandatory key {target}",
        )
        return None
    return target["dispatch"]
Example #2
def push_series_discard(fileList, series_UID, discard_series):
    """Discards the series by moving all files into the "discard" folder, which is periodically cleared."""
    # Define the source and target folder. Use UUID as name for the target folder in the 
    # discard directory to avoid collisions
    discard_path   = config.mercure['discard_folder']  + '/' + str(uuid.uuid1())
    discard_folder = discard_path + '/'
    source_folder  = config.mercure['incoming_folder'] + '/'

    # Create subfolder in the discard directory and validate that it has been created
    try:
        os.mkdir(discard_path)
    except Exception:
        logger.exception(f'Unable to create discard folder {discard_path}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR, f'Unable to create discard folder {discard_path}')
        return
    if not Path(discard_path).exists():
        logger.error(f'Creating discard folder not possible {discard_path}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR, f'Creating discard folder not possible {discard_path}')
        return

    # Create lock file in destination folder (to prevent the cleaner module from working on
    # the folder). Note that the DICOM series in the incoming folder has already been locked
    # in the parent function.
    try:
        lock_file = Path(discard_path) / mercure_names.LOCK
        lock = helper.FileLock(lock_file)
    except Exception:
        # Can't create lock file, so something must be seriously wrong
        logger.error(f'Unable to create lock file {lock_file}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR, f'Unable to create lock file in discard folder {lock_file}')
        return

    info_text = ""
    if discard_series:
        info_text = "Discard by rule " + discard_series
    monitor.send_series_event(monitor.s_events.DISCARD, series_UID, len(fileList), "", info_text)

    for entry in fileList:
        try:
            shutil.move(source_folder+entry+mercure_names.DCM,discard_folder+entry+mercure_names.DCM)
            shutil.move(source_folder+entry+mercure_names.TAGS,discard_folder+entry+mercure_names.TAGS)
        except Exception:
            logger.exception(f'Problem while discarding file {entry}')
            logger.error(f'Source folder {source_folder}')
            logger.error(f'Target folder {discard_folder}')
            monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR, f'Problem during discarding file {entry}')

    monitor.send_series_event(monitor.s_events.MOVE, series_UID, len(fileList), discard_path, "")

    try:
        lock.free()
    except Exception:
        # Can't delete lock file, so something must be seriously wrong
        logger.error(f'Unable to remove lock file {lock_file}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR, f'Unable to remove lock file {lock_file}')
        return
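
The lock-file pattern used throughout these examples assumes that helper.FileLock creates the file on construction and deletes it again in free(). A minimal sketch of that assumed contract (not mercure's actual implementation):

from pathlib import Path

class FileLock:
    """Minimal sketch of the assumed helper.FileLock contract."""

    def __init__(self, path: Path):
        self.path = path
        self.path.touch(exist_ok=False)  # fails if the lock already exists

    def free(self):
        self.path.unlink()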
Example #3
def search_folder(counter):
    global processor_lockfile
    global processor_is_locked

    helper.g_log('events.run', 1)

    tasks = {}

    for entry in os.scandir(config.mercure['processing_folder']):
        if entry.is_dir() and is_ready_for_processing(entry.path):
            modification_time = entry.stat().st_mtime
            tasks[entry.path] = modification_time

    # Check if processing has been suspended via the UI
    if processor_lockfile.exists():
        if not processor_is_locked:
            processor_is_locked = True
            logger.info("Processing halted")
        return False
    else:
        if processor_is_locked:
            processor_is_locked = False
            logger.info("Processing resumed")

    # Return if no tasks have been found
    if not tasks:
        return False

    # Process the oldest task first (tasks maps each folder path to its mtime)
    sorted_tasks = sorted(tasks, key=tasks.get)
    # TODO: Add priority sorting. However, do not honor the priority flag for every third run
    #       so that stagnation of cases is avoided

    # Only process one case at a time because the processing might take a while and
    # another instance might have processed the other entries already. So the folder
    # needs to be refreshed each time
    task = sorted_tasks[0]

    try:
        process_series(task)
        # Return true, so that the parent function will trigger another search of the folder
        return True
    except Exception:
        logger.exception(f'Problems while processing series {task}')
        monitor.send_series_event(monitor.s_events.ERROR, task, 0, "",
                                  "Exception while processing")
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                           "Exception while processing series")
        return False
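
As a toy illustration of the oldest-first ordering over the {path: mtime} map built above (paths and timestamps are made up):

tasks = {"/processing/b": 1700000300.0, "/processing/a": 1700000100.0}
sorted_tasks = sorted(tasks, key=tasks.get)
assert sorted_tasks[0] == "/processing/a"  # earliest modification time first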
Example #4
def delete_folder(entry):
    """ Deletes given folder. """
    delete_path = entry[0]
    series_uid = find_series_uid(delete_path)
    try:
        rmtree(delete_path)
        logger.info(f"Deleted folder {delete_path} from {series_uid}")
        send_series_event(s_events.CLEAN, series_uid, 0, delete_path,
                          "Deleted folder")
    except Exception:
        logger.exception(f"Unable to delete folder {delete_path}")
        send_series_event(s_events.ERROR, series_uid, 0, delete_path,
                          "Unable to delete folder")
        monitor.send_event(
            monitor.h_events.PROCESSING,
            monitor.severity.ERROR,
            f"Unable to delete folder {delete_path}",
        )
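
The parameter entry is assumed here to be a sequence whose first element is the folder path, presumably a (path, timestamp) pair produced by the caller; a purely hypothetical invocation:

# Hypothetical call; the tuple layout is an assumption based on entry[0]
delete_folder(("/data/success/1.2.840.1234", 1700000100.0))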
Example #5
def push_serieslevel_outgoing(triggered_rules, file_list, series_UID,
                              tags_list, selected_targets):
    """Move the DICOM files of the series to a separate subfolder for each target in the outgoing folder."""
    source_folder = config.mercure['incoming_folder'] + '/'

    # Determine if the files should be copied or moved. If only one rule triggered, files can
    # safely be moved. Otherwise, each target receives a copy and the originals are removed
    # at the end by the caller
    move_operation = (len(triggered_rules) == 1)

    for target in selected_targets:
        if not target in config.mercure["targets"]:
            logger.error(f"Invalid target selected {target}")
            monitor.send_event(monitor.h_events.PROCESSING,
                               monitor.severity.ERROR,
                               f"Invalid target selected {target}")
            continue

        folder_name = config.mercure['outgoing_folder'] + '/' + str(
            uuid.uuid1())
        target_folder = folder_name + "/"

        try:
            os.mkdir(folder_name)
        except Exception:
            logger.exception(f'Unable to create outgoing folder {folder_name}')
            monitor.send_event(
                monitor.h_events.PROCESSING, monitor.severity.ERROR,
                f'Unable to create outgoing folder {folder_name}')
            return

        if not Path(folder_name).exists():
            logger.error(f'Creating folder not possible {folder_name}')
            monitor.send_event(monitor.h_events.PROCESSING,
                               monitor.severity.ERROR,
                               f'Creating folder not possible {folder_name}')
            return

        try:
            lock_file = Path(folder_name) / mercure_names.LOCK
            lock = helper.FileLock(lock_file)
        except Exception:
            # Can't create lock file, so something must be seriously wrong
            logger.error(f'Unable to create lock file {lock_file}')
            monitor.send_event(monitor.h_events.PROCESSING,
                               monitor.severity.ERROR,
                               f'Unable to create lock file {lock_file}')
            return

        # Generate task file with dispatch information
        task_filename = target_folder + mercure_names.TASKFILE
        task_json = generate_taskfile_route(series_UID, mercure_options.SERIES,
                                            selected_targets[target],
                                            tags_list, target)

        try:
            with open(task_filename, 'w') as task_file:
                json.dump(task_json, task_file)
        except Exception:
            logger.exception(f"Unable to create task file {task_filename}")
            monitor.send_event(monitor.h_events.PROCESSING,
                               monitor.severity.ERROR,
                               f"Unable to create task file {task_filename}")
            continue

        monitor.send_series_event(monitor.s_events.ROUTE, series_UID,
                                  len(file_list), target,
                                  selected_targets[target])

        operation = shutil.move if move_operation else shutil.copy

        for entry in file_list:
            try:
                operation(source_folder + entry + mercure_names.DCM,
                          target_folder + entry + mercure_names.DCM)
                operation(source_folder + entry + mercure_names.TAGS,
                          target_folder + entry + mercure_names.TAGS)
            except Exception:
                logger.exception(
                    f'Problem while pushing file to outgoing {entry}')
                logger.error(f'Source folder {source_folder}')
                logger.error(f'Target folder {target_folder}')
                monitor.send_event(
                    monitor.h_events.PROCESSING, monitor.severity.ERROR,
                    f'Problem while pushing file to outgoing {entry}')

        monitor.send_series_event(monitor.s_events.MOVE, series_UID,
                                  len(file_list), folder_name, "")

        try:
            lock.free()
        except Exception:
            # Can't delete lock file, so something must be seriously wrong
            logger.error(f'Unable to remove lock file {lock_file}')
            monitor.send_event(monitor.h_events.PROCESSING,
                               monitor.severity.ERROR,
                               f'Unable to remove lock file {lock_file}')
            return
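
The move-versus-copy decision in isolation: with a single triggered rule the files can simply be moved, while with several rules each target needs its own copy and the originals are removed afterwards by the caller. Rule names below are hypothetical.

import shutil

triggered_rules = {"rule_a": {}, "rule_b": {}}   # hypothetical rules
operation = shutil.move if len(triggered_rules) == 1 else shutil.copy
assert operation is shutil.copy                  # two rules: copy, then clean up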
Example #6
def route_series(series_UID):
    """Processes the series with the given series UID from the incoming folder."""
    lock_file = Path(config.mercure['incoming_folder'] + '/' +
                     str(series_UID) + mercure_names.LOCK)

    if lock_file.exists():
        # Series is locked, so another instance might be working on it
        return

    try:
        lock = helper.FileLock(lock_file)
    except Exception:
        # Can't create lock file, so something must be seriously wrong
        logger.error(f'Unable to create lock file {lock_file}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                           f'Unable to create lock file {lock_file}')
        return

    logger.info(f'Processing series {series_UID}')

    fileList = []
    seriesPrefix = series_UID + "#"

    # Collect all files belonging to the series
    for entry in os.scandir(config.mercure['incoming_folder']):
        if entry.name.endswith(mercure_names.TAGS) and entry.name.startswith(
                seriesPrefix) and not entry.is_dir():
            stemName = entry.name[:-5]  # strip the 5-character ".tags" suffix
            fileList.append(stemName)

    logger.info("DICOM files found: " + str(len(fileList)))

    # Use the tags file from the first slice for evaluating the routing rules
    tagsMasterFile = Path(config.mercure['incoming_folder'] + '/' +
                          fileList[0] + mercure_names.TAGS)
    if not tagsMasterFile.exists():
        logger.error(f'Missing file! {tagsMasterFile.name}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                           f'Missing file {tagsMasterFile.name}')
        return

    try:
        with open(tagsMasterFile, "r") as json_file:
            tagsList = json.load(json_file)
    except Exception:
        logger.exception(f"Invalid tag information of series {series_UID}")
        monitor.send_series_event(monitor.s_events.ERROR, series_UID, 0, "",
                                  "Invalid tag information")
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                           f"Invalid tag for series {series_UID}")
        return

    monitor.send_register_series(tagsList)
    monitor.send_series_event(monitor.s_events.REGISTERED, series_UID,
                              len(fileList), "", "")

    discard_series = ""

    # Now test the routing rules and evaluate which rules have been triggered. If one of the triggered
    # rules enforces discarding, discard_series will be True.
    triggered_rules, discard_series = get_triggered_rules(tagsList)

    if (len(triggered_rules) == 0) or (discard_series):
        # If no routing rule has triggered or discarding has been enforced, discard the series
        push_series_discard(fileList, series_UID, discard_series)
    else:
        # Strategy: If only one triggered rule, move files. If multiple, copy files
        push_series_studylevel(triggered_rules, fileList, series_UID, tagsList)
        push_series_serieslevel(triggered_rules, fileList, series_UID,
                                tagsList)

        if (len(triggered_rules) > 1):
            remove_series(fileList)

    try:
        lock.free()
    except Exception:
        # Can't delete lock file, so something must be seriously wrong
        logger.error(f'Unable to remove lock file {lock_file}')
        monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                           f'Unable to remove lock file {lock_file}')
        return
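
The incoming-folder naming convention assumed by this function: every slice is stored as "<series_UID>#<file>.dcm" with a companion "<series_UID>#<file>.tags" file, so stripping the 5-character ".tags" suffix yields the stem shared by both files. The values below are made up.

name = "1.2.840.1234#0001.tags"      # hypothetical incoming file name
series_uid = name.split("#", 1)[0]
stem = name[:-5]                     # 5 == len(".tags")
assert series_uid == "1.2.840.1234"
assert stem == "1.2.840.1234#0001"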
Example #7
def execute(
    source_folder: Path,
    success_folder: Path,
    error_folder: Path,
    retry_max,
    retry_delay,
):
    """
    Execute the dcmsend command. It will create a .sending file to indicate that
    the folder is being sent. This is to prevent double sending. If there
    happens any error the .lock file is deleted and an .error file is created.
    Folder with .error files are _not_ ready for sending.
    """
    target_info = is_ready_for_sending(source_folder)
    delay = target_info.get("next_retry_at", 0) if target_info else 0

    if target_info and time.time() >= delay:
        logger.info(f"Folder {source_folder} is ready for sending")

        series_uid = target_info.get("series_uid", "series_uid-missing")
        target_name = target_info.get("target_name", "target_name-missing")

        if (series_uid == "series_uid-missing") or (target_name
                                                    == "target_name-missing"):
            send_event(h_events.PROCESSING, severity.WARNING,
                       f"Missing information for folder {source_folder}")

        # Create a .sending file to indicate that this folder is being sent,
        # otherwise the dispatcher would pick it up again if the transfer is
        # still going on
        lock_file = Path(source_folder) / mercure_names.PROCESSING
        try:
            lock_file.touch()
        except Exception:
            send_event(h_events.PROCESSING, severity.ERROR,
                       f"Error sending {series_uid} to {target_name}")
            send_series_event(s_events.ERROR, series_uid, 0, target_name,
                              "Unable to create lock file")
            logger.exception(f"Unable to create lock file {lock_file.name}")
            return

        command = _create_command(target_info, source_folder)
        logger.debug(f"Running command {command}")
        try:
            run(split(command), check=True)
            logger.info(
                f"Folder {source_folder} successfully sent, moving to {success_folder}"
            )
            # Send bookkeeper notification
            file_count = len(
                list(Path(source_folder).glob(mercure_names.DCMFILTER)))
            send_series_event(
                s_events.DISPATCH,
                target_info.get("series_uid", "series_uid-missing"),
                file_count,
                target_info.get("target_name", "target_name-missing"),
                "",
            )
            _move_sent_directory(source_folder, success_folder)
            send_series_event(s_events.MOVE, series_uid, 0, success_folder, "")
        except CalledProcessError as e:
            dcmsend_error_message = DCMSEND_ERROR_CODES.get(e.returncode, "an unknown error")
            logger.exception(
                f"Failed command:\n {command} \nbecause of {dcmsend_error_message}"
            )
            send_event(h_events.PROCESSING, severity.ERROR,
                       f"Error sending {series_uid} to {target_name}")
            send_series_event(s_events.ERROR, series_uid, 0, target_name,
                              dcmsend_error_message)
            retry_increased = increase_retry(source_folder, retry_max,
                                             retry_delay)
            if retry_increased:
                lock_file.unlink()
            else:
                logger.info(f"Max retries reached, moving to {error_folder}")
                send_series_event(s_events.SUSPEND, series_uid, 0, target_name,
                                  "Max retries reached")
                _move_sent_directory(source_folder, error_folder)
                send_series_event(s_events.MOVE, series_uid, 0, error_folder,
                                  "")
                send_event(h_events.PROCESSING, severity.ERROR,
                           f"Series suspended after reaching max retries")
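
How the next_retry_at gate behaves in isolation: a folder whose retry time still lies in the future is skipped on the current dispatcher run. The values below are hypothetical.

import time

target_info = {"next_retry_at": time.time() + 60}   # retry scheduled in 60 s
ready = time.time() >= target_info.get("next_retry_at", 0)
assert ready is False   # skipped now, picked up again on a later run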
Example #8
def run_router(args):
    """Main processing function that is called every second."""
    if helper.is_terminated():
        return

    helper.g_log('events.run', 1)

    #logger.info('')
    #logger.info('Processing incoming folder...')

    try:
        config.read_config()
    except Exception:
        logger.exception(
            "Unable to update configuration. Skipping processing.")
        monitor.send_event(monitor.h_events.CONFIG_UPDATE,
                           monitor.severity.WARNING,
                           "Unable to update configuration (possibly locked)")
        return

    filecount = 0
    series = {}
    complete_series = {}

    error_files_found = False

    # Check the incoming folder for completed series. To this end, generate a map of all
    # series in the folder with the timestamp of the latest DICOM file as value
    for entry in os.scandir(config.mercure['incoming_folder']):
        if entry.name.endswith(".tags") and not entry.is_dir():
            filecount += 1
            seriesString = entry.name.split('#', 1)[0]
            modificationTime = entry.stat().st_mtime

            # Remember the latest modification time seen for each series
            series[seriesString] = max(series.get(seriesString, 0), modificationTime)
        # Check if at least one .error file exists. In that case, the incoming folder should
        # be searched for .error files at the end of the update run
        if (not error_files_found) and entry.name.endswith(".error"):
            error_files_found = True

    # Check if any of the series exceeds the "series complete" threshold
    for entry, timestamp in series.items():
        if time.time() - timestamp > config.mercure['series_complete_trigger']:
            complete_series[entry] = timestamp

    #logger.info(f'Files found     = {filecount}')
    #logger.info(f'Series found    = {len(series)}')
    #logger.info(f'Complete series = {len(complete_series)}')
    helper.g_log('incoming.files', filecount)
    helper.g_log('incoming.series', len(series))

    # Process all complete series
    for entry in sorted(complete_series):
        try:
            route_series(entry)
        except Exception:
            logger.exception(f'Problems while processing series {entry}')
            monitor.send_series_event(monitor.s_events.ERROR, entry, 0, "",
                                      "Exception while processing")
            monitor.send_event(monitor.h_events.PROCESSING,
                               monitor.severity.ERROR,
                               "Exception while processing series")
        # If termination is requested, stop processing series after the active one has been completed
        if helper.is_terminated():
            return

    if error_files_found:
        route_error_files()

    # Now, check if studies in the studies folder are ready for routing/processing
    route_studies()
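
A toy illustration of the series-complete trigger used above: a series counts as complete once no new file has arrived for series_complete_trigger seconds. The threshold below is a made-up example value.

import time

series = {"1.2.840.1234": time.time() - 120}   # last file arrived 2 min ago
series_complete_trigger = 60                    # seconds (hypothetical value)
complete_series = {
    uid: ts for uid, ts in series.items()
    if time.time() - ts > series_complete_trigger
}
assert "1.2.840.1234" in complete_series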