Example #1
    def __init__(self, parent):
        pytk.Window.__init__(self, parent)
        self.transient(parent)
        self.parent = parent
        self.title('About {}'.format(INFO['name']))
        self.resizable(False, False)
        self.frm = pytk.widgets.Frame(self)
        self.frm.pack(fill='both', expand=True)
        self.frmMain = pytk.widgets.Frame(self.frm)
        self.frmMain.pack(fill='both', padx=1, pady=1, expand=True)

        about_txt = '\n'.join((
            MY_GREETINGS[1:],
            dcmpi.__doc__,
            '{} - ver. {}\n{} {}\n{}'.format(
                INFO['name'], INFO['version'],
                INFO['copyright'], INFO['author'], INFO['notice'])
        ))
        msg(about_txt)
        self.lblInfo = pytk.widgets.Label(
            self.frmMain, text=about_txt, anchor='center',
            background='#333', foreground='#ccc', font='TkFixedFont')
        self.lblInfo.pack(padx=8, pady=8, ipadx=8, ipady=8)

        self.btnClose = pytk.widgets.Button(
            self.frmMain, text='Close', command=self.destroy)
        self.btnClose.pack(side='bottom', padx=8, pady=8)
        self.bind('<Return>', self.destroy)
        self.bind('<Escape>', self.destroy)

        pytk.util.center(self, self.parent)

        self.grab_set()
        self.wait_window(self)
Example #2
 def actionRemove(self, event=None):
     """Action on Click Button Remove."""
     items = self.lsvInput.get_children('')
     selected = self.lsvInput.selection()
     if selected:
         for item in selected:
             self.lsvInput.delete(item)
     elif items:
         self.lsvInput.delete(items[-1])
     else:
         msg('Empty input list!')
Example #3
File: util.py Project: norok2/dcmpi
def postprocess_info(sources,
                     formats,
                     access_val=None,
                     access_val_params=None,
                     verbose=D_VERB_LVL):
    """
    Extract information from DICOM data according to a specific instruction dict.

    Parameters
    ==========
    sources : dict
        Dictionary containing the information to post-process.
    formats : dict
        | Dictionary containing the following information:
        * key: The name of the information
        * | val: (source_key, format_function, format_function_parameters)
          | - source_key: The key used to retrieve information from source
          | - format_function(val, param): Post-processing function
          | - format_function_parameters: Additional function parameters
    access_val : func(val, params) (optional)
        A function used as a helper to access data in the source dict.
    access_val_params : tuple (optional)
        A tuple of the parameters to be passed to the access_val function.
    verbose : int (optional)
        Set level of verbosity.

    Returns
    =======
    info : dict
        The postprocessed information.

    """
    info = {}
    for pp_id, postproc in sorted(formats.items()):
        src_id, fmt_func, fmt_params = postproc
        if src_id in sources:
            if access_val:
                field_val = access_val(sources[src_id], access_val_params)
            else:
                field_val = sources[src_id]
            try:
                if fmt_func:
                    field_val = fmt_func(field_val, fmt_params)
            except Exception as e:
                print(e)
                msg('W: Unable to post-process `{}`.'.format(src_id), verbose,
                    VERB_LVL['medium'])
        else:
            field_val = 'N/A'
            msg('W: `{}` field not found.'.format(src_id))
        info[pp_id] = field_val
    return info
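A minimal usage sketch of `postprocess_info` (hypothetical source keys and formatting callables; the import path `dcmpi.util` is an assumption based on the file indicated above):

# Hypothetical sketch: the source dict and the format functions are made up;
# only the (source_key, format_function, parameters) structure follows the
# docstring above.
from dcmpi.util import postprocess_info  # assumed import path

sources = {'PatientAge': '042Y', 'PatientWeight': '70'}
formats = {
    # key: (source_key, format_function(val, param), parameters)
    'Age': ('PatientAge', lambda v, p: v.rstrip(p), 'Y'),
    'Weight': ('PatientWeight', lambda v, p: float(v), None),
    'Height': ('PatientHeight', None, None),  # missing source -> 'N/A'
}
info = postprocess_info(sources, formats)
# info == {'Age': '042', 'Weight': 70.0, 'Height': 'N/A'}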
Example #4
def dcmpi_run(
        in_dirpath,
        out_dirpath,
        subpath=utl.TPL['acquire'],
        import_subpath=utl.ID['dicom'],
        niz_subpath=utl.ID['niz'],
        meta_subpath=utl.ID['meta'],
        prot_subpath=utl.ID['prot'],
        info_subpath=utl.ID['info'],
        report_template=utl.TPL['report'],
        backup_template=utl.TPL['backup'],
        force=False,
        verbose=D_VERB_LVL):
    """
    Standard preprocessing of DICOM files.

    Args:
        in_dirpath (str): Path to input directory.
        out_dirpath (str): Path to output directory.
        subpath (str): Extra subpath template to append to output dirpath.
        import_subpath (str): Subpath for the imported DICOM files.
        niz_subpath (str): Subpath for the NIfTI images.
        meta_subpath (str): Subpath for the metadata files.
        prot_subpath (str): Subpath for the protocol information.
        info_subpath (str): Subpath for the custom information.
        report_template (str): Template for the report filename.
        backup_template (str): Template for the backup filename.
        force (bool): Force new processing.
        verbose (int): Set level of verbosity.

    Returns:
        None.
    """
    from dcmpi.do_acquire_sources import do_acquire_sources
    from dcmpi.do_sorting import sorting

    subdirs = (
        niz_subpath, meta_subpath, prot_subpath, info_subpath, report_template,
        backup_template)
    # import
    dcm_dirpaths = do_acquire_sources(
        in_dirpath, out_dirpath, 'copy', subpath, import_subpath,
        force, verbose)
    for dcm_dirpath in dcm_dirpaths:
        base_dirpath = os.path.dirname(dcm_dirpath)
        # sort
        sorting(
            dcm_dirpath, utl.D_SUMMARY + '.' + utl.EXT['json'],
            force, verbose)
        msg('Done: {}'.format(dcm_dirpath))
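A minimal call sketch (paths are hypothetical; the subpath/template arguments keep their `utl.TPL` / `utl.ID` defaults, and the import path is an assumption):

# Hypothetical paths; all subpath/template arguments keep their defaults.
from dcmpi.dcmpi_run import dcmpi_run  # assumed import path

dcmpi_run(
    in_dirpath='/data/raw_dicom/session_01',  # where the raw DICOMs live
    out_dirpath='/data/preprocessed',         # where the sorted output goes
    force=False,                              # skip work that is already done
)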
Example #5
def send_mail_dcm(dcm_filepath,
                  email_addrs=None,
                  force=False,
                  verbose=D_VERB_LVL):
    """
    Send an email when the measurement was processed.
    """
    recipient_fields = ('OperatorsName', 'PerformingPhysicianName',
                        'ReferringPhysicianName')
    session_fields = (
        ('PatientName', lambda t: t[:4]
         if (t[3] == 'T' or t[3] == 'X') else t),
        ('StudyDate',
         lambda t: time.strftime('%Y-%m-%d', time.strptime(t, '%Y%m%d'))),
        ('StudyTime',
         lambda t: time.strftime('%H-%M', time.strptime(t, '%H%M%S.%f'))),
        ('StationName', lambda t: utl.STATION[t] if t in utl.STATION else t),
        ('StudyDescription', lambda t: t),
    )
    try:
        dcm = pydcm.read_file(dcm_filepath)
        # get recipient
        recipient = ''
        for key in recipient_fields:
            name = getattr(dcm, key) if key in dcm else ''
            if re.match(r'[^@]+@[^@]+\.[^@]+', name):
                recipient = name
                break
        if not recipient:
            raise ValueError('Could not find a recipient.')
        # get session information
        session_info = []
        for key, func in session_fields:
            if key in dcm:
                session_info.append(func(getattr(dcm, key)))
        sample_id = utl.INFO_SEP.join(session_info[:-1])
        study_id = session_info[-1]
        session = '{} / {}'.format(study_id, sample_id)
        # get dirpath
        dirpath = os.sep.join(dcm_filepath.split(os.sep)[:-2])
    except Exception as e:
        print(e)
        msg('E: Could not get information from `{}`.'.format(dcm_filepath))
    else:
        cmd = 'sendmail -t <<{}'.format(
            get_email_txt(email_from=recipient,
                          email_to=recipient,
                          session=session,
                          dirpath=dirpath))
        if email_addrs is None or email_addrs.strip().lower() == \
                recipient.strip().lower():
            subprocess.call(cmd, shell=True)
            msg('I: Email sent to: <{}>.'.format(recipient))
        else:
            print(email_addrs)
            msg('W: Email was NOT sent to: <{}>.'.format(recipient))
            msg(' : (you asked only for recipient <{}>).'.format(email_addrs))
Example #6
def monitor_folder(cmd,
                   dirpath,
                   delay,
                   check,
                   on_added=True,
                   max_count=0,
                   delay_variance=0,
                   force=False,
                   verbose=D_VERB_LVL):
    """
    Monitor changes in a directory and execute a command when a condition is met.
    """
    def list_dirs(dirpath):
        return [
            d for d in os.listdir(dirpath)
            if os.path.isdir(os.path.join(dirpath, d))
        ]

    sec_in_min = 60

    loop = True
    count = 0
    old_dirs = list_dirs(dirpath)
    msg('Watch: {}'.format(dirpath))
    while loop:
        new_dirs = list_dirs(dirpath)
        removed_dirs = [d for d in old_dirs if d not in new_dirs]
        added_dirs = [d for d in new_dirs if d not in old_dirs]
        timestamp = time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime())
        randomized = random.random() * delay * delay_variance / 100
        sleep_delay = (delay + randomized) * sec_in_min
        if removed_dirs:
            msg(': {}  --  {}'.format(timestamp, removed_dirs),
                fmtt='{t.red}{t.bold}')
        if added_dirs:
            msg(': {}  ++  {}'.format(timestamp, added_dirs),
                fmtt='{t.green}{t.bold}')
        if not removed_dirs and not added_dirs:
            text = 'All quiet on the western front.'
            next_check = time.strftime(
                '%H:%M:%S', time.localtime(time.time() + sleep_delay))
            msg(': {}  ..  {}  (next check in ~{} min, at {})'.format(
                timestamp, text, int(delay + randomized), next_check))
        delta_dirs = added_dirs if on_added else removed_dirs
        for delta in delta_dirs:
            delta_dirpath = os.path.join(dirpath, delta)
            if check and check(delta_dirpath):
                delta_cmd = cmd.format(delta_dirpath)
                subprocess.call(delta_cmd, shell=True)
                count += 1
        time.sleep(sleep_delay)
        if 0 < max_count < count:
            loop = False
        else:
            old_dirs = new_dirs
            loop = True
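A sketch of how `monitor_folder` might be wired up (the command, directory and check callable are placeholders; the import path is an assumption). The `{}` in `cmd` is filled with the path of each new subdirectory, as done by `cmd.format(...)` in the loop above:

import os

from dcmpi.monitor_folder import monitor_folder  # assumed import path

monitor_folder(
    cmd='dcmpi_cli -i {} -o /data/preprocessed',  # hypothetical command line
    dirpath='/data/incoming',   # directory to watch
    delay=5,                    # minutes between checks
    check=os.path.isdir,        # condition a new entry must satisfy
    on_added=True,              # react to added (not removed) directories
    max_count=0,                # 0 = keep watching indefinitely
)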
Example #7
def save_config(
        config,
        cfg_filepath=CFG_FILENAME):
    """

    Args:
        config ():
        cfg_filepath ():

    Returns:

    """
    msg('Save configuration to `{}`.'.format(cfg_filepath))
    dirpath = os.path.dirname(cfg_filepath)
    if not os.path.isdir(dirpath):
        os.makedirs(dirpath)
    with open(cfg_filepath, 'w') as cfg_file:
        json.dump(config, cfg_file, sort_keys=True, indent=4)
Example #8
    def actionRun(self, event=None):
        """Action on Click Button Run."""

        def _name_to_tag(text):
            for ending in ('_subpath', '_template'):
                if text.endswith(ending):
                    text = text[:-len(ending)]
            return text

        # TODO: redirect stdout to some log box / use progressbar
        # extract options
        force = self.wdgOptions['force']['chk'].get_val()
        msg('Force: {}'.format(force))
        verbose = VERB_LVL[self.wdgOptions['verbose']['spb'].get_val()]
        msg('Verb.: {}'.format(verbose))
        if self.cfg['use_mp']:
            # parallel
            pool = multiprocessing.Pool(processes=self.cfg['num_processes'])
            proc_results = []
        in_dirpaths = self.lsvInput.get_items()
        for in_dirpath in in_dirpaths:
            kws = {
                name: info['ent'].get_val()
                for name, info in self.wdgModules.items()}
            triggered = collections.OrderedDict(
                [(_name_to_tag(name), ACTIONS[_name_to_tag(name)])
                 for name, info in self.wdgModules.items()
                 if info['chk'].get_val() and _name_to_tag(name) in ACTIONS])
            kws.update({
                'in_dirpath': in_dirpath,
                'out_dirpath': os.path.expanduser(self.txtPath.get()),
                'subpath': self.txtSubpath.get(),
                'actions': triggered,
                'force': force,
                'verbose': verbose,
            })
            if self.cfg['use_mp']:
                proc_results.append(pool.apply_async(dcmpi_run, kwds=kws))
            else:
                dcmpi_run(**kws)
        # print(proc_results)
        if self.cfg['use_mp']:
            res_list = [proc_result.get() for proc_result in proc_results]
        return
Example #9
def load_config(
        cfg_filepath=CFG_FILENAME):
    """

    Args:
        cfg_filepath ():

    Returns:

    """
    cfg = {}
    if os.path.exists(cfg_filepath):
        msg('Load configuration from `{}`.'.format(cfg_filepath))
        try:
            with open(cfg_filepath, 'r') as cfg_file:
                cfg = json.load(cfg_file)
        except json.JSONDecodeError:
            pass
    return cfg
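Together with `save_config` from Example #7, a round-trip sketch (the config path is hypothetical and the import path is an assumption):

from dcmpi.config import load_config, save_config  # assumed import path

cfg_filepath = '/tmp/dcmpi_test/config.json'  # hypothetical location
save_config({'use_mp': True, 'num_processes': 4}, cfg_filepath)
cfg = load_config(cfg_filepath)
assert cfg.get('use_mp') is True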
Example #10
def main(ui_mode=None):
    """
    Main entry point for the script.
    """
    # :: handle program parameters
    arg_parser = handle_arg()
    args = arg_parser.parse_args()
    # :: print debug info
    if args.verbose >= VERB_LVL['debug']:
        arg_parser.print_help()
        msg('\nARGS: ' + str(vars(args)), args.verbose, VERB_LVL['debug'])
    msg(__doc__.strip())
    begin_time = datetime.datetime.now()

    if not ui_mode:
        ui_mode = args.ui_mode

    if utl.has_graphics(ui_mode):
        dcmpi_run_gui(args)
    elif utl.has_term(ui_mode):
        dcmpi_run_tui(args)
    else:
        dcmpi_run_cli(args)

    exec_time = datetime.datetime.now() - begin_time
    msg('ExecTime: {}'.format(exec_time), args.verbose, VERB_LVL['debug'])
Example #11
 def actionRun(self, event=None):
     """Action on Click Button Run."""
     # TODO: redirect stdout to some log box / use progressbar
     # extract options
     force = self.wdgOptions['force']['chk'].get_val()
     msg('Force: {}'.format(force))
     verbose = VERB_LVL[self.wdgOptions['verbose']['spb'].get_val()]
     msg('Verb.: {}'.format(verbose))
     if self.cfg['use_mp']:
         # parallel
         pool = multiprocessing.Pool(processes=self.cfg['num_processes'])
         proc_result_list = []
     for in_dirpath in self.lsvInput.get_items():
         kws = {
             name: info['ent'].get_val()
             for name, info in self.wdgModules.items()}
         kws.update({
             'in_dirpath': in_dirpath,
             'out_dirpath': os.path.expanduser(self.entPath.get()),
             'subpath': self.entSubpath.get(),
             'force': force,
             'verbose': verbose,
         })
         # print(kws)
         if self.cfg['use_mp']:
             proc_result = pool.apply_async(
                 dcmpi_run, kwds=kws)
             proc_result_list.append(proc_result)
         else:
             dcmpi_run(**kws)
     # print(proc_result_list)
     if self.cfg['use_mp']:
         res_list = []
         for proc_result in proc_result_list:
             res_list.append(proc_result.get())
     return
Example #12
def main():
    """
    Main entry point for the script.
    """
    # :: handle program parameters
    arg_parser = handle_arg()
    args = arg_parser.parse_args()
    # :: print debug info
    if args.verbose >= VERB_LVL['debug']:
        arg_parser.print_help()
        msg('\nARGS: ' + str(vars(args)), args.verbose, VERB_LVL['debug'])
    msg(__doc__.strip())
    begin_time = datetime.datetime.now()

    dcmpi_update(args.dirpath, args.dcm_info, args.backup_prefix, args.verbose)

    exec_time = datetime.datetime.now() - begin_time
    msg('ExecTime: {}'.format(exec_time), args.verbose, VERB_LVL['debug'])
Example #13
def main():
    """
    Main entry point for the script.
    """
    # :: handle program parameters
    arg_parser = handle_arg()
    args = arg_parser.parse_args()
    # :: print debug info
    if args.verbose >= VERB_LVL['debug']:
        arg_parser.print_help()
        msg('\nARGS: ' + str(vars(args)), args.verbose, VERB_LVL['debug'])
    msg(__doc__.strip())
    begin_time = datetime.datetime.now()

    get_meta(args.in_dirpath, args.out_dirpath, args.method, args.type_ext,
             args.force, args.verbose)

    exec_time = datetime.datetime.now() - begin_time
    msg('ExecTime: {}'.format(exec_time), args.verbose, VERB_LVL['debug'])
Example #14
def main():
    """
    Main entry point for the script.
    """
    # :: handle program parameters
    arg_parser = handle_arg()
    args = arg_parser.parse_args()
    # :: print debug info
    if args.verbose >= VERB_LVL['debug']:
        arg_parser.print_help()
        msg('\nARGS: ' + str(vars(args)), args.verbose, VERB_LVL['debug'])
    msg(__doc__.strip())

    begin_time = datetime.datetime.now()

    monitor_folder(args.cmd, args.dir, args.delay,
                   lambda x: utl.find_a_dicom(x)[0], True, args.max_count,
                   args.delay_var, args.force, args.verbose)

    exec_time = datetime.datetime.now() - begin_time
    msg('ExecTime: {}'.format(exec_time), args.verbose, VERB_LVL['debug'])
Example #15
def main():
    """
    Main entry point for the script.
    """
    # :: handle program parameters
    arg_parser = handle_arg()
    args = arg_parser.parse_args()
    # fix verbosity in case of 'quiet'
    if args.quiet:
        args.verbose = VERB_LVL['none']
    # :: print debug info
    if args.verbose >= VERB_LVL['debug']:
        arg_parser.print_help()
        msg('\nARGS: ' + str(vars(args)), args.verbose, VERB_LVL['debug'])
    msg(__doc__.strip())
    begin_time = datetime.datetime.now()

    kws = vars(args)
    kws.pop('quiet')
    do_backup(**kws)

    exec_time = datetime.datetime.now() - begin_time
    msg('ExecTime: {}'.format(exec_time), args.verbose, VERB_LVL['debug'])
Example #16
def get_info(
        in_dirpath,
        out_dirpath,
        method='pydicom',
        type_ext=False,
        force=False,
        verbose=D_VERB_LVL):
    """
    Extract custom information from DICOM files and store it as text files.

    Parameters
    ==========
    in_dirpath : str
        Path to input directory.
    out_dirpath : str
        Path to output directory.
    method : str (optional)
        | Extraction method. Accepted values:
        * pydicom: Use PyDICOM Python module.
    type_ext : boolean (optional)
        Add type extension to filename.
    force : boolean (optional)
        Force new processing.
    verbose : int (optional)
        Set level of verbosity.

    Returns
    =======
    None.

    """
    msg(':: Exporting CUSTOM information ({})...'.format(method))
    msg('Input:  {}'.format(in_dirpath))
    msg('Output: {}'.format(out_dirpath))
    sources = utl.dcm_sources(in_dirpath)
    groups = utl.group_series(in_dirpath)
    # proceed only if output is not likely to be there
    if not os.path.exists(out_dirpath) or force:
        # :: create output directory if not exists
        if not os.path.exists(out_dirpath):
            os.makedirs(out_dirpath)

        if method == 'pydicom':
            # :: extract session information
            out_filepath = os.path.join(
                out_dirpath, utl.D_SUMMARY + '.' + utl.ID['info'])
            out_filepath += ('.' + utl.EXT['json']) if type_ext else ''
            info = {'_measurements': groups}
            # info['_sources'] = sources  # DEBUG
            try:
                read_next_dicom = True
                idx = -1
                while read_next_dicom:
                    # get last dicom
                    in_filepath = sorted(sources.items())[idx][1][-1]
                    dcm = pydcm.read_file(in_filepath)
                    stop = 'PixelData' in dcm and \
                           'ImageType' in dcm and 'ORIGINAL' in dcm.ImageType
                    if stop:
                        read_next_dicom = False
                    else:
                        idx -= 1

            except Exception as e:
                print(sources)
                msg('E: failed during get_info (exception: {})'.format(e))
            else:
                # DICOM's-ready information
                info.update(utl.postprocess_info(
                    dcm, custom_info.SESSION,
                    lambda x, p: str(x.value), verbose))
                # additional information: duration
                field_id = 'Duration'
                try:
                    begin_time = time.mktime(time.strptime(
                        info['BeginDate'] + '_' + info['BeginTime'],
                        '%Y-%m-%d_%H:%M:%S'))
                    end_time = time.mktime(time.strptime(
                        info['EndDate'] + '_' + info['EndTime'],
                        '%Y-%m-%d_%H:%M:%S'))
                    field_val = str(
                        datetime.timedelta(0, end_time - begin_time))
                except Exception as e:
                    print(e)
                    field_val = 'N/A'
                    msg('W: Cannot process `{}`.'.format(field_id),
                        verbose, VERB_LVL['medium'])
                finally:
                    info[field_id] = field_val
            msg('Info: {}'.format(out_filepath[len(out_dirpath):]))
            with open(out_filepath, 'w') as info_file:
                json.dump(info, info_file, sort_keys=True, indent=4)

            # :: extract acquisition information
            for group_id, group in sorted(groups.items()):
                out_filepath = os.path.join(
                    out_dirpath, group_id + '.' + utl.ID['info'])
                out_filepath += ('.' + utl.EXT['json']) if type_ext else ''
                info = {'_series': group}
                in_filepath = sorted(
                    sources[groups[group_id][0]])[-1]
                try:
                    dcm = pydcm.read_file(in_filepath)
                except Exception as e:
                    print(e)
                    msg('E: failed processing \'{}\''.format(in_filepath))
                else:
                    info.update(utl.postprocess_info(
                        dcm, custom_info.ACQUISITION,
                        lambda x, p: str(x.value), verbose))
                    # information from protocol
                    if utl.DCM_ID['hdr_nfo'] in dcm:
                        prot_src = dcm[utl.DCM_ID['hdr_nfo']].value
                        prot = utl.parse_protocol(
                            utl.get_protocol(prot_src))
                    else:
                        prot = {}
                    info.update(utl.postprocess_info(
                        prot, custom_info.get_sequence_info(info, prot),
                        None, verbose))
                    # additional information: duration
                    field_id = 'Duration'
                    try:
                        begin_time = time.mktime(time.strptime(
                            info['BeginDate'] + '_' +
                            info['BeginTime'],
                            '%Y-%m-%d_%H:%M:%S'))
                        end_time = time.mktime(time.strptime(
                            info['EndDate'] + '_' +
                            info['EndTime'],
                            '%Y-%m-%d_%H:%M:%S'))
                        field_val = str(
                            datetime.timedelta(0, end_time - begin_time))
                    except Exception as e:
                        print(e)
                        field_val = 'N/A'
                        msg('W: Cannot process `{}`.'.format(field_id),
                            verbose, VERB_LVL['medium'])
                    finally:
                        info[field_id] = field_val
                msg('Info: {}'.format(out_filepath[len(out_dirpath):]))
                with open(out_filepath, 'w') as info_file:
                    json.dump(info, info_file, sort_keys=True, indent=4)

            # :: extract series information
            for src_id, in_filepath_list in sorted(sources.items()):
                out_filepath = os.path.join(
                    out_dirpath, src_id + '.' + utl.ID['info'])
                out_filepath += ('.' + utl.EXT['json']) if type_ext else ''
                info = {}
                for acq, series in groups.items():
                    if src_id in series:
                        info['_acquisition'] = acq
                # use the last (sorted) DICOM file of this series
                in_filepath = sorted(in_filepath_list)[-1]
                try:
                    dcm = pydcm.read_file(in_filepath)
                except Exception as e:
                    print(e)
                    msg('E: failed processing `{}`'.format(in_filepath))
                else:
                    info.update(utl.postprocess_info(
                        dcm, custom_info.SERIES, lambda x, p: x.value,
                        verbose))
                msg('Info: {}'.format(out_filepath[len(out_dirpath):]))
                with open(out_filepath, 'w') as info_file:
                    json.dump(info, info_file, sort_keys=True, indent=4)
        else:
            msg('W: Unknown method `{}`.'.format(method))
    else:
        msg('I: Skipping existing output path. Use `force` to override.')
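A minimal call sketch for `get_info` (paths are hypothetical; the import path is the one used in Example #21):

from dcmpi.get_info import get_info  # import path as in Example #21

get_info(
    in_dirpath='/data/preprocessed/session_01/dcm',   # sorted DICOM files
    out_dirpath='/data/preprocessed/session_01/info',
    method='pydicom',   # the only extraction method handled above
    type_ext=True,      # append the '.json' extension to the output files
)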
Example #17
def dcmpi_run(
        in_dirpath,
        out_dirpath,
        subpath=utl.TPL['acquire'],
        dcm_subpath=utl.ID['dicom'],
        niz_subpath=utl.ID['niz'],
        meta_subpath=utl.ID['meta'],
        prot_subpath=utl.ID['prot'],
        info_subpath=utl.ID['info'],
        report_template=utl.TPL['report'],
        backup_template=utl.TPL['backup'],
        actions=ACTIONS,
        force=False,
        verbose=D_VERB_LVL):
    """
    Standard preprocessing of DICOM files.

    Args:
        in_dirpath (str): Path to input directory.
        out_dirpath (str): Path to output directory.
        subpath (str): Extra subpath template to append to output dirpath.
        dcm_subpath (str): Subpath for the imported DICOM files.
        niz_subpath (str): Subpath for the NIfTI images.
        meta_subpath (str): Subpath for the metadata files.
        prot_subpath (str): Subpath for the protocol information.
        info_subpath (str): Subpath for the custom information.
        report_template (str): Template for the report filename.
        backup_template (str): Template for the backup filename.
        actions (dict): Mapping of action name to (callable, keyword dict).
        force (bool): Force new processing.
        verbose (int): Set level of verbosity.

    Returns:
        None.
    """
    from dcmpi.do_acquire_sources import do_acquire_sources
    from dcmpi.do_sorting import sorting

    # import
    dcm_dirpaths = do_acquire_sources(
        in_dirpath, out_dirpath, 'copy', subpath, dcm_subpath, force, verbose)
    for dcm_dirpath in dcm_dirpaths:
        base_dirpath = os.path.dirname(dcm_dirpath)
        # sort
        sorting(
            dcm_dirpath, utl.D_SUMMARY + '.' + utl.EXT['json'],
            force, verbose)
        # run other actions
        dirpath = {
            'niz': niz_subpath,
            'meta': meta_subpath,
            'prot': prot_subpath,
            'info': info_subpath, }
        dirpath = {
            k: os.path.join(base_dirpath, v) for k, v in dirpath.items() if v}
        for action, (func, kws) in actions.items():
            kws = kws.copy()
            for key, val in kws.items():
                if isinstance(val, str):
                    kws[key] = fmtm(val)
            kws.update(dict(force=force, verbose=verbose))
            try:
                func(**kws)
            except Exception as e:
                warnings.warn(str(e))

        msg('Done: {}'.format(dcm_dirpath))
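The `actions` mapping is consumed as name -> (callable, keyword dict) pairs, and string values are interpolated via `fmtm` against the local names (`dirpath`, `dcm_dirpath`, ...). A heavily hedged sketch of what a single entry could look like; the key, the callable and the template strings below are hypothetical, not the project's real ACTIONS:

import collections

from dcmpi.get_nifti import get_nifti  # assumed import path

actions = collections.OrderedDict([
    # name: (callable, keyword arguments; str values go through fmtm(...))
    ('niz', (get_nifti, {
        'in_dirpath': '{dcm_dirpath}',    # hypothetical template
        'out_dirpath': '{dirpath[niz]}',  # hypothetical template
    })),
])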
Example #18
def dcmpi_update(dirpath,
                 dcm_info=None,
                 backup_prefix='~',
                 verbose=D_VERB_LVL):
    """
    Modify selected DICOM fields of files within a directory.

    Args:
        dirpath (str): Path to input directory.
        dcm_info (str): JSON encoded dictionary of DICOM fields to update.
            If None, DICOM files are left untouched.
        backup_prefix (str): Prefix to use for backup files.
        verbose (int): Set level of verbosity.

    Returns:
        None.

    See Also:
        utl.is_dicom, utl.is_compressed_dicom
    """
    def get_filepaths(path):
        for root, dirs, files in os.walk(path):  # no need to sort
            for name in files:
                yield os.path.join(root, name)

    msg(':: Updating DICOMs...')
    msg('Path: {}'.format(os.path.realpath(dirpath)), verbose, VERB_LVL['low'])

    if os.path.exists(dirpath) and dcm_info:
        # load DICOM field to update
        dcm_info = json.loads(dcm_info)
        # :: analyze directory tree
        for filepath in get_filepaths(dirpath):
            basepath, filename = os.path.split(filepath)
            msg('Analyzing `{}`...'.format(filepath), verbose,
                VERB_LVL['debug'])
            if backup_prefix:
                backup_filepath = os.path.join(basepath,
                                               backup_prefix + filename)
                shutil.copy(filepath, backup_filepath)
                msg('Backup `{}`'.format(backup_filepath), verbose,
                    VERB_LVL['medium'])
            is_dicom = utl.is_dicom(filepath,
                                    allow_dir=False,
                                    allow_report=True,
                                    allow_postprocess=True)
            if not is_dicom:
                is_compressed, compression = utl.is_compressed_dicom(
                    filepath,
                    allow_dir=False,
                    allow_report=True,
                    allow_postprocess=True)
            else:
                is_compressed = False
                compression = None
            if is_dicom or is_compressed and compression in utl.COMPRESSIONS:
                if is_compressed and compression in utl.COMPRESSIONS:
                    dcm_filepath = os.path.splitext(filepath)[0]
                    cmd = utl.COMPRESSIONS[compression]['bwd'] + ' {}'.format(
                        filepath)
                    utl.execute(cmd)
                    msg('Uncompressing: `{}`'.format(dcm_filepath), verbose,
                        VERB_LVL['high'])
                else:
                    dcm_filepath = filepath
                try:
                    dcm = pydcm.read_file(dcm_filepath)
                    for key, val in dcm_info.items():
                        if key in dcm:
                            setattr(dcm, key, str(val))
                        else:
                            msg('W: DICOM attr `{}` not found.'.format(key),
                                verbose, VERB_LVL['medium'])
                    dcm.save_as(dcm_filepath)
                except Exception as e:
                    print(e)
                    msg('E: Could not open DICOM: {}.'.format(dcm_filepath))
                finally:
                    if is_compressed and compression in utl.COMPRESSIONS:
                        cmd = utl.COMPRESSIONS[compression]['fwd'] + \
                              ' {}'.format(dcm_filepath)
                        utl.execute(cmd)
                        msg('Compressing: `{}`'.format(filepath), verbose,
                            VERB_LVL['high'])
                    if dcm_filepath != filepath and \
                            os.path.isfile(dcm_filepath):
                        os.remove(dcm_filepath)
            else:
                subpath = filepath[len(dirpath):]
                msg('W: Invalid source found `{}`'.format(subpath), verbose,
                    VERB_LVL['medium'])
    else:
        msg('W: Input path does NOT exist.', verbose, VERB_LVL['low'])
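A usage sketch showing how the `dcm_info` JSON string might be built (path and field values are hypothetical; the import path is an assumption):

import json

from dcmpi.dcmpi_update import dcmpi_update  # assumed import path

# dcm_info must be a JSON-encoded dict of DICOM attribute names -> new values.
dcm_info = json.dumps({'PatientName': 'ANON', 'PatientID': 'SUBJ_0001'})
dcmpi_update('/data/preprocessed/session_01/dcm', dcm_info,
             backup_prefix='~')  # keep a '~'-prefixed copy of each file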
Example #19
File: util.py Project: norok2/dcmpi
def group_series(dirpath, save_filepath=None, force=False, verbose=D_VERB_LVL):
    """
    Group series according to acquisition.
    """
    summary_filepath = os.path.join(dirpath, save_filepath) \
        if save_filepath else ''
    if os.path.exists(summary_filepath) and not force:
        # :: load grouping from file
        groups = {}
        with open(summary_filepath, 'r') as summary_file:
            groups = json.load(summary_file)
    else:
        sources_dict = dcm_sources(dirpath)
        groups = {}
        group_num = 1
        last_time = 0
        last_prot_name = ''
        for src_id, sources in sorted(sources_dict.items()):
            src_dcm = sources[0]
            try:
                dcm = pydcm.read_file(src_dcm)
            except Exception as e:
                print(e)
                msg('W: failed processing `{}`'.format(src_dcm), verbose,
                    VERB_LVL['medium'])
            else:
                is_acquisition = DCM_ID['TA'] in dcm \
                                 and 'AcquisitionDate' in dcm \
                                 and 'AcquisitionTime' in dcm \
                                 and 'ProtocolName' in dcm
                is_report = 'SeriesDescription' in dcm \
                            and not DCM_ID['pixel_data'] in dcm
                if is_acquisition:
                    curr_time = get_datetime_sec(dcm.AcquisitionDate +
                                                 dcm.AcquisitionTime)
                    curr_prot_name = dcm.ProtocolName
                    is_new_group = (curr_time - last_time > GRACE_PERIOD) or \
                                   (curr_prot_name != last_prot_name)
                    if is_new_group:
                        group_id = INFO_SEP.join((
                            PREFIX_ID['acq'] +
                            '{:0{size}d}'.format(group_num, size=D_NUM_DIGITS),
                            dcm.ProtocolName))
                        if group_id not in groups:
                            groups[group_id] = []
                        group_num += 1
                    # print('{:32s}\t{:32s}'.format(group_id, src_id))
                    groups[group_id].append(src_id)
                    last_time = curr_time
                    last_prot_name = curr_prot_name
                    # last_duration = get_duration(dcm[DCM_ID['TA']])
                elif is_report:
                    group_id = dcm.SeriesDescription
                    if group_id not in groups:
                        groups[group_id] = []
                    groups[group_id].append(src_id)
        # :: save grouping to file
        if summary_filepath:
            msg('Brief: {}'.format(summary_filepath))
            with open(summary_filepath, 'w') as summary_file:
                json.dump(groups, summary_file, sort_keys=True, indent=4)
    return groups
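A hedged usage sketch (the directory is hypothetical; with `save_filepath` set, the grouping is cached as JSON inside the DICOM directory). The import path assumes the function stays in `util.py` as indicated above:

from dcmpi.util import group_series  # assumed import path

groups = group_series('/data/preprocessed/session_01/dcm',
                      save_filepath='summary.json')
for group_id, series_ids in sorted(groups.items()):
    print(group_id, len(series_ids), 'series')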
Example #20
def get_nifti(in_dirpath,
              out_dirpath,
              method='dcm2niix',
              compressed=True,
              merged=True,
              force=False,
              verbose=D_VERB_LVL):
    """
    Extract images from DICOM files and store them as NIfTI images.

    Args:
        in_dirpath (str): Input path containing sorted DICOM files.
        out_dirpath (str): Output path where to store NIfTI images.
        method (str): DICOM to NIfTI conversion method.
            Accepted values:
             - 'dicom2nifti': use pure Python converter.
             - 'isis': use Enrico Reimer's ISIS tool.
                https://github.com/isis-group/isis
             - 'dcm2nii': Use Chris Rorden's `dcm2nii` tool (old version).
             - 'dcm2niix': Use Chris Rorden's `dcm2niix` tool (new version).
        compressed (bool): Produce compressed NIfTI using GNU Zip.
            The resulting files will have `.nii.gz` extension.
        merged (bool): Merge images in the 4th dimension.
            Not supported by all methods.
        force (bool): Force computation to be re-done.
        verbose (int): Set level of verbosity.

    Returns:
        None.
    """
    msg(':: Exporting NIfTI images ({})...'.format(method))
    msg('Input:  {}'.format(in_dirpath))
    msg('Output: {}'.format(out_dirpath))
    # proceed only if output is not likely to be there
    if not os.path.exists(out_dirpath) or force:
        # :: create output directory if not exists and extract images
        if not os.path.exists(out_dirpath):
            os.makedirs(out_dirpath)
        sources = utl.dcm_sources(in_dirpath)
        d_ext = '.' + (utl.EXT['niz'] if compressed else utl.EXT['nii'])

        # :: extract nifti
        if method == 'dicom2nifti':
            for src_id in sorted(sources.keys()):
                in_filepath = os.path.join(in_dirpath, src_id)
                out_filepath = os.path.join(out_dirpath, src_id + d_ext)
                dicom2nifti.dicom_series_to_nifti(in_filepath,
                                                  out_filepath,
                                                  reorient_nifti=True)

        elif method == 'dcm2nii':
            for src_id in sorted(sources.keys()):
                in_filepath = os.path.join(in_dirpath, src_id)
                # produce nifti file
                opts = ' -f n '  # influences the filename
                opts += ' -t n -p n -i n -d n -e y'
                opts += ' -4 ' + ('y' if merged else 'n')
                opts += ' -g ' + ('y' if compressed else 'n')
                cmd = method + ' {} -o {} {}'.format(opts, out_dirpath,
                                                     in_filepath)
                ret_val, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
                term_str = 'GZip...' if compressed else 'Saving '
                lines = p_stdout.split('\n') if p_stdout else ()
                # parse result
                old_names = []
                for line in lines:
                    if term_str in line:
                        old_name = line[line.find(term_str) + len(term_str):]
                        old_names.append(old_name)
                if old_names:
                    msg('Parsed names: ', verbose, VERB_LVL['debug'])
                    msg(''.join([': {}\n'.format(n) for n in old_names]),
                        verbose, VERB_LVL['debug'])
                else:
                    msg('E: Could not locate filenames in `dcm2nii` output.')
                if len(old_names) == 1:
                    old_filepath = os.path.join(out_dirpath, old_names[0])
                    out_filepath = os.path.join(out_dirpath, src_id + d_ext)
                    msg('NIfTI: {}'.format(out_filepath[len(out_dirpath):]))
                    os.rename(old_filepath, out_filepath)
                else:
                    for num, old_name in enumerate(old_names):
                        old_filepath = os.path.join(out_dirpath, old_name)
                        out_filepath = os.path.join(
                            out_dirpath,
                            src_id + utl.INFO_SEP + str(num + 1) + d_ext)
                        msg('NIfTI: {}'.format(
                            out_filepath[len(out_dirpath):]))
                        os.rename(old_filepath, out_filepath)

        elif method == 'dcm2niix':
            for src_id in sorted(sources.keys()):
                in_filepath = os.path.join(in_dirpath, src_id)
                # produce nifti file
                opts = ' -f __img__ '  # set the filename
                opts += ' -9 -t n -p y -i n -d n -b n '
                opts += ' -z ' + ('y' if compressed else 'n')
                cmd = method + ' {} -o {} {}'.format(opts, out_dirpath,
                                                     in_filepath)
                utl.execute(cmd, verbose=verbose)
                old_names = glob.glob(
                    os.path.join(
                        out_dirpath,
                        '__img__*.nii' + ('.gz' if compressed else '')))
                if len(old_names) == 1:
                    old_filepath = os.path.join(out_dirpath, old_names[0])
                    out_filepath = os.path.join(out_dirpath, src_id + d_ext)
                    msg('NIfTI: {}'.format(out_filepath[len(out_dirpath):]))
                    os.rename(old_filepath, out_filepath)
                else:
                    for num, old_name in enumerate(old_names):
                        old_filepath = os.path.join(out_dirpath, old_name)
                        out_filepath = os.path.join(
                            out_dirpath,
                            src_id + utl.INFO_SEP + str(num + 1) + d_ext)
                        msg('NIfTI: {}'.format(
                            out_filepath[len(out_dirpath):]))
                        os.rename(old_filepath, out_filepath)

        elif method == 'isis':
            for src_id in sorted(sources.keys()):
                in_filepath = os.path.join(in_dirpath, src_id)
                out_filepath = os.path.join(out_dirpath, src_id + d_ext)
                cmd = 'isisconv -in {} -out {}'.format(in_filepath,
                                                       out_filepath)
                ret_val, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
                if merged:
                    # TODO: implement volume merging
                    msg('W: (isisconv) volume merging is not implemented.',
                        verbose, VERB_LVL['medium'])

        else:
            msg('W: Unknown method `{}`.'.format(method))
    else:
        msg('I: Skipping existing output path. Use `force` to override.')
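A minimal call sketch (paths are hypothetical; the `dcm2niix` binary must be available on the PATH for this method, and the import path is an assumption):

from dcmpi.get_nifti import get_nifti  # assumed import path

get_nifti(
    in_dirpath='/data/preprocessed/session_01/dcm',
    out_dirpath='/data/preprocessed/session_01/niz',
    method='dcm2niix',
    compressed=True,   # produce .nii.gz files
    merged=True,
)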
Example #21
def do_report(in_dirpath,
              out_dirpath,
              basename='{name}_{date}_{time}_{sys}',
              method='pydicom',
              file_format='pdf',
              force=False,
              verbose=D_VERB_LVL):
    """
    Create an HTML report (and optionally a PDF) from the DICOM information.

    Parameters
    ==========
    in_dirpath : str
        Path to input directory.
    out_dirpath : str
        Path to output directory.
    method : str (optional)
        | Extraction method. Accepted values:
        * pydicom: Use PyDICOM Python module.
    file_format : str (optional)
        | Output format. The HTML report is always produced. Accepted values:
        * pdf: Also produce a PDF report using `wkhtmltopdf`.
    force : boolean (optional)
        Force new processing.
    verbose : int (optional)
        Set level of verbosity.

    Returns
    =======
    None.

    """
    msg(':: Creating HTML and PDF report...')
    msg('Input:  {}'.format(in_dirpath))
    msg('Output: {}'.format(out_dirpath))

    dcm_filename, compression = utl.find_a_dicom(in_dirpath)
    out_basename = utl.fill_from_dicom(basename, dcm_filename)
    html_basename = out_basename + '.htm'
    msg('HTML: {}'.format(html_basename))
    out_filepath = os.path.join(out_dirpath, html_basename)

    # proceed only if output is not likely to be there
    if not os.path.exists(out_filepath) or force:
        # :: create output directory if not exists and extract images
        if not os.path.exists(out_dirpath):
            os.makedirs(out_dirpath)

        # :: get information
        summary, extra = {}, {}
        acquisitions = []
        if method == 'pydicom':
            # :: create temporary info data
            from dcmpi.get_info import get_info
            info_dirpath = os.path.join(os.path.dirname(in_dirpath),
                                        utl.ID['info'])
            get_info(in_dirpath,
                     info_dirpath,
                     method,
                     force=force,
                     verbose=verbose)
            for name in sorted(os.listdir(info_dirpath)):
                target = os.path.join(info_dirpath, name)
                with open(target, 'r') as target_file:
                    if name.startswith('summary.info'):
                        summary = json.load(target_file)
                    elif name.startswith('extra.info'):
                        extra = json.load(target_file)
                    elif name.startswith('a'):
                        acquisitions.append((name[:name.find(utl.INFO_SEP)],
                                             json.load(target_file)))

        else:
            msg('W: Unknown method `{}`.'.format(method))

        # :: create report
        tpl_dirpath = os.path.join(os.path.dirname(__file__),
                                   'report_templates')

        if summary and acquisitions and os.path.isdir(tpl_dirpath):
            # :: always create HTML report
            # import templates
            template = {
                'report': 'report_template.html',
                'acq': 'acquisition_template.html',
                'acq_param': 'acquisition_parameter_template.html',
            }
            for key, filename in template.items():
                tpl_filepath = os.path.join(tpl_dirpath, filename)
                with open(tpl_filepath, 'r') as tpl_file:
                    template[key] = tpl_file.read()
            # replace tags
            acq_html = ''
            for n_acq, acq in acquisitions:
                acq_param_html = ''
                for key, val in sorted(get_param(acq).items(), key=sort_param):
                    acq_param_html += template['acq_param'].replace(
                        '[ACQ-PARAM-KEY]',
                        key).replace('[ACQ-PARAM-VAL]', val)
                tags = {
                    '[ACQ-ID]':
                    n_acq,
                    '[ACQ-TIME]':
                    acq['AcquisitionTime'],
                    '[ACQ-PROTOCOL]':
                    acq['ProtocolName'],
                    '[ACQ-SERIES]':
                    ', '.join([
                        series[:series.find(utl.INFO_SEP)]
                        for series in acq['_series']
                    ]),
                    '[ACQUISITION-PARAMETER-TEMPLATE]':
                    acq_param_html,
                }
                tmp_acq_html = template['acq']
                for tag, val in tags.items():
                    tmp_acq_html = tmp_acq_html.replace(tag, val)
                acq_html += tmp_acq_html
            report_html = template['report']
            tags = {
                '[TIMESTAMP]': time.strftime('%c UTC', time.gmtime()),
                '[SESSION-INFO]': get_session(info_dirpath, summary),
                '[CUSTOM-PIL]':
                    extra['pil'] if 'pil' in extra else \
                        html_input('text', '{"maxlength": 4}'),
                '[CUSTOM-U-ID]':
                    extra['uid'] if 'uid' in extra else \
                        html_input('text', '{"maxlength": 12}'),
                '[CUSTOM-B-ID]':
                    extra['bid'] if 'bid' in extra else \
                        html_input('text', '{"maxlength": 4}'),
                '[CUSTOM-NUM-7T]':
                    extra['pil'] if 'pil' in extra else \
                        html_input('text', '{"maxlength": 3}'),
                '[PATIENT-NAME]': summary['PatientName'],
                '[PATIENT-ID]': summary['PatientID'],
                '[PATIENT-SEX]': summary['PatientSex'],
                '[PATIENT-AGE]': summary['PatientAge'],
                '[PATIENT-BIRTH-DATE]': summary['PatientBirthDate'],
                '[PATIENT-WEIGHT]': summary['PatientWeight'],
                '[PATIENT-HEIGHT]': summary['PatientHeight'],
                '[MAGNETIC-FIELD-STRENGTH]':
                    summary['NominalMagneticFieldStrength'],
                '[SYSTEM-NAME]': summary['StationName'],
                '[SYSTEM-ID]': summary['StationID'],
                '[LOCATION]': summary['InstitutionName'],
                '[COIL-SYSTEM]': summary['CoilSystem'],
                '[EARPHONES]': check_box('Earphones', extra),
                '[PADS]': check_box('Pads', extra),
                '[PULSE-OXIMETER]': check_box('PulseOximeter', extra),
                '[PROJECTOR]': check_box('Projector', extra),
                '[EXT-COMPUTER]': check_box('ExtComputer', extra),
                '[PARALLEL-TX]': check_box('ParallelTX', extra),
                '[OTHERS]':
                    (extra['Others'] if 'Others' in extra and extra['Others'] \
                         else html_input('text')) + html_input('text'),
                '[STUDY-NAME]': html_input('text', '{"maxlength": 16}'),
                '[STUDY-ID]': html_input('text', '{"maxlength": 16}'),
                '[STUDY-DESCR]': summary['StudyDescription'],
                '[BEGIN-DATE]': summary['BeginDate'],
                '[BEGIN-TIME]': summary['BeginTime'],
                '[END-DATE]': summary['EndDate'],
                '[END-TIME]': summary['EndTime'],
                '[DURATION]': summary['Duration'],
                '[PERFORMER]': summary['Performer'],
                '[OPERATOR]': summary['Operator'],
                '[ACQUISITION-TEMPLATE]': acq_html
            }
            for tag, val in tags.items():
                if val == 'N/A':
                    val = ''
                report_html = report_html.replace(tag, val)
            # todo: improve filename (e.g. from upper folder or recalculate)

            with open(out_filepath, 'w') as html_file:
                html_file.write(report_html)

            if file_format == 'pdf':
                pdf_filename = out_basename + '.pdf'
                msg('Report: {}'.format(pdf_filename))
                pdf_filepath = os.path.join(out_dirpath, pdf_filename)
                opts = (
                    ' --page-size {}'.format('A4'),
                    ' --margin-bottom {}'.format('15mm'),
                    ' --margin-left {}'.format('15mm'),
                    ' --margin-right {}'.format('15mm'),
                    ' --margin-top {}'.format('15mm'),
                    # ' --no-pdf-compression',  # n/a in Ubuntu 14.04
                )
                cmd = 'wkhtmltopdf {} {} {}'.format(' '.join(opts),
                                                    out_filepath, pdf_filepath)
                ret_val, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)

            else:
                msg('W: Unknown format `{}`.'.format(file_format))

        else:
            msg('W: Acquisition information not found.')
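A call sketch (paths are hypothetical; the HTML report is always written, the PDF only when `file_format == 'pdf'` and `wkhtmltopdf` is installed; the import path is an assumption):

from dcmpi.do_report import do_report  # assumed import path

do_report(
    in_dirpath='/data/preprocessed/session_01/dcm',
    out_dirpath='/data/preprocessed/session_01/report',
    file_format='pdf',
)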
Example #22
def do_acquire_sources(
        in_dirpath,
        out_dirpath,
        method='symlink',
        subpath='{study}/{name}_{date}_{time}_{sys}',
        extra_subpath=util.ID['dicom'],
        force=False,
        verbose=D_VERB_LVL):
    """
    Get all DICOM within an input directory.

    Args:
        in_dirpath (str): Path to input directory.
        out_dirpath (str): Path to output directory.
        method (str): DICOM sources fetch method.
            Available options are:
             - 'copy': Copy files from source to destination
             - 'move': Move files from source to destination (use with care!)
             - 'symlink': Create a symbolic link (must be supported by OS)
             - 'link': Create a hard link (must be supported by OS)
        subpath (str): Extra subpath to append to output dirpath.
            Extract and interpret fields from DICOM, according to field
            specifications: <field::format>.
            For more info on the accepted syntax, see
            `utils.fill_from_dicom()`.
        extra_subpath (str): Additional subpath appended after `subpath`.
        force (bool): Force new processing.
        verbose (int): Set level of verbosity.

    Returns:
        dcm_dirpaths : str set
        Paths to directories containing DICOM files separated by session.

    See Also:
        utils.fill_from_dicom,
        utils.find_a_dicom
    """

    def get_filepaths(dirpath):
        for root, dirs, files in os.walk(dirpath):  # no need to sort
            for name in files:
                yield os.path.join(root, name)

    msg(':: Importing sources...')
    msg('Input:  {}'.format(in_dirpath))
    msg('Output: {}'.format(out_dirpath))

    method = method.lower()

    if method == 'move':
        msg('W: Files will be moved!', fmtt='{t.yellow}{t.bold}')
    elif method == 'symlink':
        msg('W: Files will be linked!', fmtt='{t.yellow}{t.bold}')
    if os.path.exists(in_dirpath):
        # :: analyze directory tree
        dcm_dirpaths = set()
        for filepath in get_filepaths(in_dirpath):
            msg('Analyzing `{}`...'.format(filepath),
                verbose, VERB_LVL['debug'])
            filename = os.path.basename(filepath)
            is_dicom = util.is_dicom(
                filepath,
                allow_dir=False,
                allow_report=True,
                allow_postprocess=True)
            if not is_dicom:
                is_compressed, compression = util.is_compressed_dicom(
                    filepath,
                    allow_dir=False,
                    allow_report=True,
                    allow_postprocess=True)
            else:
                is_compressed = False
                compression = None
            if is_dicom or is_compressed and compression in util.COMPRESSIONS:
                dcm_subpath = None
                if subpath and extra_subpath:
                    full_subpath = os.path.join(subpath, extra_subpath)
                elif subpath:
                    full_subpath = subpath
                else:  # if extra_subpath:
                    full_subpath = extra_subpath
                if full_subpath:
                    dcm_subpath = util.fill_from_dicom(full_subpath, filepath)
                    dcm_dirpath = os.path.join(out_dirpath, dcm_subpath)
                else:
                    dcm_dirpath = out_dirpath
                if not os.path.exists(dcm_dirpath):
                    os.makedirs(dcm_dirpath)
                if dcm_dirpath not in dcm_dirpaths:
                    if dcm_subpath:
                        msg('Subpath: {}'.format(dcm_subpath),
                            verbose, VERB_LVL['low'])
                    dcm_dirpaths.add(dcm_dirpath)
                fake_path = os.path.dirname(os.path.relpath(
                    filepath, in_dirpath)).replace(
                    os.path.sep, util.INFO_SEP) + util.INFO_SEP
                out_filepath = os.path.join(dcm_dirpath, fake_path + filename)
                if not os.path.isfile(out_filepath) or force:
                    if method == 'move':
                        shutil.move(filepath, out_filepath)
                    elif method == 'copy':
                        shutil.copy(filepath, out_filepath)
                    elif method == 'symlink':
                        os.symlink(filepath, out_filepath)
                    elif method == 'link':
                        os.link(filepath, out_filepath)
                else:
                    msg('I: Skipping existing output path. '
                        'Use `force` to override.')
            else:
                name = filepath[len(in_dirpath):]
                msg('W: Invalid source found `{}`'.format(name),
                    verbose, VERB_LVL['medium'])
    else:
        dcm_dirpaths = None
        msg('W: Input path does NOT exist.', verbose, VERB_LVL['low'])
    return dcm_dirpaths
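A usage sketch (input/output paths are hypothetical; the default `subpath` template interpolates DICOM fields via `utils.fill_from_dicom`, and the import path is the one used in Example #4):

from dcmpi.do_acquire_sources import do_acquire_sources  # as in Example #4

dcm_dirpaths = do_acquire_sources(
    in_dirpath='/data/raw_dicom',
    out_dirpath='/data/preprocessed',
    method='symlink',   # leave the sources in place, just link them
    subpath='{study}/{name}_{date}_{time}_{sys}',
)
for dcm_dirpath in dcm_dirpaths or ():
    print('Imported into:', dcm_dirpath)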
Example #23
def do_backup(
        in_dirpath,
        out_dirpath=None,
        basename='{name}_{date}_{time}_{sys}',
        method='tlz',
        keep=False,
        force=False,
        verbose=D_VERB_LVL):
    """
    Safely back up DICOM files and test the produced archive.

    Args:
        in_dirpath (str): Path to input directory.
        out_dirpath (str): Path to output directory.
        basename (str): The name of the backup file.
            Extract and interpret fields from DICOM, according to field
            specifications: <field::format>.
            For more information on accepted syntax,
            see `utl.fill_from_dicom()`.
        method (str): The compression method.
            Accepted values:
             - 'tlz': tar archive compressed with lzip (default).
             - 'tgz': tar archive compressed with gzip.
             - 'tbz2': tar archive compressed with bzip2.
             - 'txz': tar archive compressed with xz.
             - '7z': 7z archive.
             - 'zip': zip archive.
        keep (bool): Do NOT remove DICOM sources afterward.
        force (bool): Force new processing.
        verbose (int): Set level of verbosity.

    Returns:
        None.
    """

    def _success(ret_code, p_stdout, p_stderr):
        # and p_stdout.find('Everything is Ok') > 0
        return not ret_code

    msg(':: Backing up DICOM folder...')
    msg('Input:  {}'.format(in_dirpath))
    dcm_filename, compression = utl.find_a_dicom(in_dirpath)
    out_basename = utl.fill_from_dicom(basename, dcm_filename)
    if not out_dirpath:
        out_dirpath = os.path.dirname(in_dirpath)
    out_filepath = os.path.join(out_dirpath, out_basename)
    if method in ARCHIVE_EXT:
        out_filepath += '.' + ARCHIVE_EXT[method]
    msg('Output: {}'.format(out_filepath))
    success = False
    if not os.path.exists(out_filepath) or force:
        if method == 'tlz':
            cmd_token_list = [
                'tar', '--lzip', '-cf', out_filepath, in_dirpath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Backup' + (' ' if success else ' NOT ') + 'successful.')
            # :: test archive
            cmd_token_list = ['lzip', '-t', out_filepath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Test was' + (' ' if success else ' NOT ') + 'successful.')

        elif method == 'tgz':
            cmd_token_list = [
                'tar', '--gzip', '-cf', out_filepath, in_dirpath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Backup' + (' ' if success else ' NOT ') + 'successful.')
            # :: test archive
            cmd_token_list = ['gzip', '-t', out_filepath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Test was' + (' ' if success else ' NOT ') + 'successful.')

        elif method == 'tbz2':
            cmd_token_list = [
                'tar', '--bzip2', '-cf', out_filepath, in_dirpath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Backup' + (' ' if success else ' NOT ') + 'successful.')
            # :: test archive
            cmd_token_list = ['bzip2', '-t', out_filepath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Test was' + (' ' if success else ' NOT ') + 'successful.')

        elif method == '7z':
            cmd_token_list = [
                '7z', 'a', '-mx9', out_filepath, in_dirpath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Backup' + (' ' if success else ' NOT ') + 'successful.')
            # :: test archive
            cmd_token_list = ['7z', 't', out_filepath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Test was' + (' ' if success else ' NOT ') + 'successful.')

        elif method == 'zip':
            cmd_token_list = [
                'zip', '-r', '-9', out_filepath, in_dirpath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Backup' + (' ' if success else ' NOT ') + 'successful.')
            # :: test archive
            cmd_token_list = ['zip', '-T', out_filepath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Test was' + (' ' if success else ' NOT ') + 'successful.')

        elif method == 'txz':
            cmd_token_list = [
                'tar', '--xz', '-cf', out_filepath, in_dirpath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Backup' + (' ' if success else ' NOT ') + 'successful.')
            # :: test archive
            cmd_token_list = ['xz', '-t', out_filepath]
            cmd = ' '.join(cmd_token_list)
            ret_code, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
            success = _success(ret_code, p_stdout, p_stderr)
            msg(':: Test was' + (' ' if success else ' NOT ') + 'successful.')

        else:
            msg('W: Unknown method `{}`.'.format(method))
        if success and not keep and os.path.exists(in_dirpath):
            msg('Remove: {}'.format(in_dirpath))
            shutil.rmtree(in_dirpath, ignore_errors=True)
    else:
        msg('I: Skipping existing output path. Use `force` to override.')
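
A minimal call sketch for `do_backup` (the paths below are placeholders, and the default 'tlz' method assumes `tar` and `lzip` are installed on the system):

# Hypothetical usage sketch for do_backup(); paths are placeholders.
do_backup(
    '/data/dicom/incoming/session_01',      # assumed DICOM input directory
    out_dirpath='/data/dicom/archive',      # assumed archive destination
    basename='{name}_{date}_{time}_{sys}',  # fields filled from a DICOM file
    method='tlz',                           # tar + lzip; requires both tools
    keep=True,                              # do not delete the DICOM sources
)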
Ejemplo n.º 24
0
def dcmpi_explorer_cli(*_args, **_kws):
    msg('We-are-doomed...')
    pass
Ejemplo n.º 25
0
def get_prot(in_dirpath,
             out_dirpath,
             method='pydicom',
             type_ext=False,
             force=False,
             verbose=D_VERB_LVL):
    """
    Extract protocol information from DICOM files and store it as text files.

    Parameters
    ==========
    in_dirpath : str
        Path to input directory.
    out_dirpath : str
        Path to output directory.
    method : str (optional)
        | Extraction method. Accepted values:
        * pydicom: Use PyDICOM Python module.
    type_ext : boolean (optional)
        Add type extension to filename.
    force : boolean (optional)
        Force new processing.
    verbose : int (optional)
        Set level of verbosity.

    Returns
    =======
    None.

    """
    msg(':: Exporting PROTOCOL information ({})...'.format(method))
    msg('Input:  {}'.format(in_dirpath))
    msg('Output: {}'.format(out_dirpath))
    sources_dict = utl.dcm_sources(in_dirpath)
    groups_dict = utl.group_series(in_dirpath)
    # proceed only if output is not likely to be there
    if not os.path.exists(out_dirpath) or force:
        # :: create output directory if not exists and extract protocol
        if not os.path.exists(out_dirpath):
            os.makedirs(out_dirpath)
        if method == 'pydicom':
            for group_id, group in sorted(groups_dict.items()):
                in_filepath = sources_dict[group[0]][0]
                out_filepath = os.path.join(out_dirpath,
                                            group_id + '.' + utl.ID['prot'])
                out_filepath += ('.' + utl.EXT['txt']) if type_ext else ''
                try:
                    dcm = pydcm.read_file(in_filepath)
                    prot_src = dcm[utl.DCM_ID['hdr_nfo']].value
                    prot_str = utl.get_protocol(prot_src)
                except Exception as e:
                    print(e)
                    msg('E: failed processing \'{}\''.format(in_filepath))
                else:
                    msg('Protocol: {}'.format(out_filepath[len(out_dirpath):]))
                    with open(out_filepath, 'w') as prot_file:
                        prot_file.write(prot_str)
        else:
            msg('W: Unknown method `{}`.'.format(method))
    else:
        msg('I: Skipping existing output path. Use `force` to override.')
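
A minimal usage sketch for `get_prot` (paths are placeholders; the input is assumed to contain DICOM files carrying the embedded protocol header that `utl.get_protocol()` parses):

# Hypothetical usage sketch for get_prot(); paths are placeholders.
get_prot(
    '/data/dicom/session_01/dcm',   # assumed sorted DICOM input directory
    '/data/dicom/session_01/prot',  # assumed output directory for protocol text
    method='pydicom',
    type_ext=True,                  # append the '.txt' extension to outputs
)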
Ejemplo n.º 26
0
def dcm_analyze_dir(dirpath,
                    match='{"_concat":"and"}',
                    action='',
                    force=False,
                    verbose=D_VERB_LVL):
    """
    Analyze a DICOM from a directory, performing an action if its data match the request.

    Args:
        dirpath (str): Directory where to look for DICOM files.
        match (str): A JSON-encoded dict with matching information.
            Any key not starting with `_` should specify a DICOM field, while
            the val should contain a regular expression.
            Keys starting with `_` contain special directives:
             - `_concat` (str): the concatenation method for matching rules.
               Accepted values are: ['and'|'or']
        action (str): Action to be performed.
            Accepted values are:
             - send_mail: send an e-mail to the first e-mail address found.
             - dcmpi_cli: run the dcmpi_cli pipeline.
             - email+preprocess: send the e-mail, then run the pipeline.
        force (bool): Force new processing.
        verbose (int): Set level of verbosity.

    Returns:
        None.
    """
    dcm_filepath = utl.find_a_dicom(dirpath)[0]
    try:
        dcm = pydcm.read_file(dcm_filepath)
        # check matching
        conditions = json.loads(match)
        concat = conditions.pop('_concat').lower() \
            if '_concat' in conditions else 'and'
        if concat == 'and':
            matched = True
            for key, val in conditions.items():
                name = getattr(dcm, key) if key in dcm else ''
                msg('Match `{}`:`{}` (read:`{}`)'.format(key, val, name))
                if not re.match(val, name):
                    matched = False
                    break
        elif concat == 'or':
            matched = False
            for key, val in conditions.items():
                name = getattr(dcm, key) if key in dcm else ''
                if re.match(val, name):
                    matched = True
                    break
        else:
            raise ValueError('Unknown concatenation method.')
    except Exception as e:
        print(e)
        msg('E: Could not get information from `{}`.'.format(dcm_filepath))
    else:
        # perform action
        if matched:
            if action.lower() == 'send_mail':
                send_mail_dcm(dcm_filepath, None, force, verbose)
            elif action.lower() == 'dcmpi_cli':
                io_dirs = (dirpath, '/SCR/TEMP')
                cmd = os.path.dirname(__file__) + \
                      '/dcmpi_run_cli.py -i {} -o {}'.format(*io_dirs)
                subprocess.call(cmd, shell=True)
            elif action.lower() == 'email+preprocess':
                send_mail_dcm(dcm_filepath, None, force, verbose)
                io_dirs = (dirpath, '/SCR/TEMP')
                cmd = os.path.dirname(__file__) + \
                      '/dcmpi_run_cli.py -i {} -o {}'.format(*io_dirs)
                subprocess.call(cmd, shell=True)
            else:
                msg('W: Action `{}` not valid.'.format(action))
        else:
            msg('I: Match `{}` was not successful.'.format(match))
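
To illustrate the `match` argument, here is a sketch of a JSON-encoded rule set (the DICOM keywords and patterns below are assumptions for the example): keys name DICOM fields, values are regular expressions matched against them, and `_concat` decides whether all rules ('and') or any rule ('or') must hold.

import json

# Hypothetical matching rule: act only when both conditions hold.
match = json.dumps({
    '_concat': 'and',              # combine the rules with logical AND
    'PatientName': r'PHANTOM.*',   # regex matched against the field value
    'StudyDescription': r'.*qa.*',
})
dcm_analyze_dir('/data/dicom/incoming', match=match, action='dcmpi_cli')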
Ejemplo n.º 27
0
def dcmpi_run_cli(*_args, **_kws):
    msg('We-are-doomed...')
    pass
Ejemplo n.º 28
0
def get_meta(in_dirpath,
             out_dirpath,
             method='pydicom',
             type_ext=False,
             force=False,
             verbose=D_VERB_LVL):
    """
    Extract metadata from DICOM files and save it to text/JSON files.

    Parameters
    ==========
    in_dirpath : str
        Path to input directory.
    out_dirpath : str
        Path to output directory.
    method : str (optional)
        | Extraction method. Accepted values:
        * pydicom: Use the PyDICOM Python module.
        * isis: Use Enrico Reimer's ISIS tool.
        * dcm_dump: Use a DICOM dump (not implemented yet).
        * strings: Use the POSIX 'strings' command (not implemented yet).
    type_ext : boolean (optional)
        Add type extension to filename.
    force : boolean (optional)
        Force new processing.
    verbose : int (optional)
        Set level of verbosity.

    Returns
    =======
    None.

    """
    msg(':: Exporting METADATA information ({})...'.format(method))
    msg('Input:  {}'.format(in_dirpath))
    msg('Output: {}'.format(out_dirpath))
    sources_dict = utl.dcm_sources(in_dirpath)
    msg('Sources: {}'.format(sources_dict), verbose, VERB_LVL['debug'])
    # groups_dict = utl.group_series(in_dirpath)
    # proceed only if output is not likely to be there
    if not os.path.exists(out_dirpath) or force:
        # :: create the output directory if it does not exist, then extract metadata
        if not os.path.exists(out_dirpath):
            os.makedirs(out_dirpath)
        if method == 'pydicom':
            for src_id, in_filepath_list in sorted(sources_dict.items()):
                out_filepath = os.path.join(out_dirpath,
                                            src_id + '.' + utl.ID['meta'])
                out_filepath += ('.' + utl.EXT['json']) if type_ext else ''
                info_dict = {}
                for in_filepath in in_filepath_list:
                    try:
                        dcm = pydcm.read_file(in_filepath)
                    except Exception as e:
                        msg('E: failed processing `{}`'.format(in_filepath),
                            verbose, D_VERB_LVL)
                        msg('E: ...with exception: {}'.format(e), verbose,
                            VERB_LVL['debug'])
                    else:
                        dcm_dict = utl.dcm_dump(dcm)
                        info_dict = utl.dcm_merge_info(info_dict, dcm_dict)
                msg('Meta: {}'.format(out_filepath[len(out_dirpath):]),
                    verbose, D_VERB_LVL)
                with open(out_filepath, 'w') as info_file:
                    json.dump(info_dict, info_file, sort_keys=True, indent=4)

        elif method == 'isis':
            for src_id, in_filepath_list in sorted(sources_dict.items()):
                in_filepath = os.path.join(in_dirpath, src_id)
                out_filepath = os.path.join(out_dirpath,
                                            src_id + '.' + utl.ID['meta'])
                out_filepath += ('.' + utl.EXT['txt']) if type_ext else ''
                msg('Metadata: {}'.format(out_filepath[len(out_dirpath):]))
                opts = ' -np'  # do not include progress bar
                opts += ' -rdialect withExtProtocols'  # extended prot info
                opts += ' -chunks'  # information from each chunk
                # cmd = 'isisdump -in {} {}'.format(in_filepath, opts)
                # ret_val, p_stdout, p_stderr = utl.execute(cmd, verbose=verbose)
                # redirect both stdout and stderr to the output file
                cmd = 'isisdump -in {} {} > {} 2>&1'.format(
                    in_filepath, opts, out_filepath)
                utl.execute(cmd, mode='call', verbose=verbose)

        elif method == 'dcm_dump':
            # TODO: implement meaningful super-robust string method.
            msg('W: Method `{}` not implemented yet.'.format(method))

        elif method == 'strings':
            # TODO: implement meaningful super-robust string method.
            msg('W: Method `{}` not implemented yet.'.format(method))

        else:
            msg('W: Unknown method `{}`.'.format(method))
    else:
        msg('I: Skipping existing output path. Use `force` to override.')
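
A minimal usage sketch for `get_meta` with the 'pydicom' method (paths are placeholders); each output file then holds the merged per-source metadata as a JSON dict:

# Hypothetical usage sketch for get_meta(); paths are placeholders.
get_meta(
    '/data/dicom/session_01/dcm',   # assumed sorted DICOM input directory
    '/data/dicom/session_01/meta',  # assumed output directory for JSON files
    method='pydicom',
    type_ext=True,                  # append the '.json' extension to outputs
)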
Ejemplo n.º 29
0
def sorting(dirpath,
            summary=utl.D_SUMMARY + '.' + utl.EXT['json'],
            force=False,
            verbose=D_VERB_LVL):
    """
    Sort DICOM files for series and acquisition.

    Results are saved to a summary file.

    Args:
        dirpath (str): Path containing DICOM files to sort.
        summary (str): File name or path where to save grouping summary.
        force (bool): Force new processing.
        verbose (int): Set level of verbosity.

    Returns:
        summary (dict): Summary of the acquisitions.

    See Also:
        dcmpi.common.group_series, dcmpi.common.dcm_sources
    """
    # :: group DICOM files according to series number
    msg('Sort: {}'.format(dirpath))

    dirpath = os.path.realpath(dirpath)
    sorted_sources = {}
    for in_filename in sorted(os.listdir(dirpath)):
        in_filepath = os.path.join(dirpath, in_filename)
        try:
            dcm = pydcm.read_file(in_filepath)
        except IOError:
            msg('W: unable to process `{}`'.format(in_filepath), verbose,
                VERB_LVL['debug'])
        except Exception as e:
            print(e)
            msg('W: failed processing `{}`'.format(in_filepath), verbose,
                VERB_LVL['debug'])
        else:
            src_id = utl.INFO_SEP.join(
                (utl.PREFIX_ID['series'] +
                 '{:0{size}d}'.format(dcm.SeriesNumber, size=utl.D_NUM_DIGITS),
                 dcm.SeriesDescription))
            if src_id not in sorted_sources:
                sorted_sources[src_id] = []
            sorted_sources[src_id].append(in_filepath)
    # :: move DICOM files to their series number folder
    for src_id, sources in sorted(sorted_sources.items()):
        out_subdirpath = os.path.join(dirpath, src_id)
        if not os.path.exists(out_subdirpath) or force:
            if not os.path.exists(out_subdirpath):
                os.makedirs(out_subdirpath)
            for in_filepath in sources:
                out_filepath = os.path.join(out_subdirpath,
                                            os.path.basename(in_filepath))
                shutil.move(in_filepath, out_filepath)
    if summary:
        summary_dirpath = os.path.dirname(summary)
        if summary_dirpath:
            if not os.path.exists(summary_dirpath):
                os.makedirs(summary_dirpath)
        else:
            summary = os.path.join(dirpath, summary)
        summary = utl.group_series(dirpath, summary, force, verbose)
    return summary
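
A minimal usage sketch for `sorting` (the path is a placeholder and the directory is assumed to contain a flat dump of DICOM files straight from the scanner):

# Hypothetical usage sketch for sorting(); the path is a placeholder.
summary = sorting('/data/dicom/incoming/session_01')
# Files get moved into per-series subfolders whose names combine a
# zero-padded series number and the SeriesDescription (prefixes and
# separators come from dcmpi's `utl` constants); the grouping summary,
# also written to a JSON file, is returned as a dict.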