Code example #1
def calc_strategy(data_info, options=None):
    options = options or {}
    os.chdir(data_info['working_directory'])

    # note any overridden parameters in the step description
    suffix = []
    if options.get('resolution'):
        suffix.append("res=%0.2f" % options.get('resolution'))
    if options.get('anomalous'):
        suffix.append("anomalous")

    if len(suffix) > 0:
        step_descr = "Calculating strategy [{}]".format(", ".join(suffix))
    else:
        step_descr = 'Calculating strategy'

    if not misc.file_requirements('CORRECT.LP', 'BKGPIX.cbf', 'XDS_ASCII.HKL',
                                  'GXPARM.XDS'):
        return {
            'step': 'strategy',
            'success': False,
            'reason': 'Required files from integration missing'
        }

    if os.path.exists('GXPARM.XDS'):
        misc.backup_files('XPARM.XDS')
        shutil.copy('GXPARM.XDS', 'XPARM.XDS')
    run_info = {
        'mode': options.get('mode'),
        'anomalous': options.get('anomalous', False)
    }
    run_info.update(data_info)
    xdsio.write_xds_input("XPLAN", run_info)

    try:
        programs.xds_par(step_descr)
        info = xds.parse_xplan()
    except autoprocess.errors.ProcessError as e:
        return {'step': 'strategy', 'success': False, 'reason': str(e)}

    try:
        # a BEST failure still leaves a usable XDS strategy, so report it as a
        # partial success together with the data gathered so far
        programs.best(data_info, options)
        info.update(best.parse_best())
    except autoprocess.errors.ProcessError as e:
        return {
            'step': 'strategy',
            'success': True,
            'reason': str(e),
            'data': info
        }

    return {'step': 'strategy', 'success': True, 'data': info}
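
A hypothetical invocation of calc_strategy might look like the sketch below; the working directory path and the options shown are illustrative, and data_info would normally also carry the dataset parameters that xdsio.write_xds_input expects.

# hypothetical usage sketch, not taken from the project
data_info = {
    'working_directory': '/data/proc/sample_1',  # assumed path
}
result = calc_strategy(data_info, options={'anomalous': True, 'resolution': 2.0})
if result['success']:
    strategy = result.get('data', {})
else:
    print('Strategy step failed:', result['reason'])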
Code example #2
File: indexing.py  Project: michel4j/auto-process
def auto_index(data_info, options=None):
    options = options or {}
    os.chdir(data_info['working_directory'])
    step_descr = 'Determining lattice orientation and parameters'
    jobs = 'IDXREF'
    run_info = {'mode': options.get('mode')}
    run_info.update(data_info)
    if not misc.file_requirements('XDS.INP', 'SPOT.XDS'):
        return {
            'step': 'indexing',
            'success': False,
            'reason': "Required files not found"
        }
    try:
        xdsio.write_xds_input(jobs, run_info)
        programs.xds_par(step_descr)
        info = xds.parse_idxref()
        diagnosis = diagnose_index(info)

        _retries = 0
        sigma = 6
        spot_size = 3
        _aliens_removed = False
        _weak_removed = False
        _spot_adjusted = False

        while info.get('failure_code') > 0 and _retries < 8:
            _all_images = (run_info['spot_range'][0] == run_info['data_range'])
            _retries += 1
            _logger.warning('Indexing failed:')
            for prob in diagnosis['problems']:
                _logger.warning('... {}'.format(PROBLEMS[prob]))

            if options.get('backup', False):
                misc.backup_files('SPOT.XDS', 'IDXREF.LP')

            if diagnosis['problems'] & {PROBLEMS.index_origin}:
                if not _all_images:
                    step_descr = '-> Expanding Spot Range'
                    run_info['spot_range'] = [run_info['data_range']]
                else:
                    step_descr = '-> Adjusting detector origin'
                    run_info['beam_center'] = diagnosis['options'].get(
                        'beam_center', run_info['beam_center'])
                xdsio.write_xds_input('COLSPOT IDXREF', run_info)
                programs.xds_par(step_descr)
                info = xds.parse_idxref()
                diagnosis = diagnose_index(info)
            elif (diagnosis['problems']
                  & {PROBLEMS.few_spots, PROBLEMS.dimension_2d
                     }) and not _all_images:
                run_info.update(spot_range=[run_info['data_range']])
                xdsio.write_xds_input('IDXREF', run_info)
                programs.xds_par('-> Expanding Spot Range')
                info = xds.parse_idxref()
                diagnosis = diagnose_index(info)
            elif (diagnosis['problems'] & {
                    PROBLEMS.poor_solution, PROBLEMS.spot_accuracy,
                    PROBLEMS.non_integral
            }) and not _spot_adjusted:
                spot_size *= 1.5
                sigma = 6
                new_params = {
                    'sigma': sigma,
                    'min_spot_size': spot_size,
                    'refine_index': "CELL BEAM ORIENTATION AXIS"
                }
                if not _all_images:
                    new_params['spot_range'] = [run_info['data_range']]
                run_info.update(new_params)
                xdsio.write_xds_input('COLSPOT IDXREF', run_info)
                programs.xds_par(
                    '-> Adjusting spot size and refinement parameters')
                info = xds.parse_idxref()
                diagnosis = diagnose_index(info)
                _spot_adjusted = spot_size > 12
            elif (diagnosis['problems']
                  & {PROBLEMS.unindexed_spots}) and not _weak_removed:
                sigma += 3
                _filter_spots(sigma=sigma)
                run_info.update(sigma=sigma)
                xdsio.write_xds_input('IDXREF', run_info)
                programs.xds_par(
                    '-> Removing weak spots (Sigma < {:2.0f})'.format(sigma))
                info = xds.parse_idxref()
                diagnosis = diagnose_index(info)
                _weak_removed = sigma >= 12
            elif (diagnosis['problems']
                  & {PROBLEMS.unindexed_spots, PROBLEMS.multiple_subtrees
                     }) and not _aliens_removed:
                _filter_spots(unindexed=True)
                xdsio.write_xds_input(jobs, run_info)
                programs.xds_par('-> Removing all alien spots')
                info = xds.parse_idxref()
                diagnosis = diagnose_index(info)
                _aliens_removed = True
            else:
                _logger.critical('.. Unable to proceed.')
                _retries = 999

    except autoprocess.errors.ProcessError as e:
        return {'step': 'indexing', 'success': False, 'reason': str(e)}

    if info.get('failure_code') == 0:
        return {'step': 'indexing', 'success': True, 'data': info}
    else:
        return {
            'step': 'indexing',
            'success': False,
            'reason': info['failure']
        }
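
The retry loop above selects a remedy by intersecting the set of diagnosed problems with small sets of known failure modes. The standalone sketch below illustrates that dispatch pattern with a made-up Problem enum standing in for the project's PROBLEMS constants.

from enum import Enum, auto

class Problem(Enum):  # stand-in for the project's PROBLEMS constants
    index_origin = auto()
    few_spots = auto()
    unindexed_spots = auto()

diagnosed = {Problem.index_origin, Problem.unindexed_spots}

# each branch fires when any of its trigger problems was diagnosed
if diagnosed & {Problem.index_origin}:
    remedy = 'adjust beam center or expand the spot range'
elif diagnosed & {Problem.few_spots}:
    remedy = 'expand the spot range to all images'
elif diagnosed & {Problem.unindexed_spots}:
    remedy = 'filter weak or alien spots and re-run IDXREF'
else:
    remedy = 'give up'
print(remedy)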
Code example #3
def integrate(data_info, options=None):
    options = {} if options is None else options
    os.chdir(data_info['working_directory'])
    run_info = {'mode': options.get('mode')}
    run_info.update(data_info)
    if options.get('backup', False):
        misc.backup_files('INTEGRATE.LP', 'INTEGRATE.HKL')

    # calculate the actual number of frames, excluding any skipped ranges
    full_range = list(
        range(run_info['data_range'][0], run_info['data_range'][1] + 1))
    skip_ranges = []
    for r_s, r_e in run_info['skip_range']:
        skip_ranges.extend(list(range(r_s, r_e + 1)))
    num_frames = len(set(full_range) - set(skip_ranges))

    # if optimizing the integration, re-use the refined geometry from GXPARM.XDS
    if options.get('optimize', False) and os.path.exists('GXPARM.XDS'):
        misc.backup_files('XPARM.XDS')
        shutil.copy('GXPARM.XDS', 'XPARM.XDS')
        step_descr = 'Optimizing {:d} frames of dataset {}'.format(
            num_frames, log.TermColor.italics(data_info['name']))
    else:
        step_descr = 'Integrating {:d} frames of dataset {}'.format(
            num_frames, log.TermColor.italics(data_info['name']))

    # check if we are screening
    screening = options.get('mode') == 'screen'

    xdsio.write_xds_input("DEFPIX INTEGRATE", run_info)
    if not misc.file_requirements('X-CORRECTIONS.cbf', 'Y-CORRECTIONS.cbf',
                                  'XPARM.XDS'):
        return {
            'step': 'integration',
            'success': False,
            'reason': 'Required files missing'
        }

    try:
        programs.xds_par(step_descr)
        info = xds.parse_integrate()
    except autoprocess.errors.ProcessError as e:
        return {'step': 'integration', 'success': False, 'reason': str(e)}
    except Exception:
        return {
            'step': 'integration',
            'success': False,
            'reason': "Could not parse integrate output file"
        }

    if info.get('failure') is None:
        if data_info['working_directory'] == options.get('directory'):
            info['output_file'] = 'INTEGRATE.HKL'
        else:
            info['output_file'] = os.path.join(data_info['name'],
                                               'INTEGRATE.HKL')
        return {'step': 'integration', 'success': True, 'data': info}
    else:
        return {
            'step': 'integration',
            'success': False,
            'reason': info['failure']
        }
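
The frame count reported in the progress message is the size of the data range minus any skipped ranges, computed with set arithmetic. A standalone sketch with assumed illustrative values:

data_range = (1, 100)     # assumed: dataset covers frames 1..100
skip_range = [(41, 45)]   # assumed: frames 41..45 are excluded

full_range = set(range(data_range[0], data_range[1] + 1))
skipped = set()
for r_s, r_e in skip_range:
    skipped.update(range(r_s, r_e + 1))

num_frames = len(full_range - skipped)
print(num_frames)  # 95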
Code example #4
def scale_datasets(dsets, options=None, message="Scaling"):
    options = options or {}
    os.chdir(options.get('directory', '.'))

    # note any overridden parameters in the step description
    suffix = []
    suffix_txt = ""
    if options.get('resolution'):
        suffix.append(f"res={options['resolution']:0.2f}")
    if len(suffix) > 0:
        suffix_txt = f"with [{','.join(suffix)}]"
    # Check Requirements: every dataset must have completed the correction step
    for dset in dsets.values():
        if dset.results.get('correction') is None:
            return {
                'step': 'scaling',
                'success': False,
                'reason': 'Can only scale after successful integration'
            }

    sg_name = xtal.SG_SYMBOLS[
        list(dsets.values())[0].results['correction']['summary']['spacegroup']]

    mode = options.get('mode', 'simple')
    if mode == 'mad':
        step_descr = ("Scaling {:d} MAD datasets in space-group {} {}".format(
            len(dsets), sg_name, suffix_txt))
        sections = []
        for dset in list(dsets.values()):
            dres = dset.results
            resol = options.get('resolution',
                                dres['correction']['summary']['resolution'][0])
            in_file = dres['correction']['output_file']
            out_file = os.path.join(os.path.dirname(in_file), "XSCALE.HKL")
            sections.append({
                'anomalous': options.get('anomalous', False),
                'strict_absorption': check_chisq(dres['correction']),
                'output_file': out_file,
                'crystal': 'cryst1',
                'inputs': [{'input_file': in_file, 'resolution': resol}],
                'shells': xtal.resolution_shells(resol),
            })
            if options.get('backup', False):
                misc.backup_files(out_file, 'XSCALE.LP')
            dset.results['scaling'] = {'output_file': out_file}
    else:
        if options.get('mode') == 'merge':
            step_descr = ("Merging {:d} datasets in '{}' {}".format(
                len(dsets), sg_name, suffix_txt))
        else:
            step_descr = ("Scaling dataset in '{}' {}".format(
                sg_name, suffix_txt))
        inputs = []
        resols = []
        strict = False
        for dset in list(dsets.values()):
            dres = dset.results
            resol = options.get('resolution',
                                dres['correction']['summary']['resolution'][0])
            resols.append(resol)
            in_file = dres['correction']['output_file']
            inputs.append({'input_file': in_file, 'resolution': resol})
            strict = check_chisq(dres['correction'])
        sections = [{
            'anomalous': options.get('anomalous', False),
            'strict_absorption': strict,
            'shells': xtal.resolution_shells(min(resols)),
            'output_file': "XSCALE.HKL",
            'inputs': inputs,
        }]
        if options.get('backup', False):
            misc.backup_files('XSCALE.HKL', 'XSCALE.LP')

    xscale_options = {'sections': sections}

    xdsio.write_xscale_input(xscale_options)
    try:
        programs.xscale_par(step_descr)
        raw_info = xds.parse_xscale('XSCALE.LP')
    except autoprocess.errors.ProcessError as e:
        for dset in list(dsets.values()):
            dset.log.append((time.time(), 'scaling', False, str(e)))
        return {'step': 'scaling', 'success': False, 'reason': str(e)}

    if len(raw_info) == 1:
        info = list(raw_info.values())[0]
        # Set resolution
        if options.get('resolution'):
            resol = (options.get('resolution'), 4)
        else:
            resol = xtal.select_resolution(info['statistics'])
        info['summary']['resolution'] = resol

        if options.get('mode') == 'merge':
            dset = copy.deepcopy(list(dsets.values())[0])
            dset.parameters['name'] = 'combined'
            dset.name = dset.parameters['name']
            dsets[dset.name] = dset
        else:
            dset = list(dsets.values())[0]

        dset.results['scaling'] = info
        dset.log.append((time.time(), 'scaling', True, None))
    else:
        for name, info in list(raw_info.items()):
            # Set resolution
            if options.get('resolution'):
                resol = (options.get('resolution'), 4)
            else:
                resol = xtal.select_resolution(info['statistics'])
            info['summary']['resolution'] = resol

            dsets[name].results['scaling'].update(info)
            dsets[name].log.append((time.time(), 'scaling', True, None))

    return {'step': 'scaling', 'success': True}
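
For reference, the input description handed to xdsio.write_xscale_input has roughly the shape sketched below; the file names and numeric values are illustrative only.

xscale_options = {
    'sections': [{
        'anomalous': False,
        'strict_absorption': False,
        'shells': [3.5, 3.0, 2.7, 2.5],   # illustrative resolution shells
        'output_file': 'XSCALE.HKL',
        'inputs': [
            {'input_file': 'XDS_ASCII.HKL', 'resolution': 2.5},
        ],
    }]
}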
Code example #5
def correct(data_info, options=None):
    options = options or {}
    os.chdir(data_info['working_directory'])
    message = options.get('message', "Applying corrections to")
    step_descr = '{} dataset "{}" for space-group {}'.format(
        message, data_info['name'], xtal.SG_SYMBOLS[data_info['space_group']])
    run_info = {'mode': options.get('mode')}
    run_info.update(data_info)

    if not misc.file_requirements('INTEGRATE.HKL', 'X-CORRECTIONS.cbf',
                                  'Y-CORRECTIONS.cbf'):
        return {
            'step': 'correction',
            'success': False,
            'reason': 'Required files missing'
        }

    if options.get('backup', False):
        misc.backup_files('XDS_ASCII.HKL', 'CORRECT.LP')
    xdsio.write_xds_input("CORRECT", run_info)

    try:
        programs.xds_par(step_descr)
        info = xds.parse_correct()

        # for anomalous data, repeat the correction with strict absorption
        # parameters if any correction factor's chi-squared deviates strongly from 1.0
        if info.get('correction_factors') is not None and options.get(
                'anomalous', False):
            for f in info['correction_factors'].get('factors', []):
                if abs(f['chi_sq_fit'] - 1.0) > 0.25:
                    run_info.update({'strict_absorption': True})
                    xdsio.write_xds_input("CORRECT", run_info)
                    programs.xds_par()
                    info = xds.parse_correct()
                    info['strict_absorption'] = True
                    break

        # record the output file location relative to the top-level directory
        if data_info['working_directory'] == options.get('directory'):
            info['output_file'] = 'XDS_ASCII.HKL'
        else:
            sub_dir = os.path.relpath(data_info['working_directory'],
                                      options.get('directory', ''))
            info['output_file'] = os.path.join(sub_dir, 'XDS_ASCII.HKL')

        # gather extra statistics with XDSSTAT
        programs.xdsstat('XDS_ASCII.HKL')
        stat_info = xds.parse_xdsstat()
        info.update(stat_info)

    except autoprocess.errors.ProcessError as e:
        return {'step': 'correction', 'success': False, 'reason': str(e)}

    if info.get('failure') is None:
        if len(info.get('statistics', [])) > 1 and info.get('summary') is not None:
            info['summary']['resolution'] = xtal.select_resolution(
                info['statistics'])

        return {'step': 'correction', 'success': True, 'data': info}
    else:
        return {
            'step': 'correction',
            'success': False,
            'reason': info['failure']
        }
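
The strict-absorption re-run is gated on the chi-squared fit of the correction factors. A standalone sketch of that check with made-up factor values, using the 0.25 threshold from the code above:

factors = [{'chi_sq_fit': 1.40}, {'chi_sq_fit': 0.95}]  # illustrative values

# re-run CORRECT with strict absorption when any factor's chi-squared
# deviates from 1.0 by more than 0.25
needs_strict = any(abs(f['chi_sq_fit'] - 1.0) > 0.25 for f in factors)
print(needs_strict)  # True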