Example 1
    def process_dataset(self, metadata, flags=(), sample=None):
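        # Expand the frame specification and resolve the path of the first frame;
        # only this first file name is handed to the processing server below.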
        numbers = datatools.frameset_to_list(metadata['frames'])
        filename = os.path.join(metadata['directory'], metadata['filename'].format(numbers[0]))
        suffix = 'anom' if 'anomalous' in flags else 'native'
        params = {
            'uuid': str(uuid.uuid4()),
            'title': 'MX analysis in progress ...',
            'state': self.manager.State.ACTIVE,
            'data': metadata,

            'sample_id': metadata['sample_id'],
            'name': metadata['name'],
            'file_names': [filename],
            'anomalous': 'anomalous' in flags,
            'activity': 'proc-{}'.format(suffix),
            'type': metadata['type'],
        }
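        # Merge in sample details (without overwriting values already set) and
        # register the pending analysis with the report manager.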
        params = datatools.update_for_sample(params, sample, overwrite=False)
        self.manager.add_item(params, False)
        try:
            report = yield self.beamline.dps.process_mx(params, params['directory'], misc.get_project_name())
        except Exception as e:
            logger.error('MX analysis failed: {}'.format(str(e)))
            self.failed(e, params['uuid'], self.ResultType.MX)
            returnValue({})
        else:
            report['data_id'] = [_f for _f in [metadata.get('id')] if _f]
            self.save_report(report)
            self.succeeded(report, params['uuid'], self.ResultType.MX)
            returnValue(report)
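The yield/returnValue pairing above is Twisted's inlineCallbacks coroutine style, so process_dataset effectively returns a Deferred that fires with the analysis report (or an empty dict on failure). A minimal calling sketch, assuming an inlineCallbacks context; the controller, metadata and sample names are illustrative placeholders, not part of the example:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def run_mx_analysis(controller, metadata, sample=None):
        # Request anomalous processing; the report is an empty dict if processing failed.
        report = yield controller.process_dataset(metadata, flags=('anomalous',), sample=sample)
        if report:
            print('MX report keys:', sorted(report))
        defer.returnValue(report)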
Example 2
    def process_powder(self, metadata, flags=(), sample=None):
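        # Expand the frame set into the full list of image file names for XRD processing.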
        file_names = [
            os.path.join(metadata['directory'], metadata['filename'].format(number))
            for number in datatools.frameset_to_list(metadata['frames'])
        ]

        params = {
            'uuid': str(uuid.uuid4()),
            'title': 'XRD Analysis in progress ...',
            'state': self.manager.State.ACTIVE,
            'data': metadata,

            'sample_id': metadata['sample_id'],
            'name': metadata['name'],
            'file_names': file_names,
            'calib': 'calibrate' in flags,
            'activity': 'proc-xrd',
            'type': metadata['type'],
        }
        params = datatools.update_for_sample(params, sample, overwrite=False)
        self.manager.add_item(params, False)
        try:
            report = yield self.beamline.dps.process_xrd(params, params['directory'], misc.get_project_name())
        except Exception as e:
            logger.error('XRD analysis failed: {}'.format(str(e)))
            self.failed(e, params['uuid'], self.ResultType.XRD)
            returnValue({})
        else:
            report['data_id'] = [_f for _f in [metadata.get('id')] if _f]
            self.save_report(report)
            self.succeeded(report, params['uuid'], self.ResultType.XRD)
            returnValue(report)
Example 3
    def resume_sequence(self):
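        # Clear the pause flag, re-check which frames are already on disk,
        # then reconfigure the runs (without new snapshots) and restart collection.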
        self.paused = False
        collected = 0

        # reset 'existing' field
        for run in self.config['runs']:
            existing, resumable = self.beamline.detector.check(
                run['directory'], run['name'], first=run['first'])
            run['existing'] = datatools.summarize_list(existing)
            collected += len(datatools.frameset_to_list(run['existing']))

        self.configure(self.config['runs'], take_snapshots=False)
        self.beamline.all_shutters.open()
        self.start()
Example 4
    def check_runlist(self, runs):
        existing = {
            run['name']: self.beamline.detector.check(run['directory'], run['name'], first=run['first'])
            for run in runs
        }
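        # Each run name maps to a (frames already on disk, resumable flag) pair
        # reported by the detector.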

        config_data = copy.deepcopy(runs)
        success = True
        collected = 0

        # check for existing files
        if any(pair[0] for pair in existing.values()):
            details = '\n'.join([
                '{}: {}'.format(k, datatools.summarize_list(v[0]))
                for k, v in existing.items()
                if v[0]
            ])
            header = 'Frames from this sequence already exist!\n'
            sub_header = details + (
                '\n\n<b>What would you like to do?</b>\n'
                'NOTE: Starting over will delete existing data!\n'
            )
            buttons = (
                ('Cancel', RESPONSE_CANCEL),
                ('Start Over', RESPONSE_REPLACE_ALL),
            )
            # Add resume option if resumable
            if all(pair[1] for pair in existing.values()):
                buttons += (('Resume', RESPONSE_SKIP),)

            response = dialogs.warning(header, sub_header, buttons=buttons)
            if response == RESPONSE_SKIP:
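                # Resume: keep the frames already on disk, record them in each
                # run's 'existing' field and tally how many were collected.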
                success = True
                collected = 0
                for run in config_data:
                    run['existing'] = datatools.summarize_list(existing.get(run['name'], ([], False))[0])
                    collected += len(datatools.frameset_to_list(run['existing']))
            elif response == RESPONSE_REPLACE_ALL:
                success = True
            else:
                success = False
        return success, config_data, collected
Example 5
    def process_multiple(self, *metadatas, **kwargs):
        sample = kwargs.get('sample', None)
        flags = kwargs.get('flags', ())
        file_names = []
        names = []
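        # Collect the first frame path and the dataset name of each input dataset.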
        for metadata in metadatas:
            numbers = datatools.frameset_to_list(metadata['frames'])
            file_names.append(os.path.join(metadata['directory'], metadata['filename'].format(numbers[0])))
            names.append(metadata['name'])

        metadata = metadatas[0]
        suffix = 'mad' if 'mad' in flags else 'merge'
        params = {
            'uuid': str(uuid.uuid4()),
            'title': 'MX {} analysis in progress ...'.format(suffix.upper()),
            'state': self.manager.State.ACTIVE,
            'data': metadata,

            'sample_id': metadata['sample_id'],
            'name': '-'.join(names),
            'file_names': file_names,
            'anomalous': 'anomalous' in flags,
            'merge': 'merge' in flags,
            'mad': 'mad' in flags,
            'activity': 'proc-{}'.format(suffix),
            'type': metadata['type'],
        }
        params = datatools.update_for_sample(params, sample, overwrite=False)
        self.manager.add_item(params, False)

        try:
            report = yield self.beamline.dps.process_mx(params, params['directory'], misc.get_project_name())
        except Exception as e:
            logger.error('MX analysis failed: {}'.format(str(e)))
            self.failed(e, params['uuid'], self.ResultType.MX)
            returnValue({})
        else:
            report['data_id'] = [_f for _f in [metadata.get('id') for metadata in metadatas] if _f]
            self.save_report(report)
            self.succeeded(report, params['uuid'], self.ResultType.MX)
            returnValue(report)
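process_multiple takes the datasets as positional arguments and its options as keyword arguments. A short sketch of a MAD-style call under the same inlineCallbacks assumption; controller and the three metadata placeholders are illustrative only:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def run_mad_analysis(controller, peak, inflection, remote, sample=None):
        # The 'mad' flag selects the 'proc-mad' activity; without it the
        # datasets are treated as a merge job.
        report = yield controller.process_multiple(
            peak, inflection, remote, flags=('mad', 'anomalous'), sample=sample
        )
        defer.returnValue(report)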