def test_decide_pointgroup(self):
    """Check that decide_pointgroup collapses a near-cubic P1 cell from an
    EIGER 9M data set to the expected cell, spacegroup number 197 and
    resolution estimate."""
    meta = {
        'beam': (120.55726144764847, 119.44351135718689),
        'detector': 'EIGER_9M',
        'detector_class': 'eiger 9M',
        'directory': '/mnt/optane/hbernstein/CollinsLaccases/data/CataApo05/5/NSLS2-18_10',
        'distance': 200.04000666485112,
        'end': 50,
        'exposure_time': 0.05000000074505806,
        'extra_text': 'LIB=/usr/local/crys-local/ccp4-7.0/bin/../lib/eiger2cbf.so\n',
        'matching': list(range(1, 51)),
        'oscillation': (0.0, 0.20000000298023224),
        'oscillation_axis': 'Omega_I_guess',
        'phi_end': 0.20000000298023224,
        'phi_start': 0.0,
        'phi_width': 0.20000000298023224,
        'pixel': (0.07500000356230885, 0.07500000356230885),
        'saturation': 92461.0,
        'sensor': 0.44999999227002263,
        'serial_number': 0,
        'size': (3269, 3110),
        'start': 1,
        'template': 'CataApo05_1444_??????.h5',
        'wavelength': 0.9201257824897766,
    }
    p1_cell = (199.7, 200.9, 202.5, 108.0, 109.9, 109.6)
    expected = ((232.96666666666667, 232.96666666666667, 232.96666666666667,
                 90.0, 90.0, 90.0), 197, 6.492)
    self.assertEqual(decide_pointgroup(p1_cell, meta, None), expected)
def process(self):
    '''Main routine: chain together all of the steps imported from
    autoindex, integrate, pointgroup, scale and merge, logging progress
    as we go.  Any failing step writes a diagnostic and returns early.'''

    def _strip_lib(text):
        # Return *text* with any existing "LIB=..." lines removed, or
        # None if nothing else remains.
        # FIX: original compared line[0:3] (3 chars) against the 4-char
        # string "LIB=", which can never match, so existing LIB= lines
        # were never actually stripped.
        kept = None
        for line in text.split('\n'):
            if line[:4] != 'LIB=':
                kept = line + '\n' if kept is None else kept + line + '\n'
        return kept

    def _archive(src):
        # Best-effort copy of *src* into the archive file path, if one
        # has been configured; log success or failure either way.
        logpath = get_afilepath()
        if not logpath:
            return
        dest = os.path.join(logpath, get_afileprefix() + src)
        try:
            shutil.copyfile(src, dest)
            write('Archived {} to {}'.format(src, dest))
        except Exception:
            write('{} not archived to {}'.format(src, dest))

    try:
        # purely informational - best effort, never fatal
        write('Running on: {}'.format(os.environ['HOSTNAME'].split('.')[0]))
    except Exception:
        pass

    # clamp the frame range to any user-supplied first / last image,
    # moving phi_start forward when leading frames are dropped
    if self._first_image is not None:
        if self._metadata['start'] < self._first_image:
            start = self._metadata['start']
            self._metadata['start'] = self._first_image
            self._metadata['phi_start'] += self._metadata['phi_width'] * \
                (self._first_image - start)

    if self._last_image is not None:
        if self._metadata['end'] > self._last_image:
            self._metadata['end'] = self._last_image

    # if the number of jobs was set to 0, decide something sensible:
    # this should be jobs of a minimum of 5 degrees, 10 frames
    if self._n_jobs == 0:
        phi = self._metadata['oscillation'][1]
        if phi == 0.0:
            # FIX: was Python 2 "raise RuntimeError, ..." syntax wrapped
            # in try/except:pass, which swallowed the error and then
            # divided by zero below; now raise cleanly on both versions
            raise RuntimeError('grid scan data')
        wedge = max(10, int(round(5.0 / phi)))
        frames = self._metadata['end'] - self._metadata['start'] + 1
        n_jobs = int(round(frames / wedge))
        if self._max_n_jobs > 0:
            if n_jobs > self._max_n_jobs:
                n_jobs = self._max_n_jobs
        self.set_n_jobs(n_jobs)

    # NOTE: str.format produces identical text to the old %-formatting,
    # so the duplicated version == 2 / else logging branches collapse
    write('Number of jobs: {}'.format(self._n_jobs))
    write('Number of cores: {}'.format(self._n_cores))

    step_time = time.time()

    write('Processing images: {} -> {}'.format(self._metadata['start'],
                                               self._metadata['end']))

    phi_end = self._metadata['phi_start'] + self._metadata['phi_width'] * \
        (self._metadata['end'] - self._metadata['start'] + 1)

    write('Phi range: {:.2f} -> {:.2f}'.format(self._metadata['phi_start'],
                                               phi_end))
    write('Template: {}'.format(self._metadata['template']))
    write('Wavelength: {:.5f}'.format(self._metadata['wavelength']))
    write('Working in: {}'.format(os.getcwd()))

    # manage the plugin (LIB=) line in the extra_text passed to XDS:
    # replace any existing LIB= line with the user-supplied library, or
    # strip it entirely when "None"/"none" was requested; the " "
    # sentinel leaves extra_text untouched
    if self._plugin_library not in (" ", "None", "none"):
        et = _strip_lib(self._metadata['extra_text'])
        lib_line = 'LIB=' + self._plugin_library + '\n'
        self._metadata['extra_text'] = lib_line if et is None else et + lib_line
    elif self._plugin_library in ("None", "none"):
        self._metadata['extra_text'] = _strip_lib(self._metadata['extra_text'])

    write('Extra commands: {}'.format(self._metadata['extra_text']))

    try:
        self._p1_unit_cell = autoindex(self._metadata,
                                       input_cell=self._input_cell_p1)
    except Exception as e:
        # FIX: close the error file deterministically - original used
        # traceback.print_exc(file=open(...)) and leaked the handle
        with open('fast_dp.error', 'w') as f:
            traceback.print_exc(file=f)
        write('Autoindexing error: {}'.format(e))
        _archive('fast_dp.error')
        return

    try:
        mosaics = integrate(self._metadata, self._p1_unit_cell,
                            self._resolution_low, self._n_jobs,
                            self._n_cores)
        write('Mosaic spread: {0[0]:.2f} < {0[1]:.2f} < {0[2]:.2f}'.format(
            tuple(mosaics)))
    except RuntimeError as e:
        with open('fast_dp.error', 'w') as f:
            traceback.print_exc(file=f)
        write('Integration error: {}'.format(e))
        _archive('fast_dp.error')
        return

    try:
        # FIXME in here will need a mechanism to take the input
        # spacegroup, determine the corresponding pointgroup and then
        # apply this (or verify that it is allowed then select)
        metadata = copy.deepcopy(self._metadata)
        cell, sg_num, resol = decide_pointgroup(
            self._p1_unit_cell, metadata,
            input_spacegroup=self._input_spacegroup)
        self._unit_cell = cell
        self._space_group_number = sg_num
        if not self._resolution_high:
            self._resolution_high = resol
    except RuntimeError as e:
        write('Pointgroup error: {}'.format(e))
        return

    try:
        self._unit_cell, self._space_group, self._nref, beam_pixels = scale(
            self._unit_cell, self._metadata, self._space_group_number,
            self._resolution_high, self._resolution_low, self._n_jobs,
            self._n_cores)
        # convert the refined beam centre from pixels to mm, swapping
        # the fast / slow axes as the original code did
        self._refined_beam = (self._metadata['pixel'][1] * beam_pixels[1],
                              self._metadata['pixel'][0] * beam_pixels[0])
    except RuntimeError as e:
        write('Scaling error: {}'.format(e))
        return

    try:
        self._xml_results = merge()
        _archive('fast_dp.mtz')
    except RuntimeError as e:
        write('Merging error: {}'.format(e))
        return

    write('Merging point group: {}'.format(self._space_group))
    write('Unit cell: {0[0]:6.2f} {0[1]:6.2f} {0[2]:6.2f} '
          '{0[3]:6.2f} {0[4]:6.2f} {0[5]:6.2f}'.format(self._unit_cell))
    duration = time.time() - step_time
    write('Processing took {} ({:d} s) [{:d} reflections]'.format(
        time.strftime('%Hh %Mm %Ss', time.gmtime(duration)),
        int(duration), self._nref))
    write('RPS: {:.1f}'.format(float(self._nref) / duration))

    # write out json and xml summaries of this processing run
    for func in (output.write_json, output.write_ispyb_xml):
        func(self._commandline, self._space_group, self._unit_cell,
             self._xml_results, self._start_image, self._refined_beam)
def reprocess(self):
    '''Main routine: chain together the last few steps of processing,
    i.e. pointgroup, scale and merge, reusing earlier integration
    results.  Any failing step writes a diagnostic and returns early.'''

    try:
        # purely informational - best effort, never fatal
        write('Running on: {}'.format(os.environ['HOSTNAME'].split('.')[0]))
    except Exception:
        pass

    # clamp the frame range to any user-supplied first / last image,
    # moving phi_start forward when leading frames are dropped
    if self._first_image is not None:
        if self._metadata['start'] < self._first_image:
            start = self._metadata['start']
            self._metadata['start'] = self._first_image
            self._metadata['phi_start'] += self._metadata['phi_width'] * \
                (self._first_image - start)

    if self._last_image is not None:
        if self._metadata['end'] > self._last_image:
            self._metadata['end'] = self._last_image

    step_time = time.time()

    # NOTE: str.format produces identical text to the old %-formatting,
    # so the duplicated version == 2 / else logging branches collapse
    write('Processing images: {} -> {}'.format(self._metadata['start'],
                                               self._metadata['end']))

    phi_end = self._metadata['phi_start'] + self._metadata['phi_width'] * \
        (self._metadata['end'] - self._metadata['start'] + 1)

    write('Phi range: {:.2f} -> {:.2f}'.format(self._metadata['phi_start'],
                                               phi_end))
    write('Template: {}'.format(self._metadata['template']))
    write('Wavelength: {:.5f}'.format(self._metadata['wavelength']))
    write('Working in: {}'.format(os.getcwd()))

    # just for information for the user, print all options for indexing
    # FIXME should be able to run the same from CORRECT.LP which would
    # work better....
    from xds_reader import read_xds_idxref_lp
    from cell_spacegroup import spacegroup_to_lattice

    results = read_xds_idxref_lp('IDXREF.LP')

    write('For reference, all indexing results:')
    write('{:3s} {:6s} {:6s} {:6s} {:6s} {:6s} {:6s}'.format(
        'Lattice ', 'a', 'b', 'c', 'alpha', 'beta', 'gamma'))
    for r in reversed(sorted(results)):
        # only the integer keys carry lattice solutions - skip the rest
        if not isinstance(r, int):
            continue
        cell = results[r][1]
        write('{:7s} {:6.2f} {:6.2f} {:6.2f} {:6.2f} {:6.2f} {:6.2f}'.format(
            spacegroup_to_lattice(r), cell[0], cell[1], cell[2],
            cell[3], cell[4], cell[5]))

    try:
        # FIXME in here will need a mechanism to take the input
        # spacegroup, determine the corresponding pointgroup and then
        # apply this (or verify that it is allowed then select)
        metadata = copy.deepcopy(self._metadata)
        cell, sg_num, resol = decide_pointgroup(
            self._p1_unit_cell, metadata,
            input_spacegroup=self._input_spacegroup)
        self._unit_cell = cell
        self._space_group_number = sg_num
        if not self._resolution_high:
            self._resolution_high = resol
    except RuntimeError as e:
        write('Pointgroup error: {}'.format(e))
        return

    try:
        self._unit_cell, self._space_group, self._nref, beam_pixels = scale(
            self._unit_cell, self._metadata, self._space_group_number,
            self._resolution_high, self._resolution_low, self._n_jobs,
            self._n_cores)
        # convert the refined beam centre from pixels to mm, swapping
        # the fast / slow axes as the original code did
        self._refined_beam = (self._metadata['pixel'][1] * beam_pixels[1],
                              self._metadata['pixel'][0] * beam_pixels[0])
    except RuntimeError as e:
        write('Scaling error: {}'.format(e))
        return

    try:
        self._xml_results = merge(hklout='fast_rdp.mtz',
                                  aimless_log='aimless_rerun.log')
    except RuntimeError as e:
        write('Merging error: {}'.format(e))
        return

    write('Merging point group: {}'.format(self._space_group))
    # FIX: format the six cell parameters individually - the original
    # passed the whole 6-tuple to a single {:6.2f} placeholder, which
    # raises TypeError at runtime
    write('Unit cell: {0[0]:6.2f} {0[1]:6.2f} {0[2]:6.2f} '
          '{0[3]:6.2f} {0[4]:6.2f} {0[5]:6.2f}'.format(self._unit_cell))
    duration = time.time() - step_time
    # FIX: original wrapped all three values in one tuple, so .format()
    # saw a single argument for three placeholders (IndexError)
    write('Reprocessing took {} ({:d} s) [{:d} reflections]'.format(
        time.strftime('%Hh %Mm %Ss', time.gmtime(duration)),
        int(duration), self._nref))

    # write out json and xml summaries under the reprocess filenames
    for func, filename in [(output.write_json, 'fast_rdp.json'),
                           (output.write_ispyb_xml, 'fast_rdp.xml')]:
        func(self._commandline, self._space_group, self._unit_cell,
             self._xml_results, self._start_image, self._refined_beam,
             filename=filename)