def __init__(self, source_image=None, object_folder=None, gain = 0.32, params=None):
    '''Initialise the script: build the working PHIL scope and a
    single-image datablock for the given source image.

    :param source_image: path to a single raw diffraction image
    :param object_folder: folder for per-image integration output
    :param gain: detector gain (accepted for interface compatibility;
                 not used in this initialiser)
    :param params: IOTA parameter object (stored on the instance)
    '''
    from dials.util.options import OptionParser
    from dxtbx.datablock import DataBlockFactory
    from iotbx.phil import parse

    # Master scope: everything from xtc_process.  (The previous imports of
    # xfel_process.phil_scope and dials.array_family.flex were dead code --
    # the former was immediately rebound below, the latter never used.)
    phil_scope = parse('''
      include scope xfel.command_line.xtc_process.phil_scope
    ''', process_includes=True)

    # Extra definitions layered on top of the master scope
    sub_phil_scope = parse('''
      output {
        cxi_merge_picklefile = None
          .type = str
          .help = Output integration results for each color data to separate cctbx.xfel-style pickle files
      }
      indexing {
        stills {
          ewald_proximity_resolution_cutoff = 2.0
            .type = float
            .help = For calculating the area under the green curve, or the acceptable
            .help = volume of reciprocal space for spot prediction, use this high-resolution cutoff
        }
      }
      cxi_merge {
        include scope xfel.command_line.cxi_merge.master_phil
      }
    ''', process_includes=True)
    phil_scope.adopt_scope(sub_phil_scope)

    # Create the parser
    self.parser = OptionParser(
        phil=phil_scope,
        read_datablocks=True,
        read_datablocks_from_images=True)

    self.params = params
    self.img = [source_image]
    self.obj_base = object_folder
    self.phil = phil_scope.extract()

    # Build a datablock from the single image, suppressing console chatter
    with misc.Capturing() as junk_output:
        self.datablock = DataBlockFactory.from_filenames(self.img)[0]

    self.obj_filename = "int_{}".format(os.path.basename(self.img[0]))
    # NOTE(review): joining obj_base with the *full* image path -- os.path.join
    # discards obj_base entirely if img[0] is absolute; possibly the basename
    # or obj_filename was intended.  TODO confirm against callers.
    self.phil.output.cxi_merge_picklefile = os.path.join(self.obj_base, self.img[0])
def load_script(self, out_dir):
    '''Load a PRIME script and populate the input window from its parameters.

    :param out_dir: directory containing the PRIME script file
    '''
    import iotbx.phil as ip

    script = os.path.join(out_dir, self.prime_filename)
    user_phil = ip.parse(open(script).read())
    self.pparams = master_phil.fetch(sources=[user_phil]).extract()
    self.input_window.pparams = self.pparams

    self.input_window.inp_box.ctr.SetValue(str(self.pparams.data[0]))
    current_dir = os.path.dirname(self.pparams.run_no)
    self.input_window.out_box.ctr.SetValue(str(current_dir))
    # BUGFIX: GetValue was previously referenced without calling it, so this
    # compared the string form of a bound method (never empty) and the
    # fallback output directory was never applied.
    if str(self.input_window.out_box.ctr.GetValue()).lower() == '':
        self.input_window.out_box.ctr.SetValue(self.out_dir)
    if str(self.pparams.title).lower() != 'none':
        self.input_window.title_box.ctr.SetValue(str(self.pparams.title))

    # Prefer the isomorphous reference; fall back to the refinement
    # reference and tick the "use reference" box in that case
    if str(self.pparams.hklisoin).lower() != 'none':
        self.input_window.ref_box.ctr.SetValue(str(self.pparams.hklisoin))
    elif str(self.pparams.hklrefin).lower() != 'none':
        self.input_window.ref_box.ctr.SetValue(str(self.pparams.hklrefin))
        self.input_window.opt_chk_useref.SetValue(True)

    # Default to 500 residues when not specified
    if str(self.pparams.n_residues).lower() == 'none':
        self.input_window.opt_spc_nres.SetValue(500)
    else:
        self.input_window.opt_spc_nres.SetValue(int(self.pparams.n_residues))
    self.input_window.opt_spc_nproc.SetValue(int(self.pparams.n_processors))
def create_trial(self, d_min = 1.5, n_bins = 10, **kwargs):
    '''Create a Trial record, plus Isoform/Cell/Bin records for any
    isoforms defined in the trial's target PHIL.

    :param d_min: high-resolution cutoff used to bin each isoform's
                  Miller set (only used if isoforms are in this trial)
    :param n_bins: number of resolution bins (only used with isoforms)
    :param kwargs: passed straight through to the Trial constructor
    :return: the newly created Trial object
    '''
    # d_min and n_bins only used if isoforms are in this trial
    trial = Trial(self, **kwargs)
    if trial.target_phil_str is not None:
        from iotbx.phil import parse
        # Map the dispatcher name onto the processing backend
        backend = ['labelit', 'dials'][['cxi.xtc_process', 'cctbx.xfel.xtc_process'].index(self.params.dispatcher)]
        if backend == 'labelit':
            from spotfinder.applications.xfel import cxi_phil
            trial_params = cxi_phil.cxi_versioned_extract().persist.phil_scope.fetch(parse(trial.target_phil_str)).extract()
            isoforms = trial_params.isoforms
        elif backend == 'dials':
            from xfel.command_line.xtc_process import phil_scope
            trial_params = phil_scope.fetch(parse(trial.target_phil_str)).extract()
            isoforms = trial_params.indexing.stills.isoforms
        else:
            # dispatcher matched the index() list above, so this is unreachable
            assert False
        if len(isoforms) > 0:
            for isoform in isoforms:
                print "Creating isoform", isoform.name
                db_isoform = Isoform(self, name = isoform.name, trial_id = trial.id)
                a, b, c, alpha, beta, gamma = isoform.cell.parameters()
                cell = self.create_cell(cell_a = a, cell_b = b, cell_c = c,
                                        cell_alpha = alpha, cell_beta = beta,
                                        cell_gamma = gamma,
                                        lookup_symbol = isoform.lookup_symbol,
                                        isoform_id = db_isoform.id)
                from cctbx.crystal import symmetry
                cs = symmetry(unit_cell = isoform.cell, space_group_symbol=str(isoform.lookup_symbol))
                mset = cs.build_miller_set(anomalous_flag=False, d_min=d_min)
                binner = mset.setup_binner(n_bins=n_bins)
                # NOTE(review): the tuple unpack below rebinds d_min,
                # shadowing the parameter -- for a second isoform the Miller
                # set is then built with the last bin's d_min.  Confirm this
                # is intended.
                for i in binner.range_used():
                    d_max, d_min = binner.bin_d_range(i)
                    Bin(self, number = i, d_min = d_min, d_max = d_max,
                        total_hkl = binner.counts_complete()[i],
                        cell_id = cell.id)
    return trial
def write_dials_defaults(current_path):
    """Write default DIALS parameters to <current_path>/dials_target.phil.

    MAY BE UNNECESSARY IN LONG TERM.

    :param current_path: directory where the target file is written
    """
    from iotbx.phil import parse

    # Master scope from xtc_process.  (The previous import of
    # dials.command_line.process.phil_scope was dead code -- the name was
    # immediately rebound here -- and has been removed.)
    phil_scope = parse('''
      include scope xfel.command_line.xtc_process.phil_scope
    ''', process_includes=True)

    # Extra definitions layered on top of the master scope
    sub_phil_scope = parse('''
      output {
        cxi_merge_picklefile = None
          .type = str
          .help = Output integration results for each color data to separate cctbx.xfel-style pickle files
      }
      indexing {
        stills {
          ewald_proximity_resolution_cutoff = 2.0
            .type = float
            .help = For calculating the area under the green curve, or the acceptable
            .help = volume of reciprocal space for spot prediction, use this high-resolution cutoff
        }
      }
      cxi_merge {
        include scope xfel.command_line.cxi_merge.master_phil
      }
    ''', process_includes=True)
    phil_scope.adopt_scope(sub_phil_scope)

    # Capture the formatted scope and write it out line by line
    with Capturing() as output:
        phil_scope.show()
    def_target_file = '{}/dials_target.phil'.format(current_path)
    with open(def_target_file, 'w') as targ:
        for one_output in output:
            targ.write('{}\n'.format(one_output))
def load_cxi_phil(path, args=[]):
    '''Load a LABELIT/CXI target PHIL file and fold in command-line overrides.

    NOTE(review): the mutable default "args=[]" is shared across calls; it is
    only iterated here, but confirm no caller mutates it.

    :param path: target file path; tried verbatim, then with .params / .phil
    :param args: extra PHIL assignment strings to apply on top of the file
    '''
    import os
    from labelit.phil_preferences import iotbx_defs, libtbx_defs
    from iotbx import phil
    from libtbx.phil.command_line import argument_interpreter
    from libtbx.utils import Sorry

    # Accept the path as given, or with a known parameter-file extension
    exts = ["", ".params", ".phil"]
    foundIt = False
    for ext in exts:
        if os.path.exists(path + ext):
            foundIt = True
            path += ext
            break
    if not foundIt:
        raise Sorry("Target not found: " + path)

    # Master scope built from the LABELIT preference definitions
    master_phil = phil.parse(input_string=iotbx_defs + libtbx_defs,
                             process_includes=True)
    horizons_phil = master_phil.fetch(
        sources=[phil.parse(file_name=path, process_includes=True)])

    # Fold in command-line PHIL assignments; unparseable arguments are
    # silently skipped.  NOTE(review): the visible code never returns or
    # uses horizons_phil -- the tail of this function appears truncated in
    # this view; presumably it returns horizons_phil.  Confirm upstream.
    argument_interpreter = argument_interpreter(master_phil=master_phil)
    consume = []
    for arg in args:
        try:
            command_line_params = argument_interpreter.process(arg=arg)
            horizons_phil = horizons_phil.fetch(sources=[command_line_params,])
            consume.append(arg)
        except Sorry,e:
            pass
def __init__(self, source_image, object_folder, final_folder, final_filename, final, logfile, gain = 0.32, params=None):
    '''Initialise the script: read DIALS settings and set up per-image
    output filenames and a single-image datablock.

    :param source_image: path to the raw image to process
    :param object_folder: folder for intermediate per-image output files
    :param final_folder: folder for final output (referenced only by the
                         commented-out log-name expression)
    :param final_filename: path of the final integration pickle
    :param final: dict of final output info; its 'final' key is set here
    :param logfile: path of the integration log file
    :param gain: detector gain
    :param params: IOTA parameter object
    '''
    from dxtbx.datablock import DataBlockFactory

    self.params = params

    # Read settings from the DIALS target (.phil) file
    # If none is provided, use default settings (and may God have mercy)
    if self.params.dials.target is not None:  # was "!= None"
        with open(self.params.dials.target, 'r') as settings_file:
            settings_file_contents = settings_file.read()
        settings = parse(settings_file_contents)
        current_phil = phil_scope.fetch(sources=[settings])
        self.phil = current_phil.extract()
    else:
        self.phil = phil_scope.extract()

    # Set general file-handling settings; every per-image output shares a
    # common "<object_folder>/<basename>" prefix, hoisted here once
    file_basename = os.path.basename(source_image).split('.')[0]
    prefix = "{}/{}".format(object_folder, file_basename)
    self.phil.output.datablock_filename = "{}.json".format(prefix)
    self.phil.output.indexed_filename = "{}_indexed.pickle".format(prefix)
    self.phil.output.strong_filename = "{}_strong.pickle".format(prefix)
    self.phil.output.refined_experiments_filename = "{}_refined_experiments.json".format(prefix)
    self.phil.output.integrated_filename = "{}_integrated.pickle".format(prefix)
    self.phil.output.profile_filename = "{}_profile.phil".format(prefix)
    self.phil.output.integration_pickle = final_filename
    self.int_log = logfile  # "{}/int_{}.log".format(final_folder, file_basename)

    self.img = [source_image]
    self.obj_base = object_folder
    self.gain = gain
    self.fail = None
    self.frame = None
    self.final = final
    self.final['final'] = final_filename

    # Build a datablock from the single image, suppressing console output
    with misc.Capturing() as junk_output:
        self.datablock = DataBlockFactory.from_filenames(self.img)[0]
    self.obj_filename = "int_{}".format(os.path.basename(self.img[0]))
def from_parameters(params=None):
    '''Construct a spot finder from a set of parameters.

    :param params: The input parameters (defaults are used when None)
    :returns: The spot finder instance
    '''
    from dials.util.masking import MaskGenerator
    from dials.algorithms.spot_finding.finder import SpotFinder
    from libtbx.phil import parse

    # Fall back to the default parameter set when none is supplied
    if params is None:
        params = phil_scope.fetch(source=parse("")).extract()

    spf = params.spotfinder

    # Load the lookup mask and store it back on the parameters
    spf.lookup.mask = SpotFinderFactory.load_image(spf.lookup.mask)

    # Assemble the collaborators: spot filter, threshold strategy and the
    # generator for the hot-pixel mask
    spot_filter = SpotFinderFactory.configure_filter(params)
    threshold = SpotFinderFactory.configure_threshold(params)
    hot_mask_generator = MaskGenerator(spf.filter)

    # Hand everything to the spot finder
    return SpotFinder(
        threshold_function = threshold,
        mask = spf.lookup.mask,
        filter_spots = spot_filter,
        scan_range = spf.scan_range,
        write_hot_mask = spf.write_hot_mask,
        mp_method = spf.mp.method,
        nproc = spf.mp.nproc,
        mp_chunksize = spf.mp.chunksize,
        max_strong_pixel_fraction = spf.filter.max_strong_pixel_fraction,
        region_of_interest = spf.region_of_interest,
        mask_generator = hot_mask_generator,
        min_spot_size = spf.filter.min_spot_size,
        max_spot_size = spf.filter.max_spot_size)
def process_input(args, phil_args, input_file, mode='auto', now=None):
    """ Read and parse parameter file
        input: input_file_list - PHIL-format files w/ parameters
        output: params - PHIL-formatted parameters
                txt_output - plain text-formatted parameters

        NOTE(review): 'args' is unused in the visible code and nothing is
        returned here -- the tail of this function appears truncated in
        this view.  If 'mode' is neither 'file' nor 'auto', 'params' is
        never bound and the format() call below raises NameError.
    """
    from libtbx.phil.command_line import argument_interpreter
    from libtbx.utils import Sorry

    # 'file' mode: read parameters from the given PHIL file;
    # 'auto' mode: start from master defaults and fill in run metadata
    if mode == 'file':
        user_phil = [ip.parse(open(inp).read()) for inp in [input_file]]
        working_phil = master_phil.fetch(sources=user_phil)
        params = working_phil.extract()
    elif mode == 'auto':
        params = master_phil.extract()
        params.description = 'IOTA parameters auto-generated on {}'.format(now)
        params.input = [input_file]
        # Pick a default target-file name for the chosen backend
        if params.advanced.integrate_with == 'dials':
            params.dials.target = 'dials.phil'
        elif params.advanced.integrate_with == 'cctbx':
            params.cctbx.target = 'cctbx.phil'

    final_phil = master_phil.format(python_object=params)

    # Parse in-line params into phil; arguments that fail to parse are
    # silently skipped (collected ones are recorded in 'consume')
    argument_interpreter = argument_interpreter(master_phil=master_phil)
    consume = []
    for arg in phil_args:
        try:
            command_line_params = argument_interpreter.process(arg=arg)
            final_phil = final_phil.fetch(sources=[command_line_params,])
            consume.append(arg)
        except Sorry,e:
            pass
def __init__(self, img, gain, params):
    """Initialization and data read-in.

    :param img: path to a single raw image
    :param gain: detector gain
    :param params: IOTA parameter object (must provide params.dials.target)
    """
    from dxtbx.datablock import DataBlockFactory

    self.gain = gain
    self.params = params

    # Read settings from the DIALS target (.phil) file
    # If none is provided, use default settings (and may God have mercy)
    if self.params.dials.target is not None:  # was "!= None"
        with open(self.params.dials.target, 'r') as settings_file:
            settings_file_contents = settings_file.read()
        settings = parse(settings_file_contents)
        current_phil = phil_scope.fetch(sources=[settings])
        self.phil = current_phil.extract()
    else:
        self.phil = phil_scope.extract()

    # Convert raw image into single-image datablock
    with misc.Capturing() as junk_output:
        self.datablock = DataBlockFactory.from_filenames([img])[0]
def run(args):
    '''Entry point: sort command-line arguments into PHIL overrides, data
    directories and an optional refinement .phil file.

    NOTE(review): the visible code ends right after argument sorting --
    the remainder of this function appears truncated in this view.
    '''
    print "Parsing input..."
    # -c / -h / --help: show the PHIL scope and exit
    if "-c" in args or "-h" in args or "--help" in args:
        phil_scope.show(attributes_level=2)
        return

    user_phil = []            # parsed in-line PHIL assignments
    paths = []                # data directories
    refine_phil_file = None   # explicit .phil file, if given
    for arg in args:
        if os.path.isfile(arg):
            try:
                # A .phil file is remembered and not treated as a PHIL string
                if os.path.splitext(arg)[1] == ".phil":
                    refine_phil_file = arg
                    continue
            except Exception, e:
                raise Sorry("Unrecognized file %s"%arg)
        if os.path.isdir(arg):
            paths.append(arg)
        else:
            # Anything else must parse as an in-line PHIL assignment
            try:
                user_phil.append(parse(arg))
            except Exception, e:
                raise Sorry("Unrecognized argument: %s"%arg)
def __init__(self, parent, id, title):
    '''Build the image-tracker main frame: toolbar, timers, tracker panel,
    default DIALS PHIL parameters, event bindings, and command-line startup.

    :param parent: parent wx window
    :param id: wx window id
    :param title: frame title
    '''
    wx.Frame.__init__(self, parent, id, title, size=(1500, 600))
    self.parent = parent
    # Presence of this file signals the spotfinder to terminate
    self.term_file = os.path.join(os.curdir, '.terminate_image_tracker')
    self.spf_backend = 'mosflm'
    self.run_indexing = False
    self.run_integration = False
    self.reset_spotfinder()

    # Status bar
    self.sb = self.CreateStatusBar()
    self.sb.SetFieldsCount(2)
    self.sb.SetStatusWidths([100, -1])

    # Setup main sizer
    self.main_sizer = wx.BoxSizer(wx.VERTICAL)

    # Setup toolbar
    self.toolbar = self.CreateToolBar(style=wx.TB_3DBUTTONS | wx.TB_TEXT)
    quit_bmp = bitmaps.fetch_icon_bitmap('actions', 'exit')
    self.tb_btn_quit = self.toolbar.AddLabelTool(wx.ID_EXIT, label='Quit',
                                                 bitmap=quit_bmp,
                                                 shortHelp='Quit',
                                                 longHelp='Quit image tracker')
    self.toolbar.AddSeparator()
    # pref_bmp = bitmaps.fetch_icon_bitmap('apps', 'advancedsettings')
    # self.tb_btn_prefs = self.toolbar.AddLabelTool(wx.ID_ANY,
    #                                               label='Preferences',
    #                                               bitmap=pref_bmp,
    #                                               shortHelp='Preferences',
    #                                               longHelp='IOTA image tracker preferences')
    # self.toolbar.AddSeparator()
    open_bmp = bitmaps.fetch_icon_bitmap('actions', 'open')
    self.tb_btn_open = self.toolbar.AddLabelTool(wx.ID_ANY, label='Open',
                                                 bitmap=open_bmp,
                                                 shortHelp='Open',
                                                 longHelp='Open folder')
    run_bmp = bitmaps.fetch_icon_bitmap('actions', 'run')
    self.tb_btn_run = self.toolbar.AddLabelTool(wx.ID_ANY, label='Run',
                                                bitmap=run_bmp,
                                                shortHelp='Run',
                                                longHelp='Run Spotfinding')
    stop_bmp = bitmaps.fetch_icon_bitmap('actions', 'stop')
    self.tb_btn_stop = self.toolbar.AddLabelTool(wx.ID_ANY, label='Stop',
                                                 bitmap=stop_bmp,
                                                 shortHelp='Stop',
                                                 longHelp='Stop Spotfinding')
    self.toolbar.AddSeparator()
    span_view = bitmaps.fetch_custom_icon_bitmap('zoom_list')
    self.tb_btn_view = self.toolbar.AddLabelTool(wx.ID_ANY, label='View',
                                                 bitmap=span_view,
                                                 kind=wx.ITEM_RADIO,
                                                 shortHelp='Select to View',
                                                 longHelp='Select images to view')
    span_zoom = bitmaps.fetch_custom_icon_bitmap('zoom_view')
    self.tb_btn_zoom = self.toolbar.AddLabelTool(wx.ID_ANY, label='Zoom In',
                                                 bitmap=span_zoom,
                                                 kind=wx.ITEM_RADIO,
                                                 shortHelp='Zoom In',
                                                 longHelp='Zoom in on chart')
    # Zoom mode is the default; Run/Stop stay disabled until images are open
    self.toolbar.ToggleTool(self.tb_btn_zoom.GetId(), True)
    self.toolbar.EnableTool(self.tb_btn_run.GetId(), False)
    self.toolbar.EnableTool(self.tb_btn_stop.GetId(), False)
    self.toolbar.Realize()

    # Setup timers (spotfinding, unit-cell plotting, plot-only refresh)
    self.spf_timer = wx.Timer(self)
    self.uc_timer = wx.Timer(self)
    self.ff_timer = wx.Timer(self)

    self.tracker_panel = TrackerPanel(self)
    self.data_dict = self.tracker_panel.image_list.image_list.ctr.data.copy()
    self.img_list_initialized = False
    self.main_sizer.Add(self.tracker_panel, 1, wx.EXPAND)

    # Generate default DIALS PHIL file (strong-reflection output disabled)
    default_phil = ip.parse(default_target)
    self.phil = phil_scope.fetch(source=default_phil)
    self.params = self.phil.extract()
    self.params.output.strong_filename = None
    self.phil = self.phil.format(python_object=self.params)

    # Bindings
    self.Bind(wx.EVT_TOOL, self.onQuit, self.tb_btn_quit)
    self.Bind(wx.EVT_TOOL, self.onGetImages, self.tb_btn_open)
    self.Bind(wx.EVT_TOOL, self.onRunSpotfinding, self.tb_btn_run)
    self.Bind(wx.EVT_TOOL, self.onStop, self.tb_btn_stop)
    self.Bind(wx.EVT_BUTTON, self.onSelView, self.tracker_panel.btn_view_sel)
    self.Bind(wx.EVT_BUTTON, self.onWrtFile, self.tracker_panel.btn_wrt_file)
    self.Bind(wx.EVT_BUTTON, self.onAllView, self.tracker_panel.btn_view_all)
    self.Bind(wx.EVT_TOOL, self.onZoom, self.tb_btn_zoom)
    self.Bind(wx.EVT_TOOL, self.onList, self.tb_btn_view)

    # Spotfinder / timer bindings
    self.Bind(thr.EVT_SPFDONE, self.onSpfOneDone)
    self.Bind(thr.EVT_SPFALLDONE, self.onSpfAllDone)
    self.Bind(thr.EVT_SPFTERM, self.onSpfTerminated)
    self.Bind(wx.EVT_TIMER, self.onSpfTimer, id=self.spf_timer.GetId())
    self.Bind(wx.EVT_TIMER, self.onUCTimer, id=self.uc_timer.GetId())
    self.Bind(wx.EVT_TIMER, self.onPlotOnlyTimer, id=self.ff_timer.GetId())

    # Settings bindings
    self.Bind(wx.EVT_SPINCTRL, self.onMinBragg,
              self.tracker_panel.min_bragg.ctr)
    self.Bind(wx.EVT_SPINCTRL, self.onChartRange,
              self.tracker_panel.chart_window.ctr)
    self.Bind(wx.EVT_CHECKBOX, self.onChartRange,
              self.tracker_panel.chart_window.toggle)

    # Read arguments if any
    self.args, self.phil_args = parse_command_args('').parse_known_args()
    self.spf_backend = self.args.backend
    # 'int' implies indexing as well as integration
    if 'index' in self.args.action:
        self.run_indexing = True
    elif 'int' in self.args.action:
        self.run_indexing = True
        self.run_integration = True
    self.tracker_panel.min_bragg.ctr.SetValue(self.args.bragg)

    # Startup mode: resume from results file, or open a path and start
    if self.args.file is not None:
        self.results_file = self.args.file
        self.start_spotfinding(from_file=True)
    elif self.args.path is not None:
        path = os.path.abspath(self.args.path)
        self.open_images_and_get_ready(path=path)
        if self.args.start:
            print 'IMAGE_TRACKER: STARTING FROM FIRST RECORDED IMAGE'
            self.start_spotfinding()
        elif self.args.proceed:
            print 'IMAGE_TRACKER: STARTING FROM IMAGE RECORDED 1 MIN AGO'
            self.start_spotfinding(min_back=-1)
        # NOTE(review): self.args.time appears to be a list (indexed [0]
        # below) -- under Python 2 'list > 0' is always True; confirm the
        # argparse definition before relying on this branch.
        elif self.args.time > 0:
            min_back = -self.args.time[0]
            print 'IMAGE_TRACKER: STARTING FROM IMAGE RECORDED {} MIN AGO' \
                  ''.format(min_back)
            self.start_spotfinding(min_back=min_back)
def test_phil(self, filepath):
    """ Tests incoming PHIL file to try and determine what it's for.

    :param filepath: path to the candidate PHIL file
    :return: one of 'IOTA settings', 'PRIME settings', 'LABELIT target',
             'DIALS target', or 'text' when the file matches nothing
    """
    from iotbx import phil as ip
    from iotbx.file_reader import any_file as af

    try:
        if af(filepath).file_type == 'phil':
            test_phil = ip.parse(open(filepath).read())
        else:
            test_phil = None
    except RuntimeError:  # If not a PHIL file or a bad PHIL file
        return 'text'

    if not test_phil:
        return 'text'

    def _fit_percent(master):
        # Percentage of definitions in test_phil recognised by the given
        # master scope.  BUGFIX: division is done in floating point --
        # under Python 2 the original integer division made this always
        # 100 or 0, so the first candidate master always matched.
        len_test = len(test_phil.all_definitions(suppress_multiple=True))
        if len_test == 0:
            return 0.0
        unused = master.fetch(sources=[test_phil],
                              track_unused_definitions=True)[1]
        return (1 - len(unused) / float(len_test)) * 100

    # Test if IOTA parameter file
    from iota.components.iota_input import master_phil as iota_phil
    if _fit_percent(iota_phil) >= 50:
        return 'IOTA settings'

    # Test if PRIME parameter file
    from prime.postrefine.mod_input import master_phil as prime_phil
    if _fit_percent(prime_phil) >= 50:
        return 'PRIME settings'

    # Test if LABELIT target file (LABELIT not always available)
    try:
        from labelit.phil_preferences import iotbx_defs, libtbx_defs
    except ImportError:
        pass
    else:
        labelit_phil = ip.parse(input_string=iotbx_defs + libtbx_defs,
                                process_includes=True)
        if _fit_percent(labelit_phil) >= 50:
            return 'LABELIT target'

    # Test if DIALS target file
    from dials.command_line.stills_process import control_phil_str, \
        dials_phil_str
    dials_phil = ip.parse(control_phil_str + dials_phil_str,
                          process_includes=True)
    if _fit_percent(dials_phil) >= 50:
        return 'DIALS target'
    return 'text'
def load_cached_settings():
    """Return settings from the cache file, or master defaults if absent."""
    # No cache on disk: fall back to the master scope's defaults
    if not os.path.exists(settings_file):
        return master_phil_scope.extract()
    cached = parse(file_name=settings_file)
    return master_phil_scope.fetch(source=cached).extract()
from __future__ import division from iotbx.phil import parse # The phil scope phil_scope = parse(''' powder { water_ice { unit_cell = 4.498,4.498,7.338,90,90,120 .type = unit_cell .help = "The unit cell to generate d_spacings for ice rings." space_group = 194 .type = space_group .help = "The space group used to generate d_spacings for ice rings." d_min = 1 .type = float(value_min=0.0) .help = "The minimum resolution to filter ice rings" width = 0.06 .type = float(value_min=0.0) .help = "The width of an ice ring (in d-spacing)." } apply = *none water_ice .type = choice(multi=True) .help = "The power ring filters to apply" } ''') class PowderRingFilter: '''
""" from libtbx.phil import parse phil_scope = phil.parse( """ rs_mapper .short_caption = Reciprocal space mapper { map_file = None .type = path .optional = False .multiple= False .short_caption = Map file max_resolution = 6 .type = float .optional = True .short_caption = Resolution limit grid_size = 192 .type = int .optional = True reverse_phi = False .type = bool .optional = True } """, process_includes=True, ) class Script(object): def __init__(self):
def test_01():
  '''Exercise match_map_model_ncs / map_model_manager / DataManager round
  trips: origin shifting, model creation from sites/hierarchy/text, boxing,
  half-map handling, and PHIL-driven construction.  Requires the data files
  under ./data next to this test.'''
  # Source data
  data_dir = os.path.dirname(os.path.abspath(__file__))
  data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')
  data_pdb = os.path.join(data_dir, 'data', 'non_zero_origin_model.pdb')
  data_ncs_spec = os.path.join(data_dir, 'data', 'non_zero_origin_ncs_spec.ncs_spec')

  # DataManager
  dm = DataManager(['ncs_spec', 'model', 'real_map', 'phil'])
  dm.set_overwrite(True)

  # Read in map and model and ncs
  map_file = data_ccp4
  dm.process_real_map_file(map_file)
  mm = dm.get_real_map(map_file)

  model_file = data_pdb
  dm.process_model_file(model_file)
  model = dm.get_model(model_file)

  ncs_file = data_ncs_spec
  dm.process_ncs_spec_file(ncs_file)
  ncs = dm.get_ncs_spec(ncs_file)

  ncs_dc = ncs.deep_copy()

  mmmn = match_map_model_ncs()
  mmmn.add_map_manager(mm)
  mmmn.add_model(model)
  mmmn.add_ncs_object(ncs)

  # Save it
  mmmn_dc = mmmn.deep_copy()

  # Make sure we can add an ncs object that is either shifted or not
  mmmn_dcdc = mmmn.deep_copy()
  new_mmmn = match_map_model_ncs()
  new_mmmn.add_map_manager(mmmn_dcdc.map_manager())
  new_mmmn.add_model(mmmn_dcdc.model())
  new_mmmn.add_ncs_object(mmmn_dcdc.ncs_object())
  assert new_mmmn.ncs_object().shift_cart() == new_mmmn.map_manager(
    ).shift_cart()

  mmmn_dcdc = mmmn.deep_copy()
  new_mmmn = match_map_model_ncs()
  new_mmmn.add_map_manager(mmmn_dcdc.map_manager())
  new_mmmn.add_model(mmmn_dcdc.model())
  new_mmmn.add_ncs_object(ncs_dc)
  assert new_mmmn.ncs_object().shift_cart() == new_mmmn.map_manager(
    ).shift_cart()

  original_ncs = mmmn.ncs_object()
  assert approx_equal(
    (24.0528, 11.5833, 20.0004),
    tuple(original_ncs.ncs_groups()[0].translations_orth()[-1]),
    eps=0.1)

  assert tuple(mmmn._map_manager.origin_shift_grid_units) == (0, 0, 0)

  # Shift origin to (0,0,0)
  mmmn = mmmn_dc.deep_copy()  # fresh version of match_map_model_ncs
  mmmn.shift_origin()
  new_ncs = mmmn.ncs_object()
  assert tuple(mmmn._map_manager.origin_shift_grid_units) == (100, 100, 100)

  mmmn.write_model('s.pdb')
  mmmn.write_map('s.mrc')

  shifted_ncs = mmmn.ncs_object()
  assert approx_equal(
    (-153.758, -74.044, -127.487),
    tuple(shifted_ncs.ncs_groups()[0].translations_orth()[-1]),
    eps=0.1)

  # Shift a model and shift it back
  mmmn = mmmn_dc.deep_copy()  # fresh version of match_map_model_ncs
  model = mmmn.model()
  shifted_model = mmmn.shift_model_to_match_working_map(model=model)
  model_in_original_position = mmmn.shift_model_to_match_original_map(
    model=shifted_model)
  assert (approx_equal(
    model.get_sites_cart(),  # not a copy
    shifted_model.get_sites_cart()))
  assert approx_equal(model.get_sites_cart(),
                      model_in_original_position.get_sites_cart())

  # test data_manager map_model_manager
  generated_mmm = dm.get_map_model_manager()
  print(generated_mmm)
  assert (isinstance(generated_mmm, map_model_manager))

  # Generate a map and model
  import sys
  mmm = map_model_manager(log=sys.stdout)
  mmm.generate_map()
  model = mmm.model()
  mm = mmm.map_manager()
  assert approx_equal(model.get_sites_cart()[0], (14.476, 10.57, 8.34),
                      eps=0.01)
  assert approx_equal(mm.map_data()[10, 10, 10], -0.0506, eps=0.001)
  # Save it
  mmm_dc = mmm.deep_copy()

  # Create model from sites
  mmm_sites = mmm_dc.deep_copy()
  from scitbx.array_family import flex
  sites_cart = flex.vec3_double()
  sites_cart.append((3, 4, 5))
  mmm_sites.model_from_sites_cart(sites_cart=sites_cart,
                                  model_id='new_model')
  assert mmm_sites.get_model_by_id('new_model').get_sites_cart()[0] == (3, 4, 5)
  ph_sites = mmm_sites.get_model_by_id('new_model').get_hierarchy()
  text_sites = mmm_sites.get_model_by_id('new_model').model_as_pdb()

  # Create model from hierarchy
  mmm_sites = mmm_dc.deep_copy()
  mmm_sites.model_from_hierarchy(hierarchy=ph_sites, model_id='new_model')
  assert mmm_sites.get_model_by_id('new_model').get_sites_cart()[0] == (3, 4, 5)

  # Create model from text
  mmm_sites = mmm_dc.deep_copy()
  mmm_sites.model_from_text(text=text_sites, model_id='new_model')
  assert mmm_sites.get_model_by_id('new_model').get_sites_cart()[0] == (3, 4, 5)

  # Set crystal_symmetry and unit_cell_crystal_symmetry and shift_cart
  # Box and shift the map_model_manager so we have new coordinate system
  mmm_sites.box_all_maps_around_model_and_shift_origin()
  new_model = mmm_sites.get_model_by_id('new_model')
  assert approx_equal(
    (3.747033333333334, 4.723075000000001, 5.0),
    mmm_sites.get_model_by_id('new_model').get_sites_cart()[0])

  # arbitrarily set unit_cell crystal symmetry of model to
  # match crystal_symmetry. First have to set shift_cart to None
  new_model.set_shift_cart(shift_cart=None)
  new_model.set_unit_cell_crystal_symmetry_and_shift_cart()
  assert new_model.crystal_symmetry() != mmm_sites.crystal_symmetry()

  # now set crystal symmetries and shift cart of model to match the manager
  mmm_sites.set_model_symmetries_and_shift_cart_to_match_map(new_model)
  assert new_model.crystal_symmetry().is_similar_symmetry(
    mmm_sites.crystal_symmetry())
  assert new_model.unit_cell_crystal_symmetry().is_similar_symmetry(
    mmm_sites.unit_cell_crystal_symmetry())
  assert new_model.shift_cart() == mmm_sites.shift_cart()

  # Import hierarchy into a model and set symmetries and shift to match
  mmm_sites.model_from_hierarchy(hierarchy=mmm_sites.model().get_hierarchy(),
                                 model_id='model_from_hierarchy')
  assert mmm_sites.get_model_by_id('model_from_hierarchy').model_as_pdb() \
    == mmm_sites.get_model_by_id('model').model_as_pdb()

  # Check on wrapping
  assert not mm.wrapping()  # this one should not wrap because it is zero at edges

  # Make a new one with no buffer so it is not zero at edges
  mmm = map_model_manager()
  mmm.generate_map(box_cushion=0)
  mm = mmm.map_manager()
  # check its compatibility with wrapping
  assert mm.is_consistent_with_wrapping()
  mmm.show_summary()

  # now box it
  sel = mmm.model().selection("resseq 221:221")
  new_model = mmm.model().deep_copy().select(sel)
  new_mmm = map_model_manager(model=new_model, map_manager=mm.deep_copy())
  new_mmm.box_all_maps_around_model_and_shift_origin()
  new_mm = new_mmm.map_manager()
  assert not new_mm.wrapping()
  assert not new_mm.is_consistent_with_wrapping()

  # now box it with selection
  new_mmm_1 = map_model_manager(model=mmm.model().deep_copy(),
                                map_manager=mm.deep_copy())
  new_mmm_1.box_all_maps_around_model_and_shift_origin(
    selection_string="resseq 221:221")
  new_mm_1 = new_mmm_1.map_manager()
  assert not new_mm_1.wrapping()
  assert not new_mm_1.is_consistent_with_wrapping()
  assert new_mm_1.map_data().all() == new_mm.map_data().all()

  # create map_model_manager with just half-maps
  mm1 = mm.deep_copy()
  mm2 = mm.deep_copy()
  map_data = mm2.map_data()
  map_data += 1.
  new_mmm = map_model_manager(model=mmm.model().deep_copy(),
                              map_manager_1=mm1, map_manager_2=mm2)
  assert new_mmm._map_dict.get(
    'map_manager') is None  # should not be any yet
  # accessing map_manager() averages the half maps, hence the +0.5
  assert approx_equal(new_mmm.map_manager().map_data()[232],
                      mm.deep_copy().map_data()[232] + 0.5)
  assert new_mmm._map_dict.get(
    'map_manager') is not None  # now should be there

  # generate map data from a model
  mm1 = mm.deep_copy()
  mm2 = mm.deep_copy()
  new_mmm = map_model_manager(model=mmm.model().deep_copy(),
                              map_manager=mm1)
  mmm.generate_map(model=mmm.model())
  mm = mmm.map_manager()
  mmm.show_summary()

  # check get_map_model_manager function: only present when the DataManager
  # handles both models and real maps
  dm = DataManager(['model'])
  assert not hasattr(dm, 'get_map_model_manager')
  dm = DataManager(['real_map'])
  assert not hasattr(dm, 'get_map_model_manager')
  dm = DataManager(['sequence'])
  assert not hasattr(dm, 'get_map_model_manager')
  dm = DataManager(['model', 'real_map'])
  assert hasattr(dm, 'get_map_model_manager')

  # usage
  dm.get_map_model_manager(model_file=data_pdb, map_files=data_ccp4)
  dm.get_map_model_manager(model_file=data_pdb, map_files=[data_ccp4])
  dm.get_map_model_manager(model_file=data_pdb,
                           map_files=[data_ccp4, data_ccp4, data_ccp4])
  dm.get_map_model_manager(model_file=data_pdb, map_files=data_ccp4,
                           ignore_symmetry_conflicts=True)

  # errors
  try:
    dm.get_map_model_manager(model_file=data_pdb, map_files=data_ccp4,
                             from_phil=True)
  except Sorry as e:
    assert 'from_phil is set to True' in str(e)
  try:
    dm.get_map_model_manager(model_file=data_pdb, map_files=data_ccp4,
                             abc=123)
  except TypeError as e:
    assert 'unexpected keyword argument' in str(e)
  try:
    dm.get_map_model_manager(model_file=data_pdb,
                             map_files=[data_ccp4, data_ccp4])
  except Sorry as e:
    assert '1 full map and 2 half maps' in str(e)

  # PHIL
  class test_program(ProgramTemplate):
    master_phil_str = '''
include scope iotbx.map_model_manager.map_model_phil_str
'''
  working_phil_str = '''
  map_model {
    full_map = %s
    half_map = %s
    half_map = s.mrc
    model = %s
  }
''' % (data_ccp4, data_ccp4, data_pdb)

  master_phil = parse(test_program.master_phil_str, process_includes=True)
  working_phil = master_phil.fetch(parse(working_phil_str))
  tp = test_program(dm, working_phil.extract())

  try:
    dm.get_map_model_manager(from_phil=True)
  except Exception as e:
    assert 'ignore_symmetry_conflicts' in str(e)
  try:
    dm.get_map_model_manager(from_phil=True,
                             ignore_symmetry_conflicts=True)
  except AssertionError:
    pass
master_phil = ip.parse(""" description = Integration Optimization, Transfer and Analysis (IOTA) .type = str .help = Run description (optional). .multiple = False .optional = True input = None .type = path .multiple = True .help = Path to folder with raw data in pickle format, list of files or single file .help = Can be a tree with folders .optional = False output = None .type = path .multiple = False .help = Base output directory, current directory in command-line, can be set in GUI .optional = True image_conversion .help = Parameters for raw image conversion to pickle format { rename_pickle_prefix = Auto .type = str .help = Specify prefix (e.g. "HEWL_room_temp") to rename all input images .help = Set to None to keep original image filenames and directory tree convert_only = False .type = bool .help = Set to True (or use -c option) to convert and exit square_mode = None *pad crop .type = choice .help = Method to generate square image beamstop = 0 .type = float .help = Beamstop shadow threshold, zero to skip distance = 0 .type = float .help = Alternate crystal-to-detector distance (set to zero to leave the same) beam_center .help = Alternate beam center coordinates (set to zero to leave the same) { x = 0 .type = float y = 0 .type = float } } image_triage .help = Check if images have diffraction using basic spotfinding (-t option) { type = None *simple grid_search .type = choice .help = Set to None to attempt integrating all images min_Bragg_peaks = 10 .type = int .help = Minimum number of Bragg peaks to establish diffraction grid_search .help = "Parameters for the grid search." { area_min = 6 .type = int .help = Minimal spot area. area_max = 24 .type = int .help = Maximal spot area. height_min = 2 .type = int .help = Minimal spot height. height_max = 20 .type = int .help = Maximal spot height. 
step_size = 4 .type = int .help = Grid search step size } } cctbx .help = Options for CCTBX-based image processing { target = None .type = str .multiple = False .help = Target (.phil) file with integration parameters grid_search .help = "Parameters for the grid search." { type = None *brute_force smart .type = choice .help = Set to None to only use median spotfinding parameters area_median = 5 .type = int .help = Median spot area. area_range = 2 .type = int .help = Plus/minus range for spot area. height_median = 4 .type = int .help = Median spot height. height_range = 2 .type = int .help = Plus/minus range for spot height. sig_height_search = False .type = bool .help = Set to true to scan signal height in addition to spot height } selection .help = Parameters for integration result selection { select_only .help = set to True to re-do selection with previous { flag_on = False .type = bool .help = set to True to bypass grid search and just run selection grid_search_path = None .type = path .help = set if you want to use specific grid_search results .help = leave as None to use grid search results from previous run } min_sigma = 5 .type = int .help = minimum I/sigma(I) cutoff for "strong spots" select_by = *epv mosaicity .type = choice .help = Use mosaicity or Ewald proximal volume for optimal parameter selection prefilter .help = Used to throw out integration results that do not fit user-defined unit cell information { flag_on = False .type = bool .help = Set to True to activate prefilter target_pointgroup = None .type = str .help = Target point group, e.g. "P4" target_unit_cell = None .type = unit_cell .help = In format of "a, b, c, alpha, beta, gamma", e.g. 
79.4, 79.4, 38.1, 90.0, 90.0, 90.0 target_uc_tolerance = None .type = float .help = Maximum allowed unit cell deviation from target min_reflections = None .type = int .help = Minimum integrated reflections per image min_resolution = None .type = float .help = Minimum resolution for accepted images } } } dials .help = Options for DIALS-based image processing .help = This option is not yet ready for general use! { target = None .type = str .multiple = False .help = Target (.phil) file with integration parameters for DIALS min_spot_size = 6 .type = int .help = Minimal spot size global_threshold = 0 .type = int .help = Global threshold } analysis .help = "Analysis / visualization options." { run_clustering = False .type = bool .help = Set to True to turn on hierarchical clustering of unit cells cluster_threshold = 5000 .type = int .help = threshold value for unit cell clustering viz = *None integration cv_vectors .type = choice .help = Set to "integration" to visualize spotfinding and integration results. .help = Set to "cv_vectors" to visualize accuracy of CV vectors charts = False .type = bool .help = If True, outputs PDF files w/ charts of mosaicity, rmsd, etc. summary_graphs = False .type = bool .help = If True: spot-finding heatmap, res. histogram and beamXY graph } advanced .help = "Advanced, debugging and experimental options." { integrate_with = *cctbx dials .type = choice .help = Choose image processing software package estimate_gain = False .type = bool .help = Estimates detector gain (sometimes helps indexing) debug = False .type = bool .help = Used for various debugging purposes. experimental = False .type = bool .help = Set to true to run the experimental section of codes random_sample .help = Use a randomized subset of images (or -r <number> option) { flag_on = False .type = bool .help = Set to run grid search on a random set of images. number = 0 .type = int .help = Number of random samples. Set to zero to select 10% of input. 
} } n_processors = 32 .type = int .help = No. of processing units mp_method = *multiprocessing mpi lsf .type = choice .help = Multiprocessing method mp_queue = None .type = str .help = Multiprocessing queue prime_prefix = prime .type = str .multiple = False .help = Prefix for the PRIME input file """)
.type = bool .help = Report statistics on per-frame attributes modeled by max-likelihood fit (expert only) } parallel { nproc = 1 .help = 1, use no parallel execution. .type = int a2a = 1 .help = Number of iterations to split MPI alltoall - used to address mpy4py memory errors, when hkl chunks are too large for the cpu. .type = int } """ + mysql_master_phil phil_scope = parse(master_phil) class Script(object): '''A class for running the script.''' def __init__(self): # The script usage import libtbx.load_env self.usage = "usage: %s [options] [param.phil] " % libtbx.env.dispatcher_name self.parser = None def initialize(self): '''Initialise the script.''' from dials.util.options import OptionParser # Create the parser self.parser = OptionParser(usage=self.usage,
from iota.components.iota_utils import norm_font_size wx4 = wx.__version__[0] == '4' gui_phil = parse(''' gui .help = Options for IOTA GUI only .alias = GUI Options { image_viewer = *dials.image_viewer cctbx.image_viewer distl.image_viewer cxi.view .type = choice .help = Select image viewer (GUI only) .alias = Image Viewer .optional = False monitor_mode = False .type = bool .help = Set to true to keep watch for incoming images (GUI only) .alias = Process in Monitor Mode monitor_mode_timeout = False .type = bool .help = Set to true to auto-terminate continuous mode (GUI only) .alias = Monitor Mode Timeout monitor_mode_timeout_length = 0 .type = int .help = Timeout length in seconds (GUI only) .alias = Timeout (sec) } ''') class IOTAFrameError(Exception): def __init__(self, msg):
It's only function is to input the path to the mask file but means that the user does not have to edit the datablock file by hand. Examples:: dials.apply_mask datablock.json input.mask=mask.pickle ''' phil_scope = parse(""" input { mask = None .type = str .help = "The mask filename" } output { datablock = datablock_with_mask.json .type = str .help = "Name of output datablock file" } """, process_includes=True) class Script(object): ''' A class to encapsulate the script. ''' def __init__(self): ''' Initialise the script. ''' from dials.util.options import OptionParser import libtbx.load_env
def refine_expanding(params, merged_scope, combine_phil):
  """Refine CSPAD detector geometry by expanding outward from the center.

  The detector is refined in 8 steps.  Step 0 starts with the four central
  panels of each quadrant (panel ids 2, 3 plus their +16/+32/+48 quadrant
  copies); each later step adds the next ring of panels outward.  For every
  step the reflections are filtered down to the active panel set, then
  ``dials.refine`` is run as an external command: step 0 at hierarchy
  levels 0 and 1, all later steps at level 2.

  Inputs on disk: ``<tag>_combined.expt/.refl`` (optionally RMSD-filtered
  to ``<tag>_filtered.*`` first).  Outputs:
  ``<tag>_refined_step<j>_level<i>.expt/.refl`` per refinement run, and a
  ``<tag>_refine_step<j>_level<i>.phil`` diff file per run.

  :param params: extracted phil parameters (tag, rmsd_filter, panel_filter,
      flat_refinement options, refine_distance, refine_energy, ...)
  :param merged_scope: phil scope used to build each run's diff phil
  :param combine_phil: unused in this body — presumably kept for call-site
      symmetry with sibling refine functions; TODO confirm
  """
  # This routine assumes refinement starts at the top of the hierarchy.
  assert params.start_at_hierarchy_level == 0
  if params.rmsd_filter.enable:
    # Pre-filter outlier experiments by RMSD via an external command;
    # subsequent steps read the "<tag>_filtered.*" files.
    input_name = "filtered"
    command = "cctbx.xfel.filter_experiments_by_rmsd %s %s output.filtered_experiments=%s output.filtered_reflections=%s"
    command = command % ("%s_combined.expt" % params.tag,
                         "%s_combined.refl" % params.tag,
                         "%s_filtered.expt" % params.tag,
                         "%s_filtered.refl" % params.tag)
    command += " iqr_multiplier=%f" % params.rmsd_filter.iqr_multiplier
    print(command)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    result.show_stdout()
  else:
    input_name = "combined"

  if params.panel_filter is not None:
    # Restrict the combined reflection table (in place, on disk) to the
    # requested panels before any refinement happens.
    from libtbx import easy_pickle
    print("Filtering out all reflections except those on panels %s" % (", ".join(["%d" % p for p in params.panel_filter])))
    combined_path = "%s_combined.refl" % params.tag
    data = easy_pickle.load(combined_path)
    sel = None
    for panel_id in params.panel_filter:
      if sel is None:
        sel = data['panel'] == panel_id
      else:
        sel |= data['panel'] == panel_id
    print("Retaining", len(data.select(sel)), "out of", len(data), "reflections")
    easy_pickle.dump(combined_path, data.select(sel))

  # this is the order to refine the CSPAD in: each step's panel list is the
  # previous step's list plus the next pair of panels outward.
  steps = {}
  steps[0] = [2, 3]
  steps[1] = steps[0] + [0, 1]
  steps[2] = steps[1] + [14, 15]
  steps[3] = steps[2] + [6, 7]
  steps[4] = steps[3] + [4, 5]
  steps[5] = steps[4] + [12, 13]
  steps[6] = steps[5] + [8, 9]
  steps[7] = steps[6] + [10, 11]

  # The ids above cover one quadrant; add the corresponding panels of the
  # other three quadrants (offsets +16, +32, +48).
  for s, panels in six.iteritems(steps):
    rest = []
    for p in panels:
      rest.append(p + 16)
      rest.append(p + 32)
      rest.append(p + 48)
    panels.extend(rest)

  levels = {0: (0, 1)}  # levels 0 and 1
  for i in range(7):
    levels[i + 1] = (2, )  # level 2

  previous_step_and_level = None
  for j in range(8):
    # Filter the input reflections down to this step's panel set.
    from libtbx import easy_pickle
    print("Filtering out all reflections except those on panels %s" % (", ".join(["%d" % p for p in steps[j]])))
    combined_path = "%s_%s.refl" % (params.tag, input_name)
    output_path = "%s_step%d.refl" % (params.tag, j)
    data = easy_pickle.load(combined_path)
    sel = None
    for panel_id in steps[j]:
      if sel is None:
        sel = data['panel'] == panel_id
      else:
        sel |= data['panel'] == panel_id
    print("Retaining", len(data.select(sel)), "out of", len(data), "reflections")
    easy_pickle.dump(output_path, data.select(sel))

    for i in levels[j]:
      print("Step", j, "refining at hierarchy level", i)
      refine_phil_file = "%s_refine_step%d_level%d.phil" % (params.tag, j, i)
      # Build the diff phil controlling which detector parameters are fixed.
      if i == 0:
        if params.refine_distance:
          diff_phil = "refinement.parameterisation.detector.fix_list=Tau1"  # fix detector rotz
        else:
          diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Tau1"  # fix detector rotz, distance
        if params.flat_refinement:
          diff_phil += ",Tau2,Tau3"  # Also fix x and y rotations
        diff_phil += "\n"
        if params.refine_energy:
          diff_phil += "refinement.parameterisation.beam.fix=in_spindle_plane+out_spindle_plane\n"  # allow energy to refine
      else:
        # Note, always need to fix something, so pick a panel group and fix its
        # Tau1 (rotation around Z) always
        if params.flat_refinement and params.flat_refinement_with_distance:
          diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1,Tau2,Tau3\n"  # refine distance, rotz and xy translation
          diff_phil += "refinement.parameterisation.detector.constraints.parameter=Dist\n"  # constrain distance to be refined identically for all panels at this hierarchy level
        elif params.flat_refinement:
          diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Group1Tau1,Tau2,Tau3\n"  # refine only rotz and xy translation
        else:
          diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1\n"  # refine almost everything

      # Chain input: first run uses the step's filtered reflections; later
      # runs start from the previous run's refined output where possible.
      if previous_step_and_level is None:
        command = "dials.refine %s %s_%s.expt %s_step%d.refl"%( \
          refine_phil_file, params.tag, input_name, params.tag, j)
      else:
        p_step, p_level = previous_step_and_level
        if p_step == j:
          # Same step, next level: reuse both refined experiments and reflections.
          command = "dials.refine %s %s_refined_step%d_level%d.expt %s_refined_step%d_level%d.refl"%( \
            refine_phil_file, params.tag, p_step, p_level, params.tag, p_step, p_level)
        else:
          # New step: refined experiments, but this step's (re-filtered) reflections.
          command = "dials.refine %s %s_refined_step%d_level%d.expt %s_step%d.refl"%( \
            refine_phil_file, params.tag, p_step, p_level, params.tag, j)

      diff_phil += "refinement.parameterisation.detector.hierarchy_level=%d\n" % i
      output_experiments = "%s_refined_step%d_level%d.expt" % (params.tag, j, i)
      command += " output.experiments=%s output.reflections=%s_refined_step%d_level%d.refl"%( \
        output_experiments, params.tag, j, i)

      # Persist the diff phil for this run, then execute dials.refine.
      scope = merged_scope.fetch(parse(diff_phil))
      f = open(refine_phil_file, 'w')
      f.write(refine_scope.fetch_diff(scope).as_str())
      f.close()

      print(command)
      result = easy_run.fully_buffered(command=command).raise_if_errors()
      result.show_stdout()

      # In expanding mode, if using flat refinement with distance, after having
      # refined this step as a block, unrefined panels will have been left
      # behind. Read back the new metrology, compute the shift applied to the
      # panels refined in this step, and apply that shift to the unrefined
      # panels in this step.
      if params.flat_refinement and params.flat_refinement_with_distance and i > 0:
        from dxtbx.model.experiment_list import ExperimentListFactory, ExperimentListDumper
        from xfel.command_line.cspad_detector_congruence import iterate_detector_at_level, iterate_panels
        from scitbx.array_family import flex
        from scitbx.matrix import col
        from libtbx.test_utils import approx_equal
        experiments = ExperimentListFactory.from_json_file(
          output_experiments, check_format=False)
        assert len(experiments.detectors()) == 1
        detector = experiments.detectors()[0]

        # Displacements: deltas along the vector normal to the detector
        displacements = flex.double()

        # Iterate through the panel groups at this level
        for panel_group in iterate_detector_at_level(
            detector.hierarchy(), 0, i):
          # Were there panels refined in this step in this panel group?
          if params.panel_filter:
            test = [
              list(detector).index(panel) in steps[j]
              for panel in iterate_panels(panel_group)
              if list(detector).index(panel) in params.panel_filter
            ]
          else:
            test = [
              list(detector).index(panel) in steps[j]
              for panel in iterate_panels(panel_group)
            ]
          if not any(test):
            continue
          # Compute the translation along the normal of this panel group.
          # This is defined as distance in dials.refine
          displacements.append(
            col(panel_group.get_local_fast_axis()).cross(
              col(panel_group.get_local_slow_axis())).dot(
                col(panel_group.get_local_origin())))

        # Even though the panels are constrained to move the same amount,
        # there is a bit of variation.
        stats = flex.mean_and_variance(displacements)
        displacement = stats.mean()
        print("Average displacement along normals: %f +/- %f" % (stats.mean(), stats.unweighted_sample_standard_deviation()))

        # Verify the variation isn't significant
        for k in range(1, len(displacements)):
          assert approx_equal(displacements[0], displacements[k])

        # If all of the panel groups in this level moved, no need to do anything.
        if len(displacements) != len(
            list(
              iterate_detector_at_level(detector.hierarchy(), 0, i))):
          for panel_group in iterate_detector_at_level(
              detector.hierarchy(), 0, i):
            if params.panel_filter:
              test = [
                list(detector).index(panel) in steps[j] and
                list(detector).index(panel) in params.panel_filter
                for panel in iterate_panels(panel_group)
              ]
            else:
              test = [
                list(detector).index(panel) in steps[j]
                for panel in iterate_panels(panel_group)
              ]
            # If any of the panels in this panel group moved, no need to do anything
            if any(test):
              continue

            # None of the panels in this panel group moved in this step, so need
            # to apply displacement from other panel groups at this level
            fast = col(panel_group.get_local_fast_axis())
            slow = col(panel_group.get_local_slow_axis())
            ori = col(panel_group.get_local_origin())
            normal = fast.cross(slow)
            # Shift the group along its normal by the average displacement,
            # preserving its in-plane position.
            panel_group.set_local_frame(
              fast, slow, (ori.dot(fast) * fast) +
              (ori.dot(slow) * slow) + (normal * displacement))

          # Check the new displacements. Should be the same across all panels.
          displacements = []
          for panel_group in iterate_detector_at_level(
              detector.hierarchy(), 0, i):
            displacements.append(
              col(panel_group.get_local_fast_axis()).cross(
                col(panel_group.get_local_slow_axis())).dot(
                  col(panel_group.get_local_origin())))
          for k in range(1, len(displacements)):
            assert approx_equal(displacements[0], displacements[k])

          # Write the corrected metrology back over the refined output.
          dump = ExperimentListDumper(experiments)
          dump.as_json(output_experiments)

      previous_step_and_level = j, i
  output_geometry(params)
def run(argv=None):
  """Entry point for the X-ray image display GUI.

  Parses command-line phil arguments, builds the wx viewer frame, applies
  the user's display settings, optionally loads an effective-metrology
  file, queues any image paths given on the command line, and enters the
  wx main loop.

  :param argv: argument list; defaults to sys.argv
  :return: 0 on normal GUI exit
  """
  if (argv is None):
    argv = sys.argv

  # XXX Could/should handle effective metrology the same way, except
  # it does not have a single scope.
  work_phil = phil.process_command_line(
    args=argv[1:],
    master_string=master_str + phil_str + additional_spotfinder_phil_defs)
  work_params = work_phil.work.extract()

  app = wx.App(0)
  # macOS file-dialog behavior tweak; a no-op on other platforms.
  wx.SystemOptions.SetOptionInt("osx.openfiledialog.always-show-types", 1)
  frame = XrayFrame(None, -1, "X-ray image display", size=(800,720))
  frame.Show()

  # show settings panel
  frame.OnShowSettings(None)
  # Push command-line display options into the settings panel controls.
  frame.settings_frame.panel.center_ctrl.SetValue(
    work_params.beam_center)
  frame.settings_frame.panel.integ_ctrl.SetValue(
    work_params.show_integration_results)
  frame.settings_frame.panel.spots_ctrl.SetValue(
    work_params.show_spotfinder_results)
  frame.settings.show_effective_tiling = work_params.show_effective_tiling
  frame.settings_frame.panel.collect_values()

  if (work_params.effective_metrology is not None):
    # Load transformation matrices from an effective-metrology phil file.
    from xfel.cftbx.detector.metrology import \
      master_phil, metrology_as_transformation_matrices
    stream = open(work_params.effective_metrology)
    metrology_phil = master_phil.fetch(sources=[phil.parse(stream.read())])
    stream.close()
    frame.metrology_matrices = metrology_as_transformation_matrices(
      metrology_phil.extract())

  # Update initial settings with values from the command line. Needs
  # to be done before image is loaded (but after the frame is
  # instantiated).
  frame.params = work_params
  frame.init_pyslip()
  frame.pyslip.tiles.user_requests_antialiasing = work_params.anti_aliasing
  frame.pyslip.tiles.show_untrusted = frame.params.show_untrusted

  paths = work_phil.remaining_args
  if (len(paths) == 1 and os.path.basename(paths[0]) == "DISTL_pickle"):
    # Special case: a single DISTL spotfinder output pickle.
    assert os.path.isfile(paths[0])
    frame.load_distl_output(paths[0])
  elif (len(paths) > 0):
    # Queue every frame of every image set given on the command line,
    # then display the first one.
    frame.CHOOSER_SIZE = 1500
    from dxtbx.imageset import ImageSetFactory
    from rstbx.slip_viewer.frame import chooser_wrapper
    sets = ImageSetFactory.new(paths)
    for imgset in sets:
      for idx in imgset.indices():
        frame.add_file_name_or_data(chooser_wrapper(imgset, idx))
    idx = sets[0].indices()[0]
    frame.load_image(chooser_wrapper(sets[0],idx))

  app.MainLoop()
  return 0
def run(argv=None):
  """Command-line entry point for cctbx.small_cell indexing.

  Parses the -t/--target (required) and -s/--skip_processed_files options,
  then processes each remaining path:
    * a directory: index every .pickle/.edf image in it (splitting the
      file list across MPI ranks when mpi4py is available),
    * a .txt file: index every image file listed in it (one per line),
    * a .int file: report completeness and multiplicity statistics,
    * any other file: index it as a single image.

  :param argv: argument list; defaults to sys.argv
  """
  if argv is None:
    argv = sys.argv
  from iotbx.phil import parse
  small_cell_phil = parse(small_cell_phil_str, process_includes=True)

  welcome_message = """
  %s [-s] -t PATH <directory or image paths>

  cctbx.small_cell: software for indexing sparse, still patterns.

  An excellent knowledge of the unit cell, detector distance, wavelength and
  beam center is required. Specify at least the unit cell in the target phil
  file passed in with the -t parameter.

  If the image can be integrated, the integrated intensities will be found in
  a *.int file (plain text) and in a cctbx.xfel integration pickle file.

  See Brewster, A.S., Sawaya, M.R., Rodriguez, J., Hattne, J., Echols, N.,
  McFarlane, H.T., Cascio, D., Adams, P.D., Eisenberg, D.S. & Sauter, N.K.
  (2015). Acta Cryst. D71, doi:10.1107/S1399004714026145.

  Showing phil parameters:

  """ % libtbx.env.dispatcher_name
  welcome_message += small_cell_phil.as_str(attributes_level = 2)

  command_line = (libtbx.option_parser.option_parser(
    usage=welcome_message)
    .option(None, "--target", "-t",
            type="string",
            default=None,
            dest="target",
            metavar="PATH",
            help="Target phil file")
    .option(None, "--skip_processed_files", "-s",
            action="store_true",
            default=False,
            dest="skip_processed_files",
            help="Will skip images that have a .int file already created")
    ).process(args=argv[1:])
  paths = command_line.args

  # Target phil file and at least one file to process are required
  if command_line.options.target is None or \
     not os.path.isfile(command_line.options.target) or len(paths) == 0:
    command_line.parser.print_usage()
    return

  # Parse the target
  args = []
  args.append(parse(file_name=command_line.options.target, process_includes=True))
  horiz_phil = small_cell_phil.fetch(sources=args).extract()

  for path in paths:
    if os.path.isdir(path):
      # process an entire directory
      files = os.listdir(path)
      try:
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        size = comm.Get_size()
        # determine which subset of the files in this directory this
        # process will work on
        chunk = len(files) // size
        myfiles = files[rank*chunk:(rank+1)*chunk]
        if rank == 0:
          # rank 0 also picks up the remainder not evenly divisible by size
          myfiles += files[len(files)-len(files)%size:len(files)]
      except ImportError:
        # was "except ImportError, e" (python 2 only syntax; e was unused)
        print("MPI not found, multiprocessing disabled")
        myfiles = files

      counts = []
      processed = []
      for fname in myfiles:
        # Only consider image files, skipping previous integration output
        # ("int*" files) and the spotfinder pickle.
        if os.path.splitext(fname)[1] in (".pickle", ".edf") and \
           os.path.basename(fname)[0:3].lower() != "int" and \
           fname != "spotfinder.pickle":
          if command_line.options.skip_processed_files and os.path.exists(fname + ".int"):
            print("Skipping %s as it has already been processed" % fname)
            continue
          counts.append(small_cell_index(os.path.join(path, fname), horiz_phil))
          if counts[-1] is None:  # was "== None"
            counts[-1] = 0
          processed.append(fname)
      for fname, count in zip(processed, counts):
        print("%s %4d spots in max clique" % (fname, count))

    elif os.path.isfile(path):
      if os.path.splitext(path)[1] == ".txt":
        # Given a list of file names in a text file, process each file listed
        f = open(path, "r")
        for line in f.readlines():
          if os.path.isfile(line.strip()):
            count = small_cell_index(line.strip(), horiz_phil)
            if count is not None:
              print("%s %4d spots in max clique" % (line.strip(), count))
        f.close()
      elif os.path.splitext(path)[1] == ".int":
        # Summarize a .int file, providing completeness and multiplicity statistics
        f = open(path, "r")
        hkls_all = []
        hkls_unique = []
        files = []
        for line in f.readlines():
          strs = line.strip().split()
          src = strs[0].split(":")[0]  # originating image file name
          if src not in files:
            files.append(src)
          hkl = (int(strs[7]), int(strs[8]), int(strs[9]))
          if hkl not in hkls_unique:
            hkls_unique.append(hkl)
          hkls_all.append(hkl)
        # (typo fixes: "orginal" -> "original", "multiplicty" -> "multiplicity")
        print("%d unique hkls from %d original files. Completeness: " % (len(hkls_unique), len(files)))

        from cctbx.crystal import symmetry
        import cctbx.miller
        from cctbx.array_family import flex
        sym = symmetry(unit_cell=horiz_phil.small_cell.powdercell,
                       space_group_symbol=horiz_phil.small_cell.spacegroup)
        millerset = cctbx.miller.set(sym, flex.miller_index(hkls_unique), anomalous_flag=False)
        millerset = millerset.resolution_filter(d_min=horiz_phil.small_cell.high_res_limit)
        millerset.setup_binner(n_bins=10)
        data = millerset.completeness(True)
        data.show()
        data = millerset.completeness(False)
        print("Total completeness: %d%%\n" % (data * 100))

        print("%d measurements total from %d original files. Multiplicity (measurements/expected):" % (len(hkls_all), len(files)))
        millerset = cctbx.miller.set(sym, flex.miller_index(hkls_all), anomalous_flag=False)
        millerset = millerset.resolution_filter(d_min=horiz_phil.small_cell.high_res_limit)
        millerset.setup_binner(n_bins=10)
        data = millerset.completeness(True)
        data.show()
        # float() guards against silent integer truncation under python 2,
        # which previously reported e.g. 2 instead of 2.750
        print("Total multiplicity: %.3f" % (float(len(hkls_all)) / len(millerset.complete_set().indices())))
        f.close()
      else:
        # process a regular image file
        count = small_cell_index(path, horiz_phil)
        if count is not None:
          print("%s %4d spots in max clique" % (path, count))
debug = False .type = bool .expert_level=3 .short_caption = Output biased map .help = Additional output: biased omit map (ligand used for mask calculation \ but omitted from model) gui .help = "GUI-specific parameter required for output directory" { output_dir = None .type = path .style = output_dir } """ master_params = phil.parse(master_params_str, process_includes=True) def output_map(f_obs, r_free_flags, xray_structure, mask_data, filename, params, log): f_calc = f_obs.structure_factors_from_scatterers( xray_structure = xray_structure).f_calc() mask = f_obs.structure_factors_from_map( map = mask_data, use_scale = True, anomalous_flag = False, use_sg = False) # is it really use_sg = false? fmodel = mmtbx.f_model.manager( f_obs = f_obs, r_free_flags = r_free_flags, f_calc = f_calc,
def __init__(self,phenix_params=None):
  """Set up a PProbe run from a phil-formatted parameter file.

  Imports cctbx and PProbe modules into module globals, instantiates the
  PProbe helper objects, parses ``phenix_params`` with phil, and caches
  the extracted input/output paths and options on ``self``.

  :param phenix_params: path to a phil-formatted parameter file (required;
      asserted non-None below)
  """
  #cctbx imports
  # NOTE: imports are bound as module globals so that sibling methods can
  # use them without re-importing.
  global iotbx,phil,flex,reflection_file_utils,crystal_symmetry_from_any,Sorry,easy_pickle
  import iotbx.pdb
  from iotbx import phil
  from scitbx.array_family import flex
  from iotbx import reflection_file_utils
  from iotbx import crystal_symmetry_from_any
  from libtbx.utils import Sorry
  from libtbx import easy_pickle
  #PProbe imports
  global PPpeak,PPstruct,PPref,PPutil,PPreal,PPfeat,PPsel,PPio,PPcont
  from PProbe_peak import PeakObj as PPpeak
  from PProbe_struct import StructData as PPstruct
  from PProbe_ref import RSRefinements as PPref
  from PProbe_util import Util as PPutil
  from PProbe_realspace import RealSpace as PPreal
  from PProbe_extract import FeatureExtraction as PPfeat
  from PProbe_selectors import Selectors as PPsel
  from PProbe_dataio import DataIO as PPio
  from PProbe_contacts import Contacts as PPcont
  # Helper-object instances used throughout this class.
  self.ppfeat = PPfeat()
  self.pput = PPutil()
  self.ppio = PPio()
  self.ppcont = PPcont(phenix_python=True)
  #we need one or the other
  assert phenix_params is not None
  #setup some dev options
  self.dev_opts = {'set_chain':False,'pdb_out_str':"",
                   'renumber':False,'write_ref_pdb':False,
                   'write_maps':False,'ressig':False,
                   'write_contacts':False,'proc_sol':True}
  #phenix params should be a file formatted by phil
  #should be processed properly by phil . . .
  phil_f = open(phenix_params,'r')
  phil_str = phil_f.read()
  phil_f.close()
  pprobe_phil = phil.parse(phil_str)
  self.phe_par = pprobe_phil.extract()
  # map_omit_mode is a multiple-choice list; the selected choice is the
  # entry whose string starts with "*" — strip that marker to get the mode.
  for option in self.phe_par.input.parameters.map_omit_mode:
    if option[0] == "*":
      self.omit_mode = option[1::]
  # pprobe.extract appears to arrive as a list of strings; treat any value
  # whose first character is 't'/'T' as True.
  # NOTE(review): presumably a workaround for a bool arriving as str via
  # phil — TODO confirm against the phil definition.
  extract_asstr = self.phe_par.pprobe.extract[0]
  extract = extract_asstr[0].lower() == "t"
  if extract:
    # In "valsol" mode the stripped model doubles as the input model.
    if self.omit_mode == "valsol":
      self.model_pdb = self.phe_par.input.pdb.strip_pdb[0]
    else:
      self.model_pdb = self.phe_par.input.pdb.model_pdb[0]
    self.strip_pdb = self.phe_par.input.pdb.strip_pdb[0]
    self.peaks_pdb = self.phe_par.input.pdb.peaks_pdb[0]
  self.map_coeff = self.phe_par.input.input_map.map_coeff_file[0]
  mdict_file = self.phe_par.input.model_param.model_dict_file[0]
  self.master_dict = self.ppio.read_master_dict(input_dfile=mdict_file)
  scres = self.phe_par.input.parameters.score_res
  if scres is not None:
    self.score_res = self.phe_par.input.parameters.score_res[0]
  # Only the first 4 characters of the prefix are used.
  self.out_prefix = self.phe_par.output.output_peak_prefix[0][0:4]
  # I can't figure out this phil parse thing, so bad hack here
  """ fix dev options here """
  if self.omit_mode == "valsol":
    self.dev_opts['proc_sol'] = False
  self.cmdline=""  #keep some old code from breaking (FIX)
def submit_job(app, job):
  """Build per-job configuration files and submit the job for processing.

  Writes, under ``<settings_dir>/cfgs``:
    * ``<id>_params.phil`` — the (diff) phil parameters for the dispatcher,
    * ``<id>.cfg``        — a psana config file (LABELIT backend / pickles),
    * ``<id>.loc``        — an LCLS locator file (non-xtc_process, LCLS only),
    * ``<id>_submit.phil`` — the filled-in submission template,
  then hands the submission phil to xfel.command_line.cxi_mpi_submit.

  :param app: GUI application object carrying extracted phil params and the db
  :param job: job record (run, trial, rungroup) to submit
  :return: result of ``submit_script().run(args)``
  """
  import os, libtbx.load_env
  from xfel.ui import settings_dir
  configs_dir = os.path.join(settings_dir, "cfgs")
  if not os.path.exists(configs_dir):
    os.makedirs(configs_dir)

  # Unique per-job identifier used to name every generated file.
  if app.params.facility.name == 'lcls':
    identifier_string = "%s_%s_r%04d_t%03d_rg%03d"% \
      (app.params.facility.lcls.experiment, app.params.experiment_tag, int(job.run.run), job.trial.trial, job.rungroup.id)
  else:
    identifier_string = "%s_%s_t%03d_rg%03d"% \
      (app.params.experiment_tag, job.run.run, job.trial.trial, job.rungroup.id)
  target_phil_path = os.path.join(configs_dir, identifier_string + "_params.phil")

  dispatcher = app.params.dispatcher
  phil_str = job.trial.target_phil_str
  if job.rungroup.extra_phil_str is not None:
    phil_str += "\n" + job.rungroup.extra_phil_str

  from xfel.ui import load_phil_scope_from_dispatcher
  if dispatcher == "cxi.xtc_process":
    # Legacy LABELIT backend: always image pickles; phil is passed through
    # verbatim further below.
    image_format = 'pickle'
  else:
    orig_phil_scope = load_phil_scope_from_dispatcher(dispatcher)
    if os.path.isfile(dispatcher):
      # A path to a script rather than a dispatcher name: run it via python.
      dispatcher = 'libtbx.python ' + dispatcher
    from iotbx.phil import parse
    if job.rungroup.two_theta_low is not None or job.rungroup.two_theta_high is not None:
      # Enable radial averaging when two-theta limits were requested.
      override_str = """
      radial_average {
        enable = True
        show_plots = False
        verbose = False
        output_bins = False
      }
      """
      phil_scope = orig_phil_scope.fetch(parse(override_str))
    else:
      phil_scope = orig_phil_scope
    trial_params = phil_scope.fetch(parse(phil_str)).extract()

    image_format = job.rungroup.format
    if image_format == 'cbf':
      # Infer the detector mode from the detector address.
      if "rayonix" in job.rungroup.detector_address.lower():
        mode = "rayonix"
      elif "cspad" in job.rungroup.detector_address.lower():
        mode = "cspad"
      elif "jungfrau" in job.rungroup.detector_address.lower():
        mode = "jungfrau"
      else:
        assert False, "Couldn't figure out what kind of detector is specified by address %s"%job.rungroup.detector_address
    if hasattr(trial_params, 'format'):
      trial_params.format.file_format = image_format
      # NOTE(review): 'mode' is only bound when image_format == 'cbf' above;
      # this line would raise NameError otherwise — presumably a 'format'
      # scope implies cbf. TODO confirm.
      trial_params.format.cbf.mode = mode

  # A psana .cfg file is only needed for the legacy backends / pickle format.
  if job.rungroup.calib_dir is not None or job.rungroup.config_str is not None or \
     dispatcher == 'cxi.xtc_process' or image_format == 'pickle':
    config_path = os.path.join(configs_dir, identifier_string + ".cfg")
  else:
    config_path = None

  # Dictionary for formatting the submit phil and, if used, the labelit cfg file
  d = dict(
    # Generally for the LABELIT backend or image pickles
    address                   = job.rungroup.detector_address,
    default_calib_dir         = libtbx.env.find_in_repositories("xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0"),
    dark_avg_path             = job.rungroup.dark_avg_path,
    dark_stddev_path          = job.rungroup.dark_stddev_path,
    untrusted_pixel_mask_path = job.rungroup.untrusted_pixel_mask_path,
    detz_parameter            = job.rungroup.detz_parameter,
    gain_map_path             = job.rungroup.gain_map_path,
    gain_mask_level           = job.rungroup.gain_mask_level,
    beamx                     = job.rungroup.beamx,
    beamy                     = job.rungroup.beamy,
    energy                    = job.rungroup.energy,
    binning                   = job.rungroup.binning,
    two_theta_low             = job.rungroup.two_theta_low,
    two_theta_high            = job.rungroup.two_theta_high,
    # Generally for job submission
    dry_run                   = app.params.dry_run,
    dispatcher                = dispatcher,
    cfg                       = config_path,
    experiment                = app.params.facility.lcls.experiment, # LCLS specific parameter
    run_num                   = job.run.run,
    output_dir                = app.params.output_folder,
    use_ffb                   = app.params.facility.lcls.use_ffb, # LCLS specific parameter
    # Generally for both
    trial                     = job.trial.trial,
    rungroup                  = job.rungroup.rungroup_id,
    experiment_tag            = app.params.experiment_tag,
    calib_dir                 = job.rungroup.calib_dir,
    nproc                     = app.params.mp.nproc,
    nproc_per_node            = app.params.mp.nproc_per_node,
    queue                     = app.params.mp.queue or None,
    env_script                = app.params.mp.env_script[0] if len(app.params.mp.env_script) > 0 and len(app.params.mp.env_script[0]) > 0 else None,
    method                    = app.params.mp.method,
    target                    = target_phil_path,
    host                      = app.params.db.host,
    dbname                    = app.params.db.name,
    user                      = app.params.db.user,
    port                      = app.params.db.port,
  )
  # An empty (but set) password means "no password".
  if app.params.db.password is not None and len(app.params.db.password) == 0:
    d['password'] = None
  else:
    d['password'] = app.params.db.password

  # Write the per-job parameter phil file.
  phil = open(target_phil_path, "w")
  if dispatcher == 'cxi.xtc_process':
    # Legacy backend: phil passed through verbatim.
    phil.write(phil_str)
  else:
    extra_scope = None
    if hasattr(trial_params, 'format'):
      if image_format == "cbf":
        # Push rungroup settings into the cbf format parameters.
        trial_params.input.address = job.rungroup.detector_address
        trial_params.format.cbf.detz_offset = job.rungroup.detz_parameter
        trial_params.format.cbf.override_energy = job.rungroup.energy
        trial_params.format.cbf.invalid_pixel_mask = job.rungroup.untrusted_pixel_mask_path
        if mode == 'cspad':
          trial_params.format.cbf.cspad.gain_mask_value = job.rungroup.gain_mask_level
        elif mode == 'rayonix':
          trial_params.format.cbf.rayonix.bin_size = job.rungroup.binning
          trial_params.format.cbf.rayonix.override_beam_x = job.rungroup.beamx
          trial_params.format.cbf.rayonix.override_beam_y = job.rungroup.beamy
      trial_params.dispatch.process_percent = job.trial.process_percent
      if trial_params.input.known_orientations_folder is not None:
        trial_params.input.known_orientations_folder = trial_params.input.known_orientations_folder.format(run=job.run.run)
    else:
      # No format scope: set masks directly on spotfinder/integration.
      trial_params.spotfinder.lookup.mask = job.rungroup.untrusted_pixel_mask_path
      trial_params.integration.lookup.mask = job.rungroup.untrusted_pixel_mask_path

    if app.params.facility.name == 'lcls':
      # Write the locator file describing where to find the data.
      locator_path = os.path.join(configs_dir, identifier_string + ".loc")
      locator = open(locator_path, 'w')
      locator.write("experiment=%s\n"%app.params.facility.lcls.experiment) # LCLS specific parameter
      locator.write("run=%s\n"%job.run.run)
      locator.write("detector_address=%s\n"%job.rungroup.detector_address)
      if image_format == "cbf":
        if mode == 'rayonix':
          from xfel.cxi.cspad_ana import rayonix_tbx
          pixel_size = rayonix_tbx.get_rayonix_pixel_size(job.rungroup.binning)
          # Override the detector origin from beam center and detz.
          extra_scope = parse("geometry { detector { panel { origin = (%f, %f, %f) } } }"%(-job.rungroup.beamx * pixel_size, job.rungroup.beamy * pixel_size, -job.rungroup.detz_parameter))
          locator.write("rayonix.bin_size=%s\n"%job.rungroup.binning)
        elif mode == 'cspad':
          locator.write("cspad.detz_offset=%s\n"%job.rungroup.detz_parameter)
      locator.close()
      d['locator'] = locator_path
    else:
      d['locator'] = None

    if job.rungroup.two_theta_low is not None or job.rungroup.two_theta_high is not None:
      try:
        trial_params.radial_average.two_theta_low = job.rungroup.two_theta_low
        trial_params.radial_average.two_theta_high = job.rungroup.two_theta_high
      except AttributeError:
        pass # not all dispatchers support radial averaging

    # Only the diff against the dispatcher's master phil is written out.
    working_phil = phil_scope.format(python_object=trial_params)
    if extra_scope:
      working_phil = working_phil.fetch(extra_scope)
    diff_phil = orig_phil_scope.fetch_diff(source=working_phil)
    phil.write(diff_phil.as_str())
  phil.close()

  if config_path is not None:
    if dispatcher != 'cxi.xtc_process':
      d['untrusted_pixel_mask_path'] = None # Don't pass a pixel mask to mod_image_dict
                                            # as it will be used during dials processing directly
    # Assemble the psana configuration file.
    config_str = "[psana]\n"
    if job.rungroup.calib_dir is not None:
      config_str += "calib-dir=%s\n"%job.rungroup.calib_dir
    modules = []
    if job.rungroup.config_str is not None:
      # Collect module names already declared in the user-supplied config.
      for line in job.rungroup.config_str.split("\n"):
        if line.startswith('['):
          modules.append(line.lstrip('[').rstrip(']'))
    if dispatcher == 'cxi.xtc_process':
      modules.insert(0, 'my_ana_pkg.mod_radial_average')
      modules.extend(['my_ana_pkg.mod_hitfind:index','my_ana_pkg.mod_dump:index'])
    elif image_format == 'pickle':
      modules.insert(0, 'my_ana_pkg.mod_radial_average')
      modules.extend(['my_ana_pkg.mod_image_dict'])
    if app.params.facility.lcls.dump_shots:
      modules.insert(0, 'my_ana_pkg.mod_dump:shot')
    if len(modules) > 0:
      config_str += "modules = %s\n"%(" ".join(modules))
    if job.rungroup.config_str is not None:
      config_str += job.rungroup.config_str + "\n"
    if dispatcher == 'cxi.xtc_process' or image_format == 'pickle':
      d['address'] = d['address'].replace('.','-').replace(':','|') # old style address
      if dispatcher == 'cxi.xtc_process':
        template = open(os.path.join(libtbx.env.find_in_repositories("xfel/ui/db/cfgs"), "index_all.cfg"))
      elif image_format == 'pickle':
        template = open(os.path.join(libtbx.env.find_in_repositories("xfel/ui/db/cfgs"), "image_dict.cfg"))
      for line in template.readlines():
        config_str += line.format(**d)
      template.close()
      # Restore the modern-style address after template expansion.
      d['address'] = job.rungroup.detector_address
    cfg = open(config_path, 'w')
    cfg.write(config_str)
    cfg.close()
    if dispatcher != 'cxi.xtc_process':
      d['untrusted_pixel_mask_path'] = job.rungroup.untrusted_pixel_mask_path

  # Fill in the submission template for this dispatcher.
  submit_phil_path = os.path.join(configs_dir, identifier_string + "_submit.phil")
  submit_root = libtbx.env.find_in_repositories("xfel/ui/db/cfgs")
  if dispatcher in ['cxi.xtc_process', 'cctbx.xfel.xtc_process']:
    template = open(os.path.join(submit_root, "submit_xtc_process.phil"))
  else:
    test_root = os.path.join(submit_root, "submit_" + dispatcher + ".phil")
    if os.path.exists(test_root):
      # Dispatcher-specific submission template.
      template = open(test_root)
    else:
      if hasattr(trial_params, 'format'):
        template = open(os.path.join(submit_root, "submit_xtc_process.phil"))
      else:
        template = open(os.path.join(submit_root, "submit_xfel_process.phil"))
  phil = open(submit_phil_path, "w")
  if dispatcher == 'cxi.xtc_process':
    d['target'] = None # any target phil will be in mod_hitfind
  for line in template.readlines():
    phil.write(line.format(**d))
  d['target'] = target_phil_path
  template.close()
  phil.close()

  # Hand off to the submission machinery.
  from xfel.command_line.cxi_mpi_submit import Script as submit_script
  args = [submit_phil_path]
  if app.params.facility.name != 'lcls':
    args.append(job.run.path)
  return submit_script().run(args)
def __init__(self, img, index=None, termfile=None, paramfile=None,
             output_file=None, output_dir=None, backend='dials',
             action_code='spotfind', min_bragg=10, n_processors=1,
             verbose=False):
  '''Single-image processing thread.

  Stores the processing options, decides from ``action_code`` which stages
  will run, and (for the DIALS backend) assembles the PHIL parameters and
  constructs the processor.

  :param img: path to the image to process
  :param index: optional serial number of the image
  :param termfile: path to a file whose presence signals termination
  :param paramfile: optional PHIL file with backend parameters
  :param output_file: optional output filename
  :param output_dir: optional folder for output_file
  :param backend: processing backend; only 'dials' is handled here
  :param action_code: one of 'spotfind', 'index' or 'integrate'
  :param min_bragg: minimum number of Bragg spots to count as a hit
  :param n_processors: number of processors to use
  :param verbose: toggle verbose output
  '''
  self.img = img
  self.backend = backend
  self.paramfile = paramfile
  self.termfile = termfile
  self.n_processors = n_processors
  self.index = index
  self.verbose = verbose
  self.min_bragg = min_bragg

  # Assemble an absolute output path, if an output file was requested
  if output_file is not None:
    if output_dir is not None:
      self.output = os.path.join(os.path.abspath(output_dir), output_file)
    else:
      self.output = os.path.abspath(output_file)
  else:
    self.output = None
  Thread.__init__(self)

  # Determine which processes will be included.  Flags now default to
  # spotfinding-only for an unrecognized action_code; previously a typo
  # left them unset and caused an AttributeError downstream.
  self.run_indexing = action_code in ('index', 'integrate')
  self.run_integration = action_code == 'integrate'

  # Initialize IOTA DIALS Processor.  The backend check is applied
  # case-insensitively and only once: the original mixed .lower() and
  # exact comparisons, so backend='DIALS' built parameters but never
  # created a processor, and the parameter edits below ran even when
  # self.params had never been created.
  if self.backend.lower() == 'dials':
    if self.paramfile is not None:
      # A user-supplied PHIL file overrides the IOTA defaults
      with open(self.paramfile, 'r') as phil_file:
        phil_string = phil_file.read()
      user_phil = ip.parse(phil_string)
      self.dials_phil = phil_scope.fetch(source=user_phil)
    else:
      default_params, _ = write_defaults(method='dials',
                                         write_target_file=False,
                                         write_param_file=False)
      default_phil_string = '\n'.join(default_params)
      default_phil = ip.parse(default_phil_string)
      self.dials_phil = phil_scope.fetch(source=default_phil)
    self.params = self.dials_phil.extract()

    # Modify default DIALS parameters.
    # These parameters will be set no matter what: suppress all file
    # output, since results are kept in memory.
    self.params.output.datablock_filename = None
    self.params.output.indexed_filename = None
    self.params.output.strong_filename = None
    self.params.output.refined_experiments_filename = None
    self.params.output.integrated_filename = None
    self.params.output.integrated_experiments_filename = None
    self.params.output.profile_filename = None
    self.params.output.integration_pickle = None

    # These parameters will be set only if there's no user script
    if self.paramfile is None:
      self.params.indexing.stills.method_list = ['fft3d']
      self.params.spotfinder.threshold.dispersion.global_threshold = 75

    self.processor = IOTADialsProcessor(params=self.params,
                                        write_pickle=False)
phil_scope = parse( """ n_bins = 3000 .type = int .help = Number of bins in the radial average d_max = 20 .type = float d_min = 1.4 .type = float panel = None .type = int .help = Only use data from the specified panel peak_position = *xyzobs shoebox .type = choice .help = By default, use the d-spacing of the peak maximum. Shoebox: Use the \ coordinates of every pixel in the reflection shoebox. This entails \ intensity-weighted peaks. peak_weighting = *unit intensity .type = choice .help = The histogram may be intensity-weighted, but the results are \ typically not very good. downweight_weak = 0 .type = float .help = Subtract a constant from every intensity. May help filter out \ impurity peaks. split_panels = False .type = bool .help = Plot a pattern for each detector panel. xyz_offset = 0. 0. 0. .type = floats .help = origin offset in millimeters output { log = dials.powder_from_spots.log .type = str xy_file = None .type = str peak_file = None .type = str .help = Optionally, specify an output file for interactive peak picking in \ the plot window. Clicking and holding on the plot will bring up a \ vertical line to help. Releasing the mouse button will add the \ nearest local maximum to the output file peak_file. geom_file = None .type = path .help = Output a (possibly modified) geometry. For use with center_scan. } center_scan { d_min = 14 .type = float d_max = 15 .type = float step_px = None .type = float .multiple = True } """ )
master_phil = phil.parse(""" include scope libtbx.phil.interface.tracking_params input { map_1 = None .type = path .short_caption = Map 1 .help = A CCP4-formatted map .style = file_type:ccp4_map bold input_file map_2 = None .type = path .short_caption = Map 2 .help = A CCP4-formatted map .style = file_type:ccp4_map bold input_file mtz_1 = None .type = path .short_caption = Map 1 .help = MTZ file containing map .style = file_type:hkl bold input_file process_hkl child:map_labels:mtz_label_1 mtz_2 = None .type = path .short_caption = Map 2 .help = MTZ file containing map .style = file_type:hkl bold input_file process_hkl child:map_labels:mtz_label_2 mtz_label_1 = None .type = str .short_caption = Data label .help = Data label for complex map coefficients in MTZ file .style = renderer:draw_map_arrays_widget mtz_label_2 = None .type = str .short_caption = Data label .help = Data label for complex map coefficients in MTZ file .style = renderer:draw_map_arrays_widget } options { resolution_factor = 0.25 .type = float .short_caption = Resolution gridding factor .help = Determines grid spacing in map } """, process_includes=True)
} } And crystal looks like (for example): unit_cell = 77, 77, 37, 90, 90, 90 space_group = P43212 detector.phil describes a single panel according to the dxtbx specifications while crystal.phil describes a unit cell and space group. """ crystal_scope = parse(""" space_group = None .type = space_group .help = "Target space group." unit_cell = None .type = unit_cell .help = "Target unit cell." """) phil_scope = parse(""" energy = 9500.0 .type = float d_min = 2.0 .type = float reference_reflection = 20 .type = int bandpass = None .type = float .help = Full width show_plots=True
if __name__ == '__main__': import sys # TODO use phil master_phil=phil.parse(""" recviewer .short_caption = Reciprocal space viewer { map_file = None .type = path .optional = False .multiple=True .short_caption = Map file max_resolution = 6 .type = float .optional = True .short_caption = Resolution limit grid_size = 192 .type = int .optional = True } """) # working_phil = master_phil.command_line_argument_interpreter().process(sys.argv[1]) # working_phil.show() # params = working_phil.extract() # print params # print params.recviewer.map_file # print params.recviewer.max_resolution
def run(args):
  """Analyze detector geometry against a target unit cell and resolution.

  Reads a detector PHIL file (args[0]) and a crystal PHIL file (args[1]),
  then for the given beam energy reports: expected spot separations along
  each crystal axis, per-pixel resolutions, the fraction of pixels in the
  highest-resolution shell, and how much of that shell's ring projects
  onto the detector.  Optionally shows resolution maps with matplotlib.

  :param args: list of command-line arguments; args[0] = detector phil,
               args[1] = crystal phil, the rest are free phil assignments
               (energy, d_min, bandpass, etc.)
  """
  # read in phil files (detector and crystal)
  d_params = detector_phil_scope.fetch(parse(file_name=args[0])).extract()
  detector = DetectorFactory.from_phil(d_params.geometry)
  print(detector)
  # The analysis below assumes a single-panel detector
  assert len(detector) == 1
  panel = detector[0]
  c_params = crystal_scope.fetch(parse(file_name=args[1])).extract()
  unit_cell = c_params.unit_cell
  sg_info = c_params.space_group
  # Build a crystal model whose basis vectors are the real-space cell axes
  a = sqr(unit_cell.orthogonalization_matrix()) * col((1, 0, 0))
  b = sqr(unit_cell.orthogonalization_matrix()) * col((0, 1, 0))
  c = sqr(unit_cell.orthogonalization_matrix()) * col((0, 0, 1))
  crystal = Crystal(a, b, c, sg_info.group())
  print(crystal)
  # load additional parameters (free phil strings from the command line)
  user_phil = []
  for arg in args[2:]:
    user_phil.append(parse(arg))
  params = phil_scope.fetch(sources=user_phil).extract()
  energy = float(params.energy)
  # 12398.4 eV*Angstrom: photon energy to wavelength conversion
  wavelength = 12398.4 / energy
  # Incident beam vector, |s0| = 1/lambda, pointing along -z
  s0 = col((0, 0, -1 / wavelength))
  if params.bandpass is not None:
    # Wavelengths at the low/high-energy edges of the bandpass
    wavelength1 = 12398.4 / (energy - (params.bandpass / 2))
    wavelength2 = 12398.4 / (energy + (params.bandpass / 2))
  vals = []
  print(
      "Reference reflections 1 and 2, resolutions, two theta (deg) 1 and 2:")
  # For each crystal axis, compare two adjacent reference reflections to
  # estimate the spot separation on the detector
  for axis in range(3):
    m1 = [0, 0, 0]
    m1[axis] += params.reference_reflection
    m2 = [0, 0, 0]
    m2[axis] += params.reference_reflection + 1
    # n Lambda = 2dsin(theta)
    d = unit_cell.d(flex.miller_index([m1, m2]))
    try:
      if params.bandpass:
        tt_1 = math.asin(wavelength1 / (2 * d[0])) * 2
        tt_2 = math.asin(wavelength2 / (2 * d[1])) * 2
      else:
        tt_1 = math.asin(wavelength / (2 * d[0])) * 2
        tt_2 = math.asin(wavelength / (2 * d[1])) * 2
    except ValueError:
      # domain error if resolution is too high
      continue
    # Compute two s1 vectors
    s1_1 = s0.rotate(col((0, 1, 0)), -tt_1)
    s1_2 = s0.rotate(col((0, 1, 0)), -tt_2)
    print(m1, m2, list(d), tt_1 * 180 / math.pi, tt_2 * 180 / math.pi)
    # Get panel intersections and compute spacing
    v1 = col(panel.get_ray_intersection_px(s1_1))
    v2 = col(panel.get_ray_intersection_px(s1_2))
    vals.append((v1 - v2).length())
  print("Spot separations:", vals)
  print("Smallest spot separation: %7.1f px" % (min(vals)))
  # Hack for quick tests
  assert len(detector) == 1
  panel = detector[0]
  fast, slow = panel.get_image_size()
  f = fast // 2
  s = slow // 2
  # Best resolution fully inscribed on the panel: probe the four edge midpoints
  print("Inscribed resolution, assuming single panel centered detector %.3f:"% \
    min([panel.get_resolution_at_pixel(s0, p) for p in [(f,0),(fast,s),(f,slow),(0,s)]]))
  print("Computing pixel resolutions...")
  # One flex.double (slow x fast) of d-spacings per panel; note this loop
  # rebinds `panel`, `fast`, `slow`, `s` and `f` from above
  resolutions = []
  for panel in detector:
    fast, slow = panel.get_image_size()
    resolutions.append(flex.double(flex.grid(slow, fast)))
    for s in range(slow):
      for f in range(fast):
        resolutions[-1][s, f] = panel.get_resolution_at_pixel(s0, (f, s))
  print("Done")
  # Count pixels in the thin shell between d_min and d_min * 1.1
  d_max = params.d_min * 1.1
  in_range = 0
  total = 0
  for r in resolutions:
    in_range += len(r.as_1d().select((r.as_1d() >= params.d_min) & (r.as_1d() <= d_max)))
    total += len(r)
  print("%d of %d pixels between %.2f and %.2f angstroms (%.1f%%)" % (in_range, total, params.d_min, d_max, 100 * in_range / total))
  # Ideal pixel count for that shell: annulus area on a flat detector at
  # this distance (assumes square pixels; uses the fast pixel size)
  two_theta_d_min = math.asin(wavelength / (2 * params.d_min)) * 2
  d_min_radius_mm = math.tan(two_theta_d_min) * panel.get_distance()
  d_min_radius_px = d_min_radius_mm / panel.get_pixel_size()[0]
  possible_coverage_d_min = math.pi * d_min_radius_px**2
  two_theta_d_max = math.asin(wavelength / (2 * d_max)) * 2
  d_max_radius_mm = math.tan(two_theta_d_max) * panel.get_distance()
  d_max_radius_px = d_max_radius_mm / panel.get_pixel_size()[0]
  possible_coverage_d_max = math.pi * d_max_radius_px**2
  possible_coverage = possible_coverage_d_min - possible_coverage_d_max
  print(
      "Ideal detector would include %d pixels between %.2f-%.2f angstroms" %
      (possible_coverage, params.d_min, d_max))
  print("Coverage: %d/%d = %.1f%%" % (in_range, possible_coverage,
                                      100 * in_range / possible_coverage))
  # Ray-trace the diffraction cone for the shell: 11 two-theta samples
  # swept through 360 degrees in half-degree steps, then test which rays
  # hit the detector.  Note `s0` is rebound to a set of unit vectors here.
  two_theta_values = flex.double()
  step = (two_theta_d_max - two_theta_d_min) / 10
  for i in range(11):
    two_theta_values.append(two_theta_d_max + (step * i))
  s0 = flex.vec3_double(len(two_theta_values), (0, 0, -1))
  v = s0.rotate_around_origin((0, 1, 0), two_theta_values)
  all_v = flex.vec3_double()
  for i in range(720):
    i = i / 2  # half-degree azimuthal steps (i becomes a float)
    all_v.extend(v.rotate_around_origin((0, 0, -1), i * math.pi / 180))
  intersecting_rays = flex.bool()
  # `panel` is rebound to a panel index by get_ray_intersection here
  for i in range(len(all_v)):
    try:
      panel, mm = detector.get_ray_intersection(all_v[i])
    except RuntimeError:
      intersecting_rays.append(False)
    else:
      intersecting_rays.append(panel >= 0 and panel < len(detector))
  print("%d rays out of %d projected between %f and %f intersected the detector (%.1f%%)"% \
    (intersecting_rays.count(True), len(intersecting_rays), params.d_min, d_max, intersecting_rays.count(True)*100/len(intersecting_rays)))
  # Clamp very low resolutions for display purposes
  resolutions[0].set_selected(resolutions[0] > 50, 50)
  if params.show_plots:
    # First figure: full resolution map; second: only the d_min..d_max shell
    plt.imshow(resolutions[0].as_numpy_array(), cmap='gray')
    plt.colorbar()
    plt.figure()
    r = resolutions[0]
    sel = (r.as_1d() >= params.d_min) & (r.as_1d() <= d_max)
    r.as_1d().set_selected(~sel, 0)
    plt.imshow(r.as_numpy_array(), cmap='gray')
    plt.colorbar()
    plt.show()
f.close() ######################################################################### # new dials from iotbx.phil import parse from dxtbx.datablock import DataBlockFactory from dials.array_family import flex #from dials.algorithms.indexing.fft1d import indexer_fft1d as indexer from dials.algorithms.indexing.fft3d import indexer_fft3d as indexer import copy, os phil_scope = parse(''' include scope dials.algorithms.peak_finding.spotfinder_factory.phil_scope include scope dials.algorithms.indexing.indexer.index_only_phil_scope include scope dials.algorithms.refinement.refiner.phil_scope indexing.known_symmetry.unit_cell=43,53,89,90,90,90 .type = unit_cell indexing.known_symmetry.space_group=P212121 .type = space_group ''', process_includes=True) params = phil_scope.extract() params.refinement.parameterisation.crystal.scan_varying = False filenames = [] for arg in args: if "indexing.data" in arg: path = arg.split('=')[1] if os.path.isdir(path): for subfile in os.listdir(path): subpath = os.path.join(path, subfile) if os.path.isfile(subpath):
# initialize data manager dm = DataManager() # file IO # this map has origin at (0,0,0) dm.process_model_file("../6ui6_fit_in_corner_map.pdb") dm.process_real_map_file("../emd_20669_corner_zero.map") # this map has origin at data.all()/2 # dm.process_model_file("../6ui6.pdb") # dm.process_real_map_file("../emd_20669.map") # Run Tom's map_symmetry tool mm = dm.get_real_map() params = phil.parse(map_symmetry_program.Program.master_phil_str).extract() ncs_obj, cc_avg, score = run_get_ncs_from_map( params=params, map_data=mm.map_data(), crystal_symmetry=mm.crystal_symmetry(), ncs_obj=None) # get ncs_group object from the map_symmetry output ncs_groups = ncs_obj.ncs_groups() assert (len(ncs_groups) == 1) ncs_group = ncs_groups[0] # or if reading from file... # ncs_object = ncs() # ncs_object.read_ncs("../MapSymmetry_4/symmetry_from_map.ncs_spec")
dials.generate_mask datablock.json border=5 dials.generate_mask datablock.json \\ untrusted.rectangle=50,100,50,100 \\ untrusted.circle=200,200,100 dials.generate_mask datablock.json resolution.d_max=2.00 ''' phil_scope = parse(""" output { mask = mask.pickle .type = str .help = "Name of output mask file" } include scope dials.util.masking.phil_scope """, process_includes=True) class Script(object): ''' A class to encapsulate the script. ''' def __init__(self): ''' Initialise the script. ''' from dials.util.options import OptionParser import libtbx.load_env # Create the parser usage = "usage: %s [options] datablock.json" % libtbx.env.dispatcher_name
def from_parameters(params=None, datablock=None):
  '''
  Given a set of parameters, construct the spot finder

  :param params: The input parameters (phil extract); defaults are fetched
                 from phil_scope when None
  :param datablock: Optional datablock; used to decide whether all imagesets
                    are stills (affects 2D shoebox handling)
  :returns: The spot finder instance

  NOTE: params is mutated in place (params.spotfinder.lookup.mask is
  replaced by the loaded mask object, and mp.method 'none' becomes None).
  '''
  from dials.util.masking import MaskGenerator
  from dials.algorithms.spot_finding.finder import SpotFinder
  from libtbx.phil import parse
  from dxtbx.imageset import ImageSweep

  if params is None:
    params = phil_scope.fetch(source=parse("")).extract()

  # Decide whether shoeboxes can be dropped after 2D spot finding:
  # forced 2D, or a datablock consisting solely of stills, qualifies
  # (only when shoebox output was not requested).
  if params.spotfinder.force_2d and params.output.shoeboxes is False:
    no_shoeboxes_2d = True
  elif datablock is not None and params.output.shoeboxes is False:
    no_shoeboxes_2d = False
    all_stills = True
    for imageset in datablock.extract_imagesets():
      if isinstance(imageset, ImageSweep):
        all_stills = False
        break
    if all_stills:
      no_shoeboxes_2d = True
  else:
    no_shoeboxes_2d = False

  # Read in the lookup files (replaces the path in params with the
  # loaded mask object, which is reused below)
  mask = SpotFinderFactory.load_image(params.spotfinder.lookup.mask)
  params.spotfinder.lookup.mask = mask

  # Configure the filter options
  filter_spots = SpotFinderFactory.configure_filter(params)

  # Create the threshold strategy
  threshold_function = SpotFinderFactory.configure_threshold(
      params, datablock)

  # Configure the mask generator
  mask_generator = MaskGenerator(params.spotfinder.filter)

  # Make sure 'none' is interpreted as None
  if params.spotfinder.mp.method == 'none':
    params.spotfinder.mp.method = None

  # Setup the spot finder
  return SpotFinder(
      threshold_function=threshold_function,
      mask=params.spotfinder.lookup.mask,
      filter_spots=filter_spots,
      scan_range=params.spotfinder.scan_range,
      write_hot_mask=params.spotfinder.write_hot_mask,
      hot_mask_prefix=params.spotfinder.hot_mask_prefix,
      mp_method=params.spotfinder.mp.method,
      mp_nproc=params.spotfinder.mp.nproc,
      mp_njobs=params.spotfinder.mp.njobs,
      mp_chunksize=params.spotfinder.mp.chunksize,
      max_strong_pixel_fraction=params.spotfinder.filter.max_strong_pixel_fraction,
      compute_mean_background=params.spotfinder.compute_mean_background,
      region_of_interest=params.spotfinder.region_of_interest,
      mask_generator=mask_generator,
      min_spot_size=params.spotfinder.filter.min_spot_size,
      max_spot_size=params.spotfinder.filter.max_spot_size,
      no_shoeboxes_2d=no_shoeboxes_2d,
      min_chunksize=params.spotfinder.mp.min_chunksize)
name = "" .type = str .help = Database name user = "" .type=str .help = Database user name password = "" .type = str .help = Database password. Will be cached as plain text! verbose = False .type = bool .expert_level = 2 .help = Print to the terminal all database queries } """ master_phil_scope = parse(master_phil_str + db_phil_str, process_includes=True) settings_dir = os.path.join(os.path.expanduser('~'), '.cctbx.xfel') settings_file = os.path.join(settings_dir, 'settings.phil') known_dials_dispatchers = { 'cctbx.xfel.xtc_process': 'xfel.command_line.xtc_process', 'cctbx.xfel.process': 'xfel.command_line.xfel_process', 'dials.stills_process': 'dials.command_line.stills_process' } def load_cached_settings(): if os.path.exists(settings_file): user_phil = parse(file_name = settings_file) return master_phil_scope.fetch(source = user_phil).extract() else:
def generate_phil_scope():
  '''Assemble the spot-finder PHIL scope.

  Builds the base "spotfinder" scope (lookup files, hot mask, scan range,
  ROI, filter and multiprocessing options) and then grafts in the
  threshold-algorithm parameters contributed by the registered
  SpotFinderThreshold extensions.

  :returns: the complete phil scope
  '''
  from iotbx.phil import parse
  import dials.extensions
  phil_scope = parse('''
  spotfinder
    .help = "Parameters used in the spot finding algorithm."
  {
    include scope dials.data.lookup.phil_scope

    write_hot_mask = False
      .type = bool
      .help = "Write the hot mask"

    hot_mask_prefix = 'hot_mask'
      .type = str
      .help = "Prefix for the hot mask pickle file"

    force_2d = False
      .type = bool
      .help = "Do spot finding in 2D"

    scan_range = None
      .help = "The range of images to use in finding spots. The ranges are"
              "inclusive (e.g. j0 <= j < j1)."
              "For sweeps the scan range is interpreted as the literal scan"
              "range. Whereas for imagesets the scan range is interpreted as"
              "the image number in the imageset. Multiple ranges can be"
              "specified by repeating the scan_range= parameter."
      .type = ints(size=2)
      .multiple = True

    region_of_interest = None
      .type = ints(size=4)
      .help = "A region of interest to look for spots."
              "Specified as: x0,x1,y0,y1"
              "The pixels x0 and y0 are included in the range but the pixels x1 and y1"
              "are not. To specify an ROI covering the whole image set"
              "region_of_interest=0,width,0,height."

    compute_mean_background = False
      .type = bool
      .help = "Compute the mean background for each image"

    filter
      .help = "Parameters used in the spot finding filter strategy."
    {
      min_spot_size = Auto
        .help = "The minimum number of contiguous pixels for a spot"
                "to be accepted by the filtering algorithm."
        .type = int(value_min=1)

      max_spot_size = 100
        .help = "The maximum number of contiguous pixels for a spot"
                "to be accepted by the filtering algorithm."
        .type = int(value_min=1, allow_none=False)

      max_separation = 2
        .help = "The maximum peak-to-centroid separation (in pixels)"
                "for a spot to be accepted by the filtering algorithm."
        .type = float(value_min=0)
        .expert_level = 1

      max_strong_pixel_fraction = 0.25
        .help = "If the fraction of pixels in an image marked as strong is"
                "greater than this value, throw an exception"
        .type = float(value_min=0, value_max=1)

      background_gradient
        .expert_level=2
      {
        filter = False
          .type = bool
        background_size = 2
          .type = int(value_min=1)
        gradient_cutoff = 4
          .type = float(value_min=0)
      }

      spot_density
        .expert_level=2
      {
        filter = False
          .type = bool
      }

      include scope dials.util.masking.phil_scope
    }

    mp {
      method = *none drmaa sge lsf pbs
        .type = choice
        .help = "The cluster method to use"

      njobs = 1
        .type = int(value_min=1)
        .help = "The number of cluster jobs to use"

      nproc = 1
        .type = int(value_min=1)
        .help = "The number of processes to use per cluster job"

      chunksize = auto
        .type = int(value_min=1)
        .help = "The number of jobs to process per process"

      min_chunksize = 20
        .type = int(value_min=1)
        .help = "When chunksize is auto, this is the minimum chunksize"
    }
  }
  ''', process_includes=True)

  # Graft the threshold-algorithm parameters from the registered
  # extensions into the spotfinder scope
  main_scope = phil_scope.get_without_substitution("spotfinder")
  assert (len(main_scope) == 1)
  main_scope = main_scope[0]
  main_scope.adopt_scope(dials.extensions.SpotFinderThreshold.phil_scope())

  return phil_scope
from cctbx import maptbx import iotbx.ccp4_map from cctbx import crystal from libtbx.utils import Sorry from scitbx.array_family import flex import os, sys master_phil = phil.parse(""" include scope libtbx.phil.interface.tracking_params input { map_1 = None .type = path .short_caption = Map 1 .help = A CCP4-formatted map .style = file_type:ccp4_map bold input_file map_2 = None .type = path .short_caption = Map 2 .help = A CCP4-formatted map .style = file_type:ccp4_map bold input_file } """, process_includes=True) master_params = master_phil def show_overall_statistics(s, header): print header print " min/max/mean: %6.4f %6.4f %6.4f"%(s.min(), s.max(), s.mean()) print " kurtosis : %6.4f" % s.kurtosis() print " skewness : %6.4f" % s.skewness()
.type = float .help = tape thickness sigma = 0.005 .type = float } smart_sigmas = False .type = bool .help = apply spot-specific sigma corrections using kapton param sigmas within_spot_sigmas = True .type = bool .help = calculate initial per-spot sigmas based on variance across pixels in the spot. .help = turn this off to get a major speed-up } }""" absorption_phil_scope = phil.parse(absorption_defs, process_includes=True) class get_absorption_correction(object): def __init__(self): # Kapton, or polyimide. C22H10N2O5 Density=1.43, Angle=90.deg # Photon Energy (eV), Atten Length (microns) data = """6000.00 482.643 6070.00 500.286 6140.00 518.362 6210.00 536.896 6280.00 555.873 6350.00 575.302 6420.00 595.191 6490.00 615.552 6560.00 636.382
""" phil_scope = phil.parse( """ rs_mapper .short_caption = Reciprocal space mapper { map_file = rs_mapper_output.mrc .type = path .optional = False .multiple= False .short_caption = Output map file max_resolution = 6 .type = float .optional = True .short_caption = Resolution limit grid_size = 192 .type = int .optional = True reverse_phi = False .type = bool .optional = True ignore_mask = False .type = bool .optional = True .short_caption = Ignore masks from dxtbx class } """, process_includes=True, )
master_phil = parse(""" general .short_caption = "General settings" { check_image_files_readable = True .type = bool .expert_level = 2 backstop_mask = None .type = path .short_caption = "Backstop mask" } xds { z_min = 0.0 .type = float delphi = 5 .type = float delphi_small = 30 .type = float untrusted_ellipse = None .type = ints(size = 4) .multiple = True untrusted_rectangle = None .type = ints(size = 4) .multiple = True trusted_region = None .type = floats(size = 2) profile_grid_size = None .type = ints(size = 2) keep_outliers = False .type = bool .help = "Do not remove outliers in integration and scaling" correct { refine = *DISTANCE *BEAM *AXIS *ORIENTATION *CELL *POSITION .type = choice(multi = True) .help = 'what to refine in the CORRECT step' air = None .type = float(value_min=0) } integrate { refine = *ORIENTATION *CELL *BEAM *DISTANCE AXIS *POSITION .type = choice(multi = True) .help = 'what to refine in first pass of integration' refine_final = *ORIENTATION *CELL BEAM DISTANCE AXIS POSITION .type = choice(multi = True) .help = 'what to refine in final pass of integration' fix_scale = False .type = bool delphi = 0 .type = float reflecting_range = 0 .type = float reflecting_range_esd = 0 .type = float beam_divergence = 0 .type = float beam_divergence_esd = 0 .type = float reintegrate = true .type = bool } init { fix_scale = False .type = bool } defpix { value_range_for_trusted_detector_pixels = None .type = ints(size=2) } index { refine = *ORIENTATION *CELL *BEAM *DISTANCE *AXIS *POSITION .type = choice(multi = True) .help = 'what to refine in autoindexing' debug = *OFF ON .type = choice(multi = False) .help = 'output enganced debugging for indexing' xparm = None .type = path .help = 'Use refined GXPARM.XDS geometry in indexing' xparm_ub = None .type = path .help = 'Use refined GXPARM.XDS orientation matrix in indexing' } colspot { minimum_pixels_per_spot = 1 .type = int } xscale { min_isigma = 3.0 .type = float zero_dose = False .type = bool .help = "Enable 
XSCALE zero dose extrapolation" } merge2cbf { merge_n_images = 2 .type = int(value_min=1) .help = "Number of input images to average into a single output image" data_range = None .type = ints(size=2, value_min=0) moving_average = False .type = bool .help = "If true, then perform a moving average over the sweep, i.e. given" "images 1, 2, 3, 4, 5, 6, ..., with averaging over three images," "the output frames would cover 1-3, 2-4, 3-5, 4-6, etc." "Otherwise, a straight summation is performed:" " 1-3, 4-6, 7-9, etc." } } dials .short_caption = "DIALS settings" { fix_geometry = False .type = bool .help = "Whether or not to refine geometry in dials.index and dials.refine." "Most useful when also providing a reference geometry to xia2." .short_caption = "Fix geometry" .expert_level = 1 outlier .short_caption = "Centroid outlier rejection" { algorithm = null *auto mcd tukey sauter_poon .type = choice .short_caption = "Outlier rejection algorithm" .expert_level = 1 } fast_mode = False .type = bool .help = "Set various parameters for rapid processing, compromising on quality" .short_caption = "Fast mode" .expert_level = 1 close_to_spindle_cutoff = 0.02 .type = float(value_min=0.0) .short_caption = "Closeness to the spindle cutoff for including reflections in refinement" .expert_level = 2 find_spots .short_caption = "Spot finding" { phil_file = None .type = path .short_caption = "phil file to pass to dials.find_spots" .expert_level = 1 min_spot_size = Auto .type = int .help = "The minimum number of contiguous pixels for a spot to be" "accepted by the filtering algorithm." .short_caption = "Minimum spot size" .expert_level = 1 min_local = 0 .type = int .help = "The minimum number of pixels under the image processing" "kernel that are need to do the thresholding operation." "Setting the value between 2 and the total number of pixels" "under the kernel will force the algorithm to use that number" "as the minimum. 
If the value is less than or equal to zero," "then the algorithm will use all pixels under the kernel. In" "effect this will add a border of pixels which are always" "classed as background around the edge of the image and around" "any masked out pixels." .expert_level=2 sigma_strong = None .type = float .help = "The number of standard deviations above the mean in the local" "area above which the pixel will be classified as strong." .short_caption = "Strong pixel sigma cutoff" .expert_level = 1 filter_ice_rings = False .type = bool .short_caption = "Filter ice rings" kernel_size = 3 .type = int .help = "The size of the local area around the spot in which to" "calculate the mean and variance. The kernel is given as a box" .expert_level = 1 global_threshold = None .type = float .help = "The global threshold value. Consider all pixels less than" "this value to be part of the background." .short_caption = "Global threshold cutoff" .expert_level = 1 } index .short_caption = "Indexing" { phil_file = None .type = path .short_caption = "phil file to pass to dials.index" .expert_level = 1 method = fft1d *fft3d real_space_grid_search .type = choice .short_caption = "Indexing method" max_cell = 0.0 .type = float .help = "Maximum length of candidate unit cell basis vectors (in Angstrom)." .short_caption = "Maximum cell length" .expert_level = 1 fft3d.n_points = None .type = int(value_min=0) .short_caption = "Number of reciprocal space grid points" .expert_level = 2 reflections_per_degree = 100 .type = int .short_caption = "Number of reflections per degree for random subset" .expert_level = 1 histogram_binning = linear log .type = choice .help = "Choose between linear or logarithmic bins for nearest neighbour" "histogram analysis." 
.expert_level = 2 } refine .short_caption = "Refinement" .expert_level = 1 { phil_file = None .type = path .short_caption = "phil file to pass to dials.refine" scan_static = True .expert_level = 2 .type = bool scan_varying = True .type = bool .short_caption = "Fit a scan-varying model" interval_width_degrees = 36.0 .type = float(value_min=0.) .help = "Width of scan between checkpoints in degrees" .short_caption = "Interval width between checkpoints (if scan-varying)" reflections_per_degree = 100 .type = int .short_caption = "Number of reflections per degree for random subset" } integrate .expert_level = 1 .short_caption = "Integration" { phil_file = None .type = path .short_caption = "phil file to pass to dials.integrate" background_outlier_algorithm = *null nsigma truncated normal tukey mosflm .type = choice .help = "Outlier rejection performed prior to background fit" .short_caption = "Outlier rejection method" background_algorithm = simple null *glm .type = choice .short_caption = "Background fit method" use_threading = False .type = bool .short_caption = "Use threading" .expert_level = 2 include_partials = True .type = bool .help = "Include partial reflections (scaled) in output" .short_caption = "Include partials" } } ccp4 .short_caption = "CCP4 data reduction options" .expert_level = 1 { reindex .short_caption = "reindex" { program = 'pointless' .type = str } aimless .short_caption = "aimless" { intensities = summation profile *combine .type = choice surface_tie = 0.001 .type = float .short_caption = "Surface tie" surface_link = True .type = bool .short_caption = "Surface link" secondary = 6 .type = int .expert_level = 2 .short_caption = "Aimless # secondary harmonics" } pointless .short_caption = "pointless" { chirality = chiral nonchiral centrosymmetric .type = choice } truncate .short_caption = "truncate" { program = 'ctruncate' .type = str } } strategy .multiple = True .optional = True .short_caption = "Strategy" .expert_level = 1 { name = None .type = 
str .help = "A name for this strategy." description = None .type = str .help = "A description associated with this strategy." i_over_sigi = 2.0 .type = float(value_min=0.0) .help = "Target <I/SigI> at highest resolution." minimize_total_time = False .type = bool target_resolution = None .type = float(value_min=0.0) max_total_exposure = None .type = float(value_min=0.0) .help = "maximum total exposure/measurement time, sec, default unlimited" anomalous = False .type = bool dose_rate = 0.0 .type = float(value_min=0.0) .help = "dose rate, Gray per Second, default 0.0 - radiation damage neglected" shape = 1.0 .type = float(value_min=0.0) .help = "shape factor, default 1, - increase for large crystal in a small beam" susceptibility = 1.0 .type = float(value_min=0.0) .help = "increase for radiation-sensitive crystals" completeness = 0.99 .type = float(value_min=0.0, value_max=1.0) .help = "Target completeness" multiplicity = None .type = float(value_min=0.0) .help = "Target multiplicity" phi_range = None .type = floats(size=2) .help = "Starting phi angle and total phi rotation range" min_oscillation_width = 0.05 .type = float(value_min=0.0) .help = "Minimum rotation width per frame (degrees)" xml_out = None .type = path .help = "XML-formatted data stored in file" max_rotation_speed = None .type = float(value_min=0.0) .help = "Maximum rotation speed (deg/sec)" min_exposure = None .type = float(value_min=0.0) .help = "Minimum exposure per frame (sec)" } xia2.settings .short_caption = "xia2 settings" { pipeline = 2d 2di 3d 3dd 3di 3dii *dials .short_caption = "main processing pipeline" .help = "Select the xia2 main processing pipeline" " 2d: MOSFLM, LABELIT (if installed), AIMLESS" " 2di: as 2d, but use 3 wedges for indexing" " 3d: XDS, XSCALE, LABELIT" " 3di: as 3d, but use 3 wedges for indexing" " 3dii: XDS, XSCALE, using all images for autoindexing" " 3dd: as 3d, but use DIALS for indexing" "dials: DIALS, AIMLESS" .type = choice small_molecule = False .type = bool 
.short_caption = "Use small molecule settings" .help = "Assume that the dataset comes from a" "chemical crystallography experiment" .expert_level = 1 failover = False .type = bool .short_caption = 'Fail over gracefully' .help = 'If processing a sweep fails, keep going' .expert_level = 1 multi_crystal = False .type = bool .short_caption = 'Settings for working with multiple crystals' .help = 'Settings for working with multiple crystals' .expert_level = 1 interactive = False .type = bool .short_caption = 'Interactive indexing' .expert_level = 1 project = 'AUTOMATIC' .type = str .help = "A name for the data processing project" crystal = 'DEFAULT' .type = str .help = "A name for the crystal" input .short_caption = "xia2 input settings" { atom = None .type = str .short_caption = "Heavy atom name, optional" .help = "Set the heavy atom name, if appropriate" anomalous = Auto .type = bool .short_caption = "Separate anomalous pairs in merging" .expert_level = 1 working_directory = None .type = path .short_caption = "Working directory (i.e. not $CWD)" .expert_level = 1 image = None .type = path .multiple = True .help = "image=/path/to/an/image_001.img" .short_caption = "Path to an image file" .expert_level = 1 json = None .type = path .multiple = True .help = "dxtbx-format datablock.json file which can be provided as an " "alternative source of images header information to avoid the " "need to read all the image headers on start-up." .short_caption = "Take headers from json file" .expert_level = 1 reference_geometry = None .type = path .multiple = True .help = "Experimental geometry from this datablock.json or " "experiments.json will override the geometry from the " "image headers." .short_caption = "Take experimental geometry from json file" .expert_level = 1 xinfo = None .type = path .help = "Provide an xinfo file as input as alternative to directory " "containing image files." 
.short_caption = "Use xinfo instead of image directory" .expert_level = 1 reverse_phi = False .type = bool .help = "Reverse the direction of the phi axis rotation." .short_caption = "Reverse rotation axis" .expert_level = 1 gain = None .type = float .help = "Detector gain if using DIALS" .short_caption = "Detector gain" .expert_level = 1 min_images = 10 .type = int(value_min=1) .help = "Minimum number of matching images to include a sweep in processing." .short_caption = "Minimum number of matching images" .expert_level = 1 min_oscillation_range = None .type = int(value_min=0) .help = "Minimum oscillation range of a sweep for inclusion in processing." .short_caption = "Minimum oscillation range" .expert_level = 1 include scope dials.util.options.tolerance_phil_scope include scope dials.util.options.geometry_phil_scope include scope dials.util.options.format_phil_scope } sweep .multiple = True .expert_level = 2 .short_caption = "xia2 sweep" { id = None .type = str range = None .type = ints(size=2) exclude = False .type = bool } scale .expert_level = 1 .short_caption = "Scaling" { directory = Auto .type = str .short_caption = "xia2 scale directory" free_fraction = 0.05 .type = float(value_min=0.0, value_max=1.0) .help = "Fraction of free reflections" free_total = None .type = int(value_min=0) .help = "Total number of free reflections" freer_file = None .type = path .help = "Copy freer flags from this file" reference_reflection_file = None .type = path .help = "Reference file for testing of alternative indexing schemes" model = *decay *modulation *absorption partiality .type = choice(multi=True) .short_caption = "Scaling models to apply" scales = *rotation batch .type = choice .short_caption = "Smoothed or batch scaling" } space_group = None .type = space_group .help = "Provide a target space group to the indexing program" .short_caption = "Space group" unit_cell = None .type = unit_cell .help = "Provide a target unit cell to the indexing program" .short_caption = 
"Unit cell (requires the space group to be set)" resolution .short_caption = "Resolution" { keep_all_reflections = Auto .type = bool .help = "Keep all data regardless of resolution criteria" .short_caption = "Keep all data (default for small molecule mode)" d_max = None .type = float(value_min=0.0) .help = "Low resolution cutoff." .short_caption = "Low resolution cutoff" d_min = None .type = float(value_min=0.0) .help = "High resolution cutoff." .short_caption = "High resolution cutoff" include scope xia2.Modules.Resolutionizer.phil_str } unify_setting = False .type = bool .help = "For one crystal, multiple orientations, unify U matrix" .short_caption = "Unify crystal orientations" .expert_level = 1 beam_centre = None .type = floats(size=2) .help = "Beam centre (x,y) coordinates (mm, mm) using the Mosflm convention" .short_caption = "Beam centre coordinates (mm, mm) using the Mosflm convention" trust_beam_centre = False .type = bool .help = "Whether or not to trust the beam centre in the image header." "If false, then labelit.index is used to determine a better beam " "centre during xia2 setup phase" .short_caption = "Trust beam centre" .expert_level = 1 wavelength_tolerance = 0.00005 .type = float(value_min=0.0) .help = "Tolerance for accepting two different wavelengths as the same wavelength." 
.short_caption = "Wavelength tolerance" .expert_level = 1 read_all_image_headers = True .type = bool .short_caption = "Read all image headers" .expert_level = 1 detector_distance = None .type = float(value_min=0.0) .help = "Distance between sample and detector (mm)" .short_caption = "Detector distance" .expert_level = 1 show_template = False .type = bool .short_caption = "Show template" .expert_level = 1 untrusted_rectangle_indexing = None .type = ints(size = 4) .multiple = True .short_caption = "Untrusted rectangle for indexing" .expert_level = 1 xds_cell_deviation = 0.05, 5.0 .type = floats(size = 2) .short_caption = "XDS cell deviation" .expert_level = 1 xds_check_cell_deviation = False .type = bool .short_caption = "Check cell deviation in XDS IDXREF" .expert_level = 1 use_brehm_diederichs = False .type = bool .help = "Use the Brehm-Diederichs algorithm to resolve an indexing " "ambiguity." "See: W. Brehm and K. Diederichs, Acta Cryst. (2014). D70, 101-109." .short_caption = "Brehm-Diederichs" .expert_level = 1 integration .short_caption = "Integration" .expert_level = 1 { profile_fitting = True .type = bool .help = "Use profile fitting not summation integration, default yes" .short_caption = "Use profile fitting" exclude_ice_regions = False .type = bool .help = "Exclude measurements from regions which are typically where " "ice rings land" .short_caption = "Exclude ice regions" } developmental .expert_level = 2 { use_dials_spotfinder = False .type = bool .help = "This feature requires the dials project to be installed, and" "is not currently intended for general use. Use at your peril!" 
pointless_tolerance = 0.0 .type = float(value_min=0.0) .help = "Tolerance to use in POINTLESS for comparison of data sets" detector_id = None .type = str .help = "Override detector serial number information" } multi_sweep_indexing = Auto .type = bool .help = "Index all sweeps together rather than combining individual results" "(requires dials indexer)" .expert_level = 2 remove_blanks = False .expert_level = 2 .type = bool integrate_p1 = False .type = bool .short_caption = "Integrate in P1" .expert_level = 1 reintegrate_correct_lattice = True .type = bool .short_caption = "Reintegrate using a corrected lattice" .expert_level = 1 lattice_rejection = True .type = bool .short_caption = "Reject lattice if constraints increase RMSD" .expert_level = 2 lattice_rejection_threshold = 1.5 .type = float .short_caption = "Threshold for lattice rejection" .expert_level = 2 xds .expert_level = 1 .short_caption = "xia2 XDS settings" { geometry_x = None .type = path geometry_y = None .type = path } indexer = mosflm labelit labelitii xds xdsii xdssum dials .type = choice .expert_level = 2 refiner = mosflm xds dials .type = choice .expert_level = 2 integrater = mosflmr xdsr mosflm xds dials .type = choice .expert_level = 2 scaler = ccp4a xdsa .type = choice .expert_level = 2 merging_statistics .short_caption = "Merging statistics" .expert_level = 1 { source = aimless *cctbx .type = choice .help = "Use AIMLESS or cctbx for calculation of merging statistics" .short_caption = "Software to calculate merging statistics" n_bins = 20 .type = int(value_min=1) .short_caption = "Number of bins" use_internal_variance = False .type = bool .help = Use internal variance of the data in the calculation of the merged sigmas .short_caption = "Use internal variance" eliminate_sys_absent = False .type = bool .help = Eliminate systematically absent reflections before computation of merging statistics. 
.short_caption = "Eliminate systematic absences before calculation" } verbose = False .type = bool .expert_level = 1 multiprocessing .short_caption = "Multiprocessing" .expert_level = 1 { mode = *serial parallel .type = choice .help = "Whether to process each sweep in serial (using n processes per" " sweep) or to process sweeps in parallel (using 1 process per" " sweep)." nproc = Auto .type = int(value_min=1) .help = "The number of processors to use per job." njob = Auto .type = int(value_min=1) .help = "The number of sweeps to process simultaneously." type = *simple qsub .type = choice .help = "How to run the parallel processing jobs, e.g. over a cluster" qsub_command = '' .type = str .help = "The command to use to submit qsub jobs" } } """, process_includes=True)
def __init__(self, iparams, write_pickle=True, write_logs=True,
             last_stage='integrate'):
  ''' Constructor

  :param iparams: IOTA params
  :param write_pickle: Set to True to write out an integration pickle
  :param write_logs: Set to True to write out processing logs
  :param last_stage: name of the last processing stage to run
                     (default 'integrate')
  '''
  self.iparams = iparams
  self.write_pickle = write_pickle
  self.write_logs = write_logs
  self.last_stage = last_stage

  # Get Processor PHIL and initialize Processor: a user-supplied target
  # phil file (if any) is fetched against the master scope so that only
  # recognized parameters survive.
  if self.iparams.cctbx_xfel.target:
    with open(self.iparams.cctbx_xfel.target, 'r') as tf:
      tphil_string = tf.read()
    tparams = phil_scope.fetch(source=parse(tphil_string)).extract()
  else:
    tparams = phil_scope.extract()
  Processor.__init__(self, params=tparams)

  # IOTA-specific settings from here
  # Turn off all peripheral output (IOTA manages its own output files)
  self.params.output.experiments_filename = None
  self.params.output.indexed_filename = None
  self.params.output.strong_filename = None
  self.params.output.refined_experiments_filename = None
  self.params.output.integrated_experiments_filename = None
  self.params.output.integrated_filename = None
  self.params.output.profile_filename = None

  # Set customized parameters.
  # NOTE(review): slow_fast_beam_centre takes the SLOW coordinate first,
  # hence beamY before beamX in the format call.
  beamX = self.iparams.image_import.beam_center.x
  beamY = self.iparams.image_import.beam_center.y
  if beamX != 0 or beamY != 0:
    self.params.geometry.detector.slow_fast_beam_centre = '{} {}'.format(
      beamY, beamX)
  if self.iparams.image_import.distance != 0:
    self.params.geometry.detector.distance = self.iparams.image_import.distance
  if self.iparams.image_import.mask is not None:
    # The same mask is applied at both the spotfinding and integration stages
    self.params.spotfinder.lookup.mask = self.iparams.image_import.mask
    self.params.integration.lookup.mask = self.iparams.image_import.mask
  if self.iparams.cctbx_xfel.target_space_group is not None:
    sg = self.iparams.cctbx_xfel.target_space_group
    self.params.indexing.known_symmetry.space_group = sg
  if self.iparams.cctbx_xfel.target_unit_cell is not None:
    uc = self.iparams.cctbx_xfel.target_unit_cell
    self.params.indexing.known_symmetry.unit_cell = uc
  # Default indexing methods; fft3d is optionally inserted after these two
  if not self.params.indexing.stills.method_list:
    self.params.indexing.stills.method_list = ['fft1d',
                                               'real_space_grid_search']
  if self.iparams.cctbx_xfel.use_fft3d:
    self.params.indexing.stills.method_list.insert(2, 'fft3d')
  if self.iparams.cctbx_xfel.significance_filter.flag_on:
    sigma = self.iparams.cctbx_xfel.significance_filter.sigma
    # Falsy sigma (None or 0) falls back to a cutoff of 1
    sigma = sigma if sigma else 1
    self.params.significance_filter.enable = True
    self.params.significance_filter.isigi_cutoff = sigma

  # Load reference geometry: first try to read the file as an experiment
  # list; if that fails, fall back to loading it as an image and taking
  # the detector model from its header.
  self.reference_detector = None
  if self.iparams.advanced.reference_geometry:
    from dxtbx.model.experiment_list import ExperimentListFactory
    try:
      ref_experiments = ExperimentListFactory.from_json_file(
        str(self.iparams.advanced.reference_geometry), check_format=False
      )
    except Exception as e:
      print ('DEBUG: Could not make exp. list because: ', e)
      try:
        import dxtbx
        img = dxtbx.load(str(self.iparams.advanced.reference_geometry))
      except Exception:
        print(
          "DEBUG: Couldn't load geometry file {}"
          "".format(self.iparams.advanced.reference_geometry)
        )
      else:
        self.reference_detector = img.get_detector()
    else:
      # An experiment-list geometry must describe exactly one detector
      assert len(ref_experiments.detectors()) == 1
      self.reference_detector = ref_experiments.detectors()[0]
def submit_job(app, job):
  ''' Write per-job config/phil files and submit the job for processing.

  :param app: application object carrying params (experiment, db, mp, ...)
  :param job: job object carrying run, trial and rungroup records
  :return: result of cxi_mpi_submit's Script().run() on the submit phil
  '''
  import os, libtbx.load_env
  from xfel.ui import settings_dir
  configs_dir = os.path.join(settings_dir, "cfgs")
  if not os.path.exists(configs_dir):
    os.makedirs(configs_dir)
  # Target phil: per-trial processing parameters for the dispatcher
  target_phil_path = os.path.join(
    configs_dir, "%s_%s_r%04d_t%03d_rg%03d_params.phil" %
    (app.params.experiment, app.params.experiment_tag, job.run.run,
     job.trial.trial, job.rungroup.id))
  # Dispatcher name determines the backend: cxi.xtc_process -> labelit,
  # cctbx.xfel.xtc_process -> dials. Any other dispatcher raises ValueError.
  backend = ['labelit', 'dials'][['cxi.xtc_process', 'cctbx.xfel.xtc_process'
    ].index(app.params.dispatcher)]
  phil_str = job.trial.target_phil_str
  if job.rungroup.extra_phil_str is not None:
    phil_str += "\n" + job.rungroup.extra_phil_str
  if backend == 'dials':
    from xfel.command_line.xtc_process import phil_scope
    from iotbx.phil import parse
    trial_params = phil_scope.fetch(parse(phil_str)).extract()
    image_format = trial_params.format.file_format
    assert image_format in ['cbf', 'pickle']
  else:
    image_format = 'pickle'
  # A psana cfg file is needed for labelit, image pickles, or any custom
  # calibration/config supplied with the rungroup
  if job.rungroup.calib_dir is not None or job.rungroup.config_str is not None or backend == 'labelit' or image_format == 'pickle':
    config_path = os.path.join(
      configs_dir, "%s_%s_r%04d_t%03d_rg%03d.cfg" %
      (app.params.experiment, app.params.experiment_tag, job.run.run,
       job.trial.trial, job.rungroup.id))
  else:
    config_path = None

  # Dictionary for formating the submit phil and, if used, the labelit cfg file
  d = dict(
    # Generally for the LABELIT backend or image pickles
    address=job.rungroup.detector_address,
    default_calib_dir=libtbx.env.find_in_repositories(
      "xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0"),
    dark_avg_path=job.rungroup.dark_avg_path,
    dark_stddev_path=job.rungroup.dark_stddev_path,
    untrusted_pixel_mask_path=job.rungroup.untrusted_pixel_mask_path,
    detz_parameter=job.rungroup.detz_parameter,
    gain_map_path=job.rungroup.gain_map_path,
    gain_mask_level=job.rungroup.gain_mask_level,
    beamx=job.rungroup.beamx,
    beamy=job.rungroup.beamy,
    energy=job.rungroup.energy,
    binning=job.rungroup.binning,
    two_theta_low=12.5, # FIXME
    two_theta_high=22.8, # FIXME
    # Generally for job submission
    dry_run=app.params.dry_run,
    dispatcher=app.params.dispatcher,
    cfg=config_path,
    experiment=app.params.experiment,
    run_num=job.run.run,
    output_dir=app.params.output_folder,
    use_ffb=app.params.use_ffb,
    # Generally for both
    trial=job.trial.trial,
    rungroup=job.rungroup.rungroup_id,
    experiment_tag=app.params.experiment_tag,
    calib_dir=job.rungroup.calib_dir,
    nproc=app.params.mp.nproc,
    queue=app.params.mp.queue,
    target=target_phil_path,
    host=app.params.db.host,
    dbname=app.params.db.name,
    user=app.params.db.user,
  )
  # An empty (but not None) password means "no password"
  if app.params.db.password is not None and len(app.params.db.password) == 0:
    d['password'] = None
  else:
    d['password'] = app.params.db.password

  # Write the target phil: for dials, write only the diff against the
  # master scope after folding in rungroup-specific cbf overrides
  phil = open(target_phil_path, "w")
  if backend == 'dials':
    if trial_params.format.file_format == "cbf":
      trial_params.format.cbf.detz_offset = job.rungroup.detz_parameter
      trial_params.format.cbf.override_energy = job.rungroup.energy
      trial_params.format.cbf.invalid_pixel_mask = job.rungroup.untrusted_pixel_mask_path
      trial_params.format.cbf.gain_mask_value = job.rungroup.gain_mask_level
    trial_params.dispatch.process_percent = job.trial.process_percent
    working_phil = phil_scope.format(python_object=trial_params)
    diff_phil = phil_scope.fetch_diff(source=working_phil)
    phil.write(diff_phil.as_str())
  elif backend == 'labelit':
    phil.write(phil_str)
  else:
    assert False
  phil.close()

  if config_path is not None:
    if backend == 'dials':
      d['untrusted_pixel_mask_path'] = None # Don't pass a pixel mask to mod_image_dict as it will
                                            # will be used during dials processing directly
    # Assemble the psana cfg file text
    config_str = "[psana]\n"
    if job.rungroup.calib_dir is not None:
      config_str += "calib-dir=%s\n" % job.rungroup.calib_dir
    modules = []
    # Any [section] headers in the rungroup config become psana modules
    if job.rungroup.config_str is not None:
      for line in job.rungroup.config_str.split("\n"):
        if line.startswith('['):
          modules.append(line.lstrip('[').rstrip(']'))
    if backend == 'labelit':
      modules.insert(0, 'my_ana_pkg.mod_radial_average')
      modules.extend(
        ['my_ana_pkg.mod_hitfind:index', 'my_ana_pkg.mod_dump:index'])
    elif image_format == 'pickle':
      modules.extend(['my_ana_pkg.mod_image_dict'])
    if app.params.dump_shots:
      modules.insert(0, 'my_ana_pkg.mod_dump:shot')
    if len(modules) > 0:
      config_str += "modules = %s\n" % (" ".join(modules))
    if job.rungroup.config_str is not None:
      config_str += job.rungroup.config_str + "\n"
    if backend == 'labelit' or image_format == 'pickle':
      # Temporarily rewrite the address in the old psana style for the cfg
      # template, then restore it afterwards
      d['address'] = d['address'].replace('.', '-').replace(
        ':', '|') # old style address
      if backend == 'labelit':
        template = open(
          os.path.join(
            libtbx.env.find_in_repositories("xfel/ui/db/cfgs"),
            "index_all.cfg"))
      elif image_format == 'pickle':
        template = open(
          os.path.join(
            libtbx.env.find_in_repositories("xfel/ui/db/cfgs"),
            "image_dict.cfg"))
      for line in template.readlines():
        config_str += line.format(**d)
      template.close()
      d['address'] = job.rungroup.detector_address
    cfg = open(config_path, 'w')
    cfg.write(config_str)
    cfg.close()
    if backend == 'dials':
      # Restore the mask path for the submit phil below
      d['untrusted_pixel_mask_path'] = job.rungroup.untrusted_pixel_mask_path

  # Render the submit phil from its template and hand it to cxi_mpi_submit
  submit_phil_path = os.path.join(
    configs_dir, "%s_%s_r%04d_t%03d_rg%03d_submit.phil" %
    (app.params.experiment, app.params.experiment_tag, job.run.run,
     job.trial.trial, job.rungroup.id))
  template = open(
    os.path.join(libtbx.env.find_in_repositories("xfel/ui/db/cfgs"),
                 "submit.phil"))
  phil = open(submit_phil_path, "w")
  if backend == 'labelit':
    d['target'] = None # any target phil will be in mod_hitfind
  for line in template.readlines():
    phil.write(line.format(**d))
  d['target'] = target_phil_path
  template.close()
  phil.close()
  from xfel.command_line.cxi_mpi_submit import Script as submit_script
  return submit_script().run([submit_phil_path])
from xfel.ui.db.frame_logging import DialsProcessorWithLogging from xfel.small_cell.command_line.small_cell_process import Processor as SmallCellProcessor class SmallCellProcessorWithLogging(DialsProcessorWithLogging, SmallCellProcessor): pass from dials.command_line.stills_process import dials_phil_str, program_defaults_phil_str, Script as DialsScript, control_phil_str as dials_control_phil_str from xfel.ui import db_phil_str from xfel.small_cell.command_line.small_cell_process import program_defaults_phil_str as small_cell_program_defaults_phil_str phil_scope = parse(dials_control_phil_str + control_phil_str + dials_phil_str + db_phil_str + radial_average_phil_str + small_cell_phil_str, process_includes=True).fetch( parse(program_defaults_phil_str + small_cell_program_defaults_phil_str)) phil_scope = phil_scope.fetch(parse(delete_shoeboxes_override_str)) class Script(DialsScript): '''A class for running the script.''' def __init__(self): '''Initialise the script.''' from dials.util.options import OptionParser import libtbx.load_env # The script usage usage = "usage: %s [options] [param.phil] filenames" % libtbx.env.dispatcher_name
def generate_phil_scope(): ''' Generate the phil scope. ''' from dials.interfaces import BackgroundIface from dials.interfaces import IntensityIface from dials.interfaces import CentroidIface phil_scope = phil.parse(''' integration { include scope dials.data.lookup.phil_scope mp { method = *multiprocessing sge lsf pbs .type = choice .help = "The multiprocessing method to use" max_procs = 1 .type = int(value_min=1) .help = "The number of processes to use." } block { size = 10 .type = float .help = "The block size in rotation angle (degrees)." } shoebox { n_sigma = 3 .help = "The number of standard deviations of the beam divergence and the" "mosaicity to use for the bounding box size." .type = float sigma_b = None .help = "The E.S.D. of the beam divergence" .type = float sigma_m = None .help = "The E.S.D. of the reflecting range" .type = float } filter { min_zeta = 0.05 .help = "Filter the reflections by the value of zeta. A value of less" "than or equal to zero indicates that this will not be used. A" "positive value is used as the minimum permissable value." .type = float ice_rings { filter = False .type = bool unit_cell = 4.498,4.498,7.338,90,90,120 .type = unit_cell .help = "The unit cell to generate d_spacings for ice rings." space_group = 194 .type = space_group .help = "The space group used to generate d_spacings for ice rings." d_min = 0 .type = int(value_min=0) .help = "The minimum resolution to filter ice rings" width = 0.06 .type = float(value_min=0.0) .help = "The width of an ice ring (in d-spacing)." } } } ''', process_includes=True) main_scope = phil_scope.get_without_substitution("integration") assert(len(main_scope) == 1) main_scope = main_scope[0] main_scope.adopt_scope(BackgroundIface.phil_scope()) main_scope.adopt_scope(IntensityIface.phil_scope()) main_scope.adopt_scope(CentroidIface.phil_scope()) return phil_scope
scope = phil.parse(""" mode = *translate ellipse .type = choice translate .help = "Options for applying separate translation offsets to each panel" { dx = 0.0 .type = floats .help = "Translation in pixels to be applied along the fast direction." "The number of values supplied should equal the number of panels." dy = 0.0 .type = floats .help = "Translation in pixels to be applied along the slow direction." "The number of values supplied should equal the number of panels." } ellipse .help = "Options for correcting for elliptical distortion of images." "Defaults set for correction of datasets published in" "https://doi.org/10.1107/S2059798317010348" { phi = -21.0 .type = float .help = "Acute angle of one principal axis of the ellipse from the fast" "axis of the first panel of the detector" l1 = 1.0 .type = float .help = "Scale factor for first axis of the ellipse" l2 = 0.956 .type = float .help = "Scale factor for second axis of the ellipse" centre_xy = 33.2475,33.2475 .type = floats(size=2) .help = "Centre of the ellipse in millimetres along fast, slow of the" "first panel" } output { x_map = dx.pickle .type = str y_map = dy.pickle .type = str log = dials.generate_distortion_maps.log .type = str } """)
master_phil = parse(""" general { check_image_files_readable = True .type = bool } xds { delphi = 5 .type = float delphi_small = 30 .type = float untrusted_ellipse = None .type = ints(size = 4) .multiple = True untrusted_rectangle = None .type = ints(size = 4) .multiple = True trusted_region = None .type = floats(size = 2) profile_grid_size = None .type = ints(size = 2) correct { refine = *DISTANCE *BEAM *AXIS *ORIENTATION *CELL *POSITION .type = choice(multi = True) .help = 'what to refine in the CORRECT step' } integrate { refine = *ORIENTATION *CELL *BEAM *DISTANCE AXIS *POSITION .type = choice(multi = True) .help = 'what to refine in first pass of integration' refine_final = *ORIENTATION *CELL BEAM DISTANCE AXIS POSITION .type = choice(multi = True) .help = 'what to refine in final pass of integration' fix_scale = False .type = bool delphi = 0 .type = float reflecting_range = 0 .type = float reflecting_range_esd = 0 .type = float beam_divergence = 0 .type = float beam_divergence_esd = 0 .type = float reintegrate = true .type = bool profile_fitting = True .type = bool } init { fix_scale = False .type = bool } index { refine = *ORIENTATION *CELL *BEAM *DISTANCE *AXIS *POSITION .type = choice(multi = True) .help = 'what to refine in autoindexing' debug = *OFF ON .type = choice(multi = False) .help = 'output enganced debugging for indexing' } colspot { minimum_pixels_per_spot = 1 .type = int } xscale { min_isigma = 3.0 .type = float } merge2cbf { merge_n_images = 2 .type = int(value_min=1) .help = "Number of input images to average into a single output image" data_range = None .type = ints(size=2, value_min=0) moving_average = False .type = bool .help = "If true, then perform a moving average over the sweep, i.e. given" "images 1, 2, 3, 4, 5, 6, ..., with averaging over three images," "the output frames would cover 1-3, 2-4, 3-5, 4-6, etc." "Otherwise, a straight summation is performed:" " 1-3, 4-6, 7-9, etc." 
} } dials { fix_geometry = False .type = bool .help = "Whether or not to refine geometry in dials.index and dials.refine." "Most useful when also providing a reference geometry to xia2." outlier.algorithm = null *auto mcd tukey sauter_poon .type = choice fast_mode = False .type = bool close_to_spindle_cutoff = 0.02 .type = float(value_min=0.0) find_spots { min_spot_size = Auto .type = int min_local = 0 .type = int phil_file = None .type = path sigma_strong = None .type = float filter_ice_rings = False .type = bool kernel_size = 3 .type = int global_threshold = None .type = float } index { method = fft1d *fft3d real_space_grid_search .type = choice phil_file = None .type = path max_cell = 0.0 .type = float fft3d.n_points = None .type = int(value_min=0) reflections_per_degree = 100 .type = int } refine { scan_varying = True .type = bool interval_width_degrees = 36.0 .help = "Width of scan between checkpoints in degrees" .type = float(value_min=0.) phil_file = None .type = path reflections_per_degree = 100 .type = int } integrate { phil_file = None .type = path profile_fitting = True .type = bool background_outlier_algorithm = *null nsigma truncated normal tukey mosflm .type = choice background_algorithm = simple null *glm .type = choice use_threading = False .type = bool include_partials = True .type = bool } } ccp4 { truncate { program = 'ctruncate' .type = str } reindex { program = 'pointless' .type = str } aimless { intensities = summation profile *combine .type = choice surface_tie = 0.001 .type = float surface_link = True .type = bool } } xia2.settings { input { image = None .type = path .multiple = True .help = "image=/path/to/an/image_001.img" json = None .type = path .multiple = True .help = "dxtbx-format datablock.json file which can be provided as an " "alternative source of images header information to avoid the " "need to read all the image headers on start-up." 
reference_geometry = None .type = path .multiple = True .help = "Experimental geometry from this datablock.json or " "experiments.json will override the geometry from the " "image headers." reverse_phi = False .type = bool .help = "Reverse the direction of the phi axis rotation." xinfo = None .type = path .help = "Provide an xinfo file as input as alternative to directory " "containing image files." gain = None .type = float .help = "Detector gain if using DIALS" min_images = 10 .type = int(value_min=1) .help = "Minimum number of matching images to include a sweep in processing." min_oscillation_range = None .type = int(value_min=0) .help = "Minimum oscillation range of a sweep for inclusion in processing." } sweep .multiple = True { id = None .type = str range = None .type = ints(size=2) exclude = False .type = bool } scale { directory = Auto .type = str } unit_cell = None .type = unit_cell .help = "Provide a target unit cell to the indexing program" space_group = None .type = space_group .help = "Provide a target space group to the indexing program" resolution { d_max = None .type = float(value_min=0.0) .help = "Low resolution cutoff." d_min = None .type = float(value_min=0.0) .help = "High resolution cutoff." 
rmerge = None .type = float(value_min=0) .help = "Minimum value of Rmerge in the outer resolution shell" completeness = None .type = float(value_min=0) .help = "Minimum completeness in the outer resolution shell" cc_half = 0.5 .type = float(value_min=0) .help = "Minimum value of CC1/2 in the outer resolution shell" isigma = 0.25 .type = float(value_min=0) .help = "Minimum value of the unmerged <I/sigI> in the outer resolution shell" misigma = 1.0 .type = float(value_min=0) .help = "Minimum value of the merged <I/sigI> in the outer resolution shell" } optimize_scaling = False .type = bool .help = "Search for best scaling model" unify_setting = False .type = bool .help = "For one crystal, multiple orientations, unify U matrix" beam_centre = None .type = floats(size=2) .help = "Beam centre (x,y) coordinates (mm) using the Mosflm convention" trust_beam_centre = False .type = bool .help = "Whether or not to trust the beam centre in the image header." "If false, then labelit.index is used to determine a better beam " "centre during xia2 setup phase" wavelength_tolerance = 0.00001 .type = float(value_min=0.0) .help = "Tolerance for accepting two different wavelengths as the same wavelength." read_all_image_headers = True .type = bool detector_distance = None .type = float(value_min=0.0) .help = "Distance between sample and detector (mm)" show_template = False .type = bool untrusted_rectangle_indexing = None .type = ints(size = 4) .multiple = True xds_cell_deviation = 0.05, 5.0 .type = floats(size = 2) use_brehm_diederichs = False .type = bool .help = "Use the Brehm-Diederichs algorithm to resolve an indexing " "ambiguity." "See: W. Brehm and K. Diederichs, Acta Cryst. (2014). D70, 101-109." developmental .expert_level = 2 { use_dials_spotfinder = False .type = bool .help = "This feature requires the dials project to be installed, and" "is not currently intended for general use. Use at your peril!" 
pointless_tolerance = 0.0 .type = float(value_min=0.0) .help = "Tolerance to use in POINTLESS for comparison of data sets" multi_sweep_indexing = False .type = bool } integrate_p1 = False .type = bool reintegrate_correct_lattice = True .type = bool xds { geometry_x = None .type = path geometry_y = None .type = path } indexer = mosflm labelit labelitii xds xdsii xdssum dials .type = choice refiner = mosflm xds dials .type = choice integrater = mosflmr xdsr mosflm xds dials .type = choice scaler = ccp4a xdsa .type = choice merging_statistics { source = aimless *cctbx .type = choice .help = "Use AIMLESS or cctbx for calculation of merging statistics" n_bins = 20 .type = int(value_min=1) use_internal_variance = False .type = bool .help = Use internal variance of the data in the calculation of the merged sigmas eliminate_sys_absent = False .type = bool .help = Eliminate systematically absent reflections before computation of merging statistics. } verbose = False .type = bool multiprocessing { mode = *serial parallel .type = choice .help = "Whether to process each sweep in serial (using n processes per" " sweep) or to process sweeps in parallel (using 1 process per" " sweep)." nproc = Auto .type = int(value_min=1) .help = "The number of processors to use per job." njob = Auto .type = int(value_min=1) .help = "The number of sweeps to process simultaneously." type = *simple qsub .type = choice .help = "How to run the parallel processing jobs, e.g. over a cluster" qsub_command = '' .type = str .help = "The command to use to submit qsub jobs" } } """, process_includes=True)
def onAdvancedOptions(self, e):
  ''' Open the Advanced PRIME Options dialog, seed its controls from the
  current parameters, and on OK read the (possibly edited) PHIL string and
  dialog controls back into self.pparams.

  :param e: wx event object
  '''
  advanced = PRIMEAdvancedOptions(self,
                                  title='Advanced PRIME Options',
                                  style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
  advanced.Fit()

  # Populate the PHIL textbox
  advanced.phil.ctr.SetValue(self.phil_string)

  # Set values to default parameters.
  # NOTE(review): the "high" control is filled from d_max and "low" from
  # d_min; the read-back below uses the same mapping, so the round trip is
  # self-consistent — confirm the widget labels match this convention.
  advanced.res.high.SetValue('{:4.2f}'.format(self.pparams.scale.d_max))
  advanced.res.low.SetValue('{:4.2f}'.format(self.pparams.scale.d_min))
  advanced.sg.spacegroup.SetValue(str(self.pparams.target_space_group))
  if str(self.pparams.target_unit_cell).lower() != 'none':
    uc = ' '.join(list(map(str, self.pparams.target_unit_cell.parameters())))
    advanced.uc.unit_cell.SetValue(uc)
  else:
    advanced.uc.unit_cell.SetValue(str(self.pparams.target_unit_cell))
  advanced.uc_override.SetValue(self.pparams.flag_override_unit_cell)
  advanced.anom.SetValue(self.pparams.target_anomalous_flag)
  advanced.cc.cc_cutoff.SetValue(str(self.pparams.frame_accept_min_cc))
  advanced.pix.pixel_size.SetValue(str(self.pparams.pixel_size_mm))
  advanced.cycles.ctr.SetValue(int(self.pparams.n_postref_cycle))

  if advanced.ShowModal() == wx.ID_OK:
    # Read PHIL string from window, convert to params
    self.phil_string = advanced.phil.ctr.GetValue()
    new_phil = ip.parse(self.phil_string)
    self.pparams = master_phil.fetch(sources=[new_phil]).extract()

    # Param controls will override the PHIL string (clunky, but for now).
    # The same resolution limits are broadcast to every postrefinement scope.
    self.pparams.scale.d_max = float(advanced.res.high.GetValue())
    self.pparams.scale.d_min = float(advanced.res.low.GetValue())
    self.pparams.merge.d_max = float(advanced.res.high.GetValue())
    self.pparams.merge.d_min = float(advanced.res.low.GetValue())
    self.pparams.postref.scale.d_max = float(advanced.res.high.GetValue())
    self.pparams.postref.scale.d_min = float(advanced.res.low.GetValue())
    self.pparams.postref.crystal_orientation.d_max = \
      float(advanced.res.high.GetValue())
    self.pparams.postref.crystal_orientation.d_min = \
      float(advanced.res.low.GetValue())
    self.pparams.postref.reflecting_range.d_max = \
      float(advanced.res.high.GetValue())
    self.pparams.postref.reflecting_range.d_min = \
      float(advanced.res.low.GetValue())
    self.pparams.postref.unit_cell.d_max = float(advanced.res.high.GetValue())
    self.pparams.postref.unit_cell.d_min = float(advanced.res.low.GetValue())
    self.pparams.postref.allparams.d_max = float(advanced.res.high.GetValue())
    self.pparams.postref.allparams.d_min = float(advanced.res.low.GetValue())
    self.pparams.target_space_group = advanced.sg.spacegroup.GetValue()
    # 'none' in any of the free-text controls means "unset"
    if advanced.uc.unit_cell.GetValue().lower() != 'none':
      uc = str_split(advanced.uc.unit_cell.GetValue())
      self.pparams.target_unit_cell = unit_cell(list(map(float, uc)))
    else:
      self.pparams.target_unit_cell = None
    self.pparams.flag_override_unit_cell = advanced.uc_override.GetValue()
    self.pparams.target_anomalous_flag = advanced.anom.GetValue()
    if advanced.cc.cc_cutoff.GetValue().lower() != 'none':
      self.pparams.frame_accept_min_cc = float(advanced.cc.cc_cutoff.GetValue())
    else:
      self.pparams.frame_accept_min_cc = None
    if advanced.pix.pixel_size.GetValue().lower() != 'none':
      self.pparams.pixel_size_mm = float(advanced.pix.pixel_size.GetValue())
    else:
      self.pparams.pixel_size_mm = None
    self.pparams.n_postref_cycle = int(advanced.cycles.ctr.GetValue())
    self.regenerate_params(self.pparams)
  advanced.Destroy()
  e.Skip()
phil_scope = parse( """ border = 0 .type = int .help = "The border around the edge of the image." use_trusted_range = False .type = bool .help = "Use the trusted range to mask bad pixels." d_min = None .help = "The high resolution limit in Angstrom for a pixel to be" "accepted by the filtering algorithm." .type = float(value_min=0) d_max = None .help = "The low resolution limit in Angstrom for a pixel to be" "accepted by the filtering algorithm." .type = float(value_min=0) resolution_range = None .multiple = true .type = floats(2) .help = "an untrusted resolution range" untrusted .multiple = True { panel = None .type = int .help = "The panel number" .help = "If no geometric attributes are given (circle, rectangle, etc)" .help = "then the whole panel is masked out" circle = None .type = ints(3) .help = "An untrusted circle (xc, yc, r)" rectangle = None .type = ints(4) .help = "An untrusted rectangle (x0, x1, y0, y1)" polygon = None .type = ints(value_min=0) .help = "The pixel coordinates (fast, slow) that define the corners " "of the untrusted polygon. Spots whose centroids fall within " "the bounds of the untrusted polygon will be rejected." pixel = None .type = ints(2, value_min=0) .help = "An untrusted pixel (y, x)" } ice_rings { filter = False .type = bool unit_cell = 4.498,4.498,7.338,90,90,120 .type = unit_cell .help = "The unit cell to generate d_spacings for powder rings." .expert_level = 1 space_group = 194 .type = space_group .help = "The space group used to generate d_spacings for powder rings." .expert_level = 1 width = 0.002 .type = float(value_min=0.0) .help = "The width of an ice ring (in 1/d^2)." .expert_level = 1 d_min = None .type = float(value_min=0.0) .help = "The high resolution limit (otherwise use detector d_min)" .expert_level = 1 } """, process_includes=True, )
def submit_job(app, job):
  """Prepare and submit one processing job to the cluster.

  Writes the trial's target PHIL file (and, when needed, a psana .cfg
  file) into the settings cfgs directory, renders the submit.phil
  template with per-job values, and hands the result to
  cxi.mpi_submit.  Returns whatever ``submit_script().run`` returns
  (presumably the queued job identifier — confirm against
  xfel.command_line.cxi_mpi_submit).

  :param app: GUI/application object carrying ``params`` (experiment,
      dispatcher, db credentials, mp settings, ...)
  :param job: job record with ``run``, ``trial`` and ``rungroup`` rows
  """
  import os, libtbx.load_env
  from xfel.ui import settings_dir
  configs_dir = os.path.join(settings_dir, "cfgs")
  if not os.path.exists(configs_dir):
    os.makedirs(configs_dir)
  # Target phil name encodes experiment, tag, run, trial and rungroup so
  # repeated submissions of other jobs never collide.
  target_phil_path = os.path.join(configs_dir, "%s_%s_r%04d_t%03d_rg%03d_params.phil"%
    (app.params.experiment, app.params.experiment_tag, job.run.run, job.trial.trial, job.rungroup.id))
  # Map dispatcher name to backend; raises ValueError for an unknown
  # dispatcher (intentional fail-fast).
  backend = ['labelit', 'dials'][['cxi.xtc_process', 'cctbx.xfel.xtc_process'].index(app.params.dispatcher)]

  phil_str = job.trial.target_phil_str
  if job.rungroup.extra_phil_str is not None:
    phil_str += "\n" + job.rungroup.extra_phil_str

  if backend == 'dials':
    from xfel.command_line.xtc_process import phil_scope
    from iotbx.phil import parse
    trial_params = phil_scope.fetch(parse(phil_str)).extract()
    image_format = trial_params.format.file_format
    assert image_format in ['cbf', 'pickle']
  else:
    image_format = 'pickle'

  # A psana .cfg file is only needed when calibration/config overrides
  # exist, or when the labelit/pickle path requires psana modules.
  if job.rungroup.calib_dir is not None or job.rungroup.config_str is not None or backend == 'labelit' or image_format == 'pickle':
    config_path = os.path.join(configs_dir, "%s_%s_r%04d_t%03d_rg%03d.cfg"%
      (app.params.experiment, app.params.experiment_tag, job.run.run, job.trial.trial, job.rungroup.id))
  else:
    config_path = None

  # Dictionary for formatting the submit phil and, if used, the labelit cfg file
  d = dict(
    # Generally for the LABELIT backend or image pickles
    address = job.rungroup.detector_address,
    default_calib_dir = libtbx.env.find_in_repositories("xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0"),
    dark_avg_path = job.rungroup.dark_avg_path,
    dark_stddev_path = job.rungroup.dark_stddev_path,
    untrusted_pixel_mask_path = job.rungroup.untrusted_pixel_mask_path,
    detz_parameter = job.rungroup.detz_parameter,
    gain_map_path = job.rungroup.gain_map_path,
    gain_mask_level = job.rungroup.gain_mask_level,
    beamx = job.rungroup.beamx,
    beamy = job.rungroup.beamy,
    energy = job.rungroup.energy,
    binning = job.rungroup.binning,
    two_theta_low = 12.5, # FIXME
    two_theta_high = 22.8, # FIXME
    # Generally for job submission
    dry_run = app.params.dry_run,
    dispatcher = app.params.dispatcher,
    cfg = config_path,
    experiment = app.params.experiment,
    run_num = job.run.run,
    output_dir = app.params.output_folder,
    use_ffb = app.params.use_ffb,
    # Generally for both
    trial = job.trial.trial,
    rungroup = job.rungroup.rungroup_id,
    experiment_tag = app.params.experiment_tag,
    calib_dir = job.rungroup.calib_dir,
    nproc = app.params.mp.nproc,
    queue = app.params.mp.queue,
    target = target_phil_path,
    host = app.params.db.host,
    dbname = app.params.db.name,
    user = app.params.db.user,
  )
  # An empty-string password means "no password": normalise it to None.
  if app.params.db.password is not None and len(app.params.db.password) == 0:
    d['password'] = None
  else:
    d['password'] = app.params.db.password

  phil = open(target_phil_path, "w")

  if backend == 'dials':
    if trial_params.format.file_format == "cbf":
      trial_params.format.cbf.detz_offset = job.rungroup.detz_parameter
      trial_params.format.cbf.override_energy = job.rungroup.energy
      trial_params.format.cbf.invalid_pixel_mask = job.rungroup.untrusted_pixel_mask_path
      trial_params.format.cbf.gain_mask_value = job.rungroup.gain_mask_level
    trial_params.dispatch.process_percent = job.trial.process_percent
    # Only write the diff from the master scope, so the target phil stays
    # minimal and readable.
    working_phil = phil_scope.format(python_object=trial_params)
    diff_phil = phil_scope.fetch_diff(source=working_phil)
    phil.write(diff_phil.as_str())
  elif backend == 'labelit':
    phil.write(phil_str)
  else:
    assert False
  phil.close()

  if config_path is not None:
    if backend == 'dials':
      d['untrusted_pixel_mask_path'] = None # Don't pass a pixel mask to mod_image_dict as it
                                            # will be used during dials processing directly
    config_str = "[psana]\n"
    if job.rungroup.calib_dir is not None:
      config_str += "calib-dir=%s\n"%job.rungroup.calib_dir
    # Collect psana module names: user-specified sections first, then the
    # backend-specific processing modules.
    modules = []
    if job.rungroup.config_str is not None:
      for line in job.rungroup.config_str.split("\n"):
        if line.startswith('['):
          modules.append(line.lstrip('[').rstrip(']'))
    if backend == 'labelit':
      modules.insert(0, 'my_ana_pkg.mod_radial_average')
      modules.extend(['my_ana_pkg.mod_hitfind:index','my_ana_pkg.mod_dump:index'])
    elif image_format == 'pickle':
      modules.extend(['my_ana_pkg.mod_image_dict'])
    if app.params.dump_shots:
      modules.insert(0, 'my_ana_pkg.mod_dump:shot')

    if len(modules) > 0:
      config_str += "modules = %s\n"%(" ".join(modules))
    if job.rungroup.config_str is not None:
      config_str += job.rungroup.config_str + "\n"
    if backend == 'labelit' or image_format == 'pickle':
      d['address'] = d['address'].replace('.','-').replace(':','|') # old style address
      if backend == 'labelit':
        template = open(os.path.join(libtbx.env.find_in_repositories("xfel/ui/db/cfgs"), "index_all.cfg"))
      elif image_format == 'pickle':
        template = open(os.path.join(libtbx.env.find_in_repositories("xfel/ui/db/cfgs"), "image_dict.cfg"))
      for line in template.readlines():
        config_str += line.format(**d)
      template.close()
      # Restore the new-style address for the submit phil rendered below.
      d['address'] = job.rungroup.detector_address

    cfg = open(config_path, 'w')
    cfg.write(config_str)
    cfg.close()

    if backend == 'dials':
      d['untrusted_pixel_mask_path'] = job.rungroup.untrusted_pixel_mask_path

  submit_phil_path = os.path.join(configs_dir, "%s_%s_r%04d_t%03d_rg%03d_submit.phil"%
    (app.params.experiment, app.params.experiment_tag, job.run.run, job.trial.trial, job.rungroup.id))

  template = open(os.path.join(libtbx.env.find_in_repositories("xfel/ui/db/cfgs"), "submit.phil"))
  phil = open(submit_phil_path, "w")

  if backend == 'labelit':
    d['target'] = None # any target phil will be in mod_hitfind

  for line in template.readlines():
    phil.write(line.format(**d))

  d['target'] = target_phil_path

  template.close()
  phil.close()

  from xfel.command_line.cxi_mpi_submit import Script as submit_script
  return submit_script().run([submit_phil_path])
phil_scope = phil.parse( """ assess_space_group = True .type = bool .help = "Option to assess space group by testing presence of axial reflections" anomalous = True .type = bool .help = "Output anomalous as well as mean intensities." truncate = True .type = bool .help = "Option to perform truncation on merged data." d_min = None .type = float .help = "High resolution limit to apply to the data." d_max = None .type = float .help = "Low resolution limit to apply to the data." wavelength_tolerance = 1e-4 .type = float(value_min=0.0) .help = "Absolute tolerance for determining wavelength grouping for merging." combine_partials = True .type = bool .help = "Combine partials that have the same partial id into one reflection, with an updated partiality given by the sum of the individual partialities." partiality_threshold=0.4 .type = float .help = "All reflections with partiality values above the partiality threshold will be retained. This is done after any combination of partials if applicable." best_unit_cell = None .type = unit_cell .help = "Best unit cell value, to use when performing resolution cutting," "and as the overall unit cell in the merged mtz. If undefined, the median" "cell will be used." n_residues = 200 .type = int .help = "Number of residues to use in Wilson scaling" merging { use_internal_variance = False .type = bool n_bins = 20 .type = int(value_min=5) anomalous = False .type = bool .help = "Option to control whether reported merging stats are anomalous." } output { log = dials.merge.log .type = str mtz = merged.mtz .type = str .help = "Filename to use for mtz output." html = dials.merge.html .type = str .help = "Filename for html output report." 
crystal_names = XTAL .type = strings .help = "Crystal name to be used in MTZ file output (multiple names allowed for MAD datasets)" project_name = AUTOMATIC .type = str .help = "Project name to be used in MTZ file output" dataset_names = NATIVE .type = strings .help = "Dataset name to be used in MTZ file output (multiple names allowed for MAD datasets)" } """, process_includes=True, )
phil_scope = parse(""" border = 0 .type = int .help = "The border around the edge of the image." use_trusted_range = True .type = bool .help = "Use the trusted range to mask bad pixels." d_min = None .help = "The high resolution limit in Angstrom for a pixel to be" "accepted by the filtering algorithm." .type = float(value_min=0) d_max = None .help = "The low resolution limit in Angstrom for a pixel to be" "accepted by the filtering algorithm." .type = float(value_min=0) resolution_range = None .multiple = true .type = floats(2) .help = "an untrusted resolution range" untrusted .multiple = True { panel = 0 .type = int .help = "The panel number" circle = None .type = ints(3) .help = "An untrusted circle (xc, yc, r)" rectangle = None .type = ints(4) .help = "An untrusted rectangle (x0, x1, y0, y1)" polygon = None .type = ints(value_min=0) .help = "The pixel coordinates (fast, slow) that define the corners " "of the untrusted polygon. Spots whose centroids fall within " "the bounds of the untrusted polygon will be rejected." } ice_rings { filter = False .type = bool unit_cell = 4.498,4.498,7.338,90,90,120 .type = unit_cell .help = "The unit cell to generate d_spacings for powder rings." .expert_level = 1 space_group = 194 .type = space_group .help = "The space group used to generate d_spacings for powder rings." .expert_level = 1 width = 0.06 .type = float(value_min=0.0) .help = "The width of an ice ring (in d-spacing)." .expert_level = 1 d_min = None .type = float(value_min=0.0) .help = "The high resolution limit (otherwise use detector d_min)" } """, process_includes=True)
from dials.array_family import flex # The phil scope phil_scope = parse(""" powder { water_ice { unit_cell = 4.498,4.498,7.338,90,90,120 .type = unit_cell .help = "The unit cell to generate d_spacings for ice rings." space_group = 194 .type = space_group .help = "The space group used to generate d_spacings for ice rings." d_min = 1 .type = float(value_min=0.0) .help = "The minimum resolution to filter ice rings" width = 0.002 .type = float(value_min=0.0) .help = "The width of an ice ring (in 1/d^2)." } apply = *none water_ice .type = choice(multi=True) .help = "The power ring filters to apply" } """) class PowderRingFilter: """
def generate_phil_scope():
  """Build and return the master spot-finder PHIL scope.

  Parses the base ``spotfinder`` scope (lookup tables, hot mask, scan
  ranges, filtering and multiprocessing options), then grafts the
  threshold-algorithm parameters from the registered
  SpotFinderThresholdIface extensions onto it.

  :return: the fully assembled phil scope (libtbx.phil.scope)
  """
  from iotbx.phil import parse
  import dials.extensions # import dependency
  from dials.interfaces import SpotFinderThresholdIface

  phil_scope = parse('''

  spotfinder
    .help = "Parameters used in the spot finding algorithm."
  {
    include scope dials.data.lookup.phil_scope

    write_hot_mask = False
      .type = bool
      .help = "Write the hot mask"

    scan_range = None
      .help = "The range of images to use in finding spots. Number of arguments"
              "must be a factor of two. Specifying \"0 0\" will use all images"
              "by default. The given range follows C conventions"
              "(e.g. j0 <= j < j1)."
              "For sweeps the scan range is interpreted as the literal scan"
              "range. Whereas for imagesets the scan range is interpreted as"
              "the image number in the imageset"
      .type = ints(size=2)
      .multiple = True

    region_of_interest = None
      .type = ints(size=4)
      .help = "A region of interest to look for spots."
              "Specified as: x0,x1,y0,y1"
              "The pixels x0 and y0 are included in the range but the pixels x1 and y1"
              "are not. To specify an ROI covering the whole image set"
              "region_of_interest=0,width,0,height."

    filter
      .help = "Parameters used in the spot finding filter strategy."
    {
      min_spot_size = Auto
        .help = "The minimum number of contiguous pixels for a spot"
                "to be accepted by the filtering algorithm."
        .type = int(value_min=1)

      max_spot_size = 100
        .help = "The maximum number of contiguous pixels for a spot"
                "to be accepted by the filtering algorithm."
        .type = int(value_min=1)

      max_separation = 2
        .help = "The maximum peak-to-centroid separation (in pixels)"
                "for a spot to be accepted by the filtering algorithm."
        .type = float(value_min=0)
        .expert_level = 1

      max_strong_pixel_fraction = 0.25
        .help = "If the fraction of pixels in an image marked as strong is"
                "greater than this value, throw an exception"
        .type = float(value_min=0, value_max=1)

      background_gradient
        .expert_level=2
      {
        filter = False
          .type = bool
        background_size = 2
          .type = int(value_min=1)
        gradient_cutoff = 4
          .type = float(value_min=0)
      }

      spot_density
        .expert_level=2
      {
        filter = False
          .type = bool
      }

      include scope dials.util.masking.phil_scope
    }

    mp {
      method = *multiprocessing sge lsf pbs
        .type = choice
        .help = "The multiprocessing method to use"

      nproc = 1
        .type = int(value_min=1)
        .help = "The number of processes to use."

      chunksize = 20
        .type = int(value_min=1)
        .help = "The number of jobs to process per process"
    }
  }

  ''', process_includes=True)

  # Splice the extension-provided threshold parameters into the
  # spotfinder sub-scope before returning the master scope.
  main_scope = phil_scope.get_without_substitution("spotfinder")
  assert(len(main_scope) == 1)
  main_scope = main_scope[0]
  main_scope.adopt_scope(SpotFinderThresholdIface.phil_scope())

  return phil_scope
def initialize_spotfinder(self):
    """Initialise tracker state, read command-line/PHIL settings and
    launch the appropriate image-processing thread.

    Reads options from ``parse_command_args`` (paramfile, backend,
    action, file/path/time flags), builds ``self.params`` from the
    master phil scope, decides how far processing goes (spot finding,
    indexing, integration), then starts either an InterceptorThread
    (live folder watching) or an InterceptorFileThread (replay from a
    results file).
    """
    self.data_folder = None
    self.done_list = []
    self.data_list = []
    self.spotfinding_info = []
    self.plot_idx = 0
    self.bookmark = 0
    self.all_info = []
    self.current_min_bragg = 0
    self.waiting = False
    self.submit_new_images = False
    self.terminated = False
    # FIX: always define the processing-depth flags.  They were previously
    # only assigned inside the action branches below, so an action list
    # containing neither "index" nor "int" raised AttributeError when the
    # flags were passed to the processing thread.
    self.run_indexing = False
    self.run_integration = False

    # Read arguments if any
    self.args, self.phil_args = parse_command_args("").parse_known_args()

    # Generate DIALS PHIL file
    if self.args.paramfile is None:
        default_phil = ip.parse(default_target)
        self.phil = phil_scope.fetch(source=default_phil)
    else:
        with open(self.args.paramfile, "r") as phil_file:
            phil_string = phil_file.read()
        user_phil = ip.parse(phil_string)
        self.phil = phil_scope.fetch(source=user_phil)
    self.params = self.phil.extract()

    # Set backend
    self.spf_backend = self.args.backend

    # Determine how far the DIALS processing will go
    if "index" in self.args.action:
        self.run_indexing = True
    elif "int" in self.args.action:
        self.run_indexing = True
        self.run_integration = True
    self.tracker_panel.min_bragg.ctr.SetValue(self.args.bragg)

    # Determine how the tracker will track images: from file output by
    # iota.single_image, or by turning over actual files. If the latter,
    # determine at what point the tracker will start the tracking
    auto_start = True
    min_back = None
    if self.args.file is not None:
        self.results_file = self.args.file
    elif self.args.path is not None:
        path = os.path.abspath(self.args.path)
        self.open_images_and_get_ready(path=path)
        if self.args.start:
            print("IMAGE_TRACKER: STARTING FROM FIRST RECORDED IMAGE")
        elif self.args.proceed:
            print("IMAGE_TRACKER: STARTING FROM IMAGE RECORDED 1 MIN AGO")
            min_back = -1
        elif self.args.time > 0:
            min_back = -self.args.time[0]
            print("IMAGE_TRACKER: STARTING FROM IMAGE RECORDED {} MIN AGO"
                  "".format(min_back))
        else:
            auto_start = False
    else:
        auto_start = False

    # Initialize processing thread
    if self.args.file is None:
        self.proc_thread = thr.InterceptorThread(
            self,
            data_folder=self.data_folder,
            term_file=self.term_file,
            proc_params=self.params,
            backend=self.args.backend,
            n_proc=self.args.nproc,
            min_back=min_back,
            run_indexing=self.run_indexing,
            run_integration=self.run_integration,
        )
    else:
        self.proc_thread = thr.InterceptorFileThread(
            self, results_file=self.args.file, reorder=self.args.reorder
        )

    if auto_start:
        self.start_spotfinding()
queue = None .type = str .help = Multiprocessing queue .alias = Queue submit_command = None .type = str .help = Command to submit IOTA job to a queue .alias = Submit Command kill_command = None .type = str .help = Command to kill the current IOTA job submitted to a queue .alias = Kill Command } """ master_phil = ip.parse(master_phil_str + cctbx_str, process_includes=True) def get_input_phil(paramfile=None, phil_args=None, ha14=False, gui=False): ''' Generate PHIL from file, master, and/or command arguments :param args: command line arguments :param phil_args: PHIL settings as command line arguments :param paramfile: file with input settings in PHIL format :return: ''' from libtbx.phil.command_line import argument_interpreter from libtbx.utils import Sorry # Depending on mode, either read input from file, or generate defaults if paramfile: with open(paramfile, 'r') as inpf:
phil_scope = parse(''' verbosity = 10 .type = int(value_min=0) .help = "The verbosity level" input { template = None .type = str .help = "The image sweep template" .multiple = True } output { shoeboxes = True .type = bool datablock_filename = datablock.json .type = str .help = "The filename for output datablock" strong_filename = strong.pickle .type = str .help = "The filename for strong reflections from spot finder output." indexed_filename = indexed.pickle .type = str .help = "The filename for indexed reflections." refined_experiments_filename = refined_experiments.json .type = str .help = "The filename for saving refined experimental models" integrated_filename = integrated.pickle .type = str .help = "The filename for final integrated reflections." profile_filename = profile.phil .type = str .help = "The filename for output reflection profile parameters" integration_pickle = int-%s_%d.pickle .type = str .help = Output integration results for each color data to separate cctbx.xfel-style pickle files } include scope dials.algorithms.spot_finding.factory.phil_scope include scope dials.algorithms.indexing.indexer.index_only_phil_scope include scope dials.algorithms.refinement.refiner.phil_scope include scope dials.algorithms.integration.integrator.phil_scope include scope dials.algorithms.profile_model.factory.phil_scope include scope dials.algorithms.spot_prediction.reflection_predictor.phil_scope ''', process_includes=True)
from dials.algorithms.indexing.fft3d import indexer_fft3d as indexer #from dials.algorithms.indexing.real_space_grid_search import indexer_real_space_grid_search as indexer import copy, os print target_cell,target_sg phil_scope_str=''' include scope dials.algorithms.peak_finding.spotfinder_factory.phil_scope include scope dials.algorithms.indexing.indexer.index_only_phil_scope include scope dials.algorithms.refinement.refiner.phil_scope indexing.known_symmetry.unit_cell={0} .type = unit_cell indexing.known_symmetry.space_group={1} .type = space_group ''' phil_scope = parse(phil_scope_str.format(target_cell,target_sg), process_includes=True) params = phil_scope.extract() params.refinement.parameterisation.crystal.scan_varying = False params.indexing.scan_range = [] # params.refinement.parameterisation.crystal.unit_cell.restraints.tie_to_target = [] # params.spotfinder.filter.min_spot_size=3 filenames = [] for arg in args: if "indexing.data" in arg: path = arg.split('=')[1] if os.path.isdir(path): for subfile in os.listdir(path): subpath = os.path.join(path, subfile) if os.path.isfile(subpath): filenames.append(subpath)
def write_defaults(current_path=None, txt_out=None, method='cctbx_xfel',
                   write_target_file=True, write_param_file=True,
                   filepath=None):
  """ Generates list of default parameters for a reasonable target file:
        - old cctbx.xfel (HA14) now deprecated, but can still be used
        - also writes out the IOTA parameter file.

  :param current_path: path to current output folder
  :param txt_out: text of IOTA parameters
  :param method: which backend is used (default = cctbx.xfel AB18)
  :param write_target_file: write backend parameters to file
  :param write_param_file: write IOTA parameters to file
  :param filepath: explicit target-file path (overrides current_path/method)
  :return: default backend settings as a parsed phil object, IOTA
      parameters as string
  :raises ValueError: if `method` names no known backend
  """
  if filepath:
    def_target_file = filepath
  else:
    def_target_file = '{}/{}.phil' \
                      ''.format(current_path, method.replace('.', '_'))

  if method.lower() in ('cctbx', 'ha14'):
    # This is a deprecated backend from 2014; no longer recommended, may suck
    default_target = '''
# -*- mode: conf -*-
difflimit_sigma_cutoff = 0.01
distl{
  peak_intensity_maximum_factor=1000
  spot_area_maximum_factor=20
  compactness_filter=False
  method2_cutoff_percentage=2.5
}
integration {
  background_factor=2
  enable_one_to_one_safeguard=True
  model=user_supplied
  spotfinder_subset=spots_non-ice
  mask_pixel_value=-2
  greedy_integration_limit=True
  combine_sym_constraints_and_3D_target=True
  spot_prediction=dials
  guard_width_sq=4.
  mosaic {
    refinement_target=LSQ *ML
    domain_size_lower_limit=4.
    enable_rotational_target_highsym=True
  }
}
mosaicity_limit=2.0
distl_minimum_number_spots_for_indexing=16
distl_permit_binning=False
beam_search_scope=0.5
'''
  elif method.lower() in ('dials', 'cctbx.xfel', 'cctbx_xfel'):
    default_target = '''
spotfinder {
  threshold {
    dispersion {
      gain = 1
      sigma_strong = 3
      global_threshold = 0
    }
  }
}
geometry {
  detector {
    distance = None
    slow_fast_beam_centre = None
  }
}
indexing {
  refinement_protocol {
    d_min_start = 2.0
  }
  basis_vector_combinations.max_combinations = 10
  stills {
    indexer = stills
    method_list = fft1d real_space_grid_search
  }
}
refinement {
  parameterisation {
    auto_reduction {
      action=fix
      min_nref_per_parameter=1
    }
  }
  reflections {
    outlier.algorithm=null
    weighting_strategy {
      override=stills
      delpsi_constant=1000000
    }
  }
}
integration {
  integrator=stills
  profile.fitting=False
  background {
    simple {
      outlier {
        algorithm = null
      }
    }
  }
}
profile {
  gaussian_rs {
    min_spots.overall = 0
  }
}'''
  else:
    # FIX: an unrecognized method previously fell through both branches
    # and surfaced later as a confusing NameError on `default_target`;
    # fail fast with a clear message instead.
    raise ValueError('Unknown processing backend method: {}'.format(method))

  if write_target_file:
    with open(def_target_file, 'w') as targ:
      targ.write(default_target)

  if write_param_file:
    with open('{}/iota.param'.format(current_path), 'w') as default_param_file:
      default_param_file.write(txt_out)

  return ip.parse(default_target), txt_out