def run(args):
  """Center the sites of a PDB file inside a padded synthetic unit cell.

  Reads one PDB file, shifts its sites into a non-crystallographic box
  with a configurable buffer layer, and prints the modified model (with
  REMARK records describing the run) to stdout.
  """
  out = sys.stdout
  if not args:
    args = ["--help"]
  parser = option_parser(
    usage="%s [options] pdb_file" % libtbx.env.dispatcher_name).option(
      None, "--buffer_layer", action="store", type="float", default=5)
  cmd = parser.process(args=args, nargs=1)
  pdb_input = iotbx.pdb.input(file_name=cmd.args[0])
  mdl = mmtbx.model.manager(model_input=pdb_input)
  boxed = uctbx.non_crystallographic_unit_cell_with_the_sites_in_its_center(
    sites_cart=mdl.get_sites_cart(),
    buffer_layer=cmd.options.buffer_layer)
  mdl.set_sites_cart(boxed.sites_cart)
  # Bad hack, never repeat. In fact, all the boxing functionality should
  # go into mmtbx.model.manager
  mdl._crystal_symmetry = boxed.crystal_symmetry()
  # NOTE(review): the REMARK echoes "--buffer-layer" while the parser
  # defines "--buffer_layer" -- confirm the echoed form is intended.
  print('REMARK %s --buffer-layer=%.6g %s' % (
    libtbx.env.dispatcher_name,
    cmd.options.buffer_layer,
    show_string(cmd.args[0])), file=out)
  print('REMARK %s' % date_and_time(), file=out)
  print(mdl.model_as_pdb(), file=out)
def run(args):
  """Center the atoms of a PDB file in a padded synthetic unit cell.

  Writes the repositioned model to stdout as a whole PDB file (REMARK
  records, then coordinates with secondary-structure annotation).
  """
  out = sys.stdout
  if not args:
    args = ["--help"]
  cmd = (option_parser(
    usage="%s [options] pdb_file" % libtbx.env.dispatcher_name).option(
      None, "--buffer_layer", action="store", type="float",
      default=5)).process(args=args, nargs=1)
  pdb_input = iotbx.pdb.input(file_name=cmd.args[0])
  pdb_atoms = pdb_input.atoms()
  boxed = uctbx.non_crystallographic_unit_cell_with_the_sites_in_its_center(
    sites_cart=pdb_atoms.extract_xyz(),
    buffer_layer=cmd.options.buffer_layer)
  pdb_atoms.set_xyz(new_xyz=boxed.sites_cart)
  # NOTE(review): the REMARK echoes "--buffer-layer" while the parser
  # defines "--buffer_layer" -- confirm the echoed form is intended.
  print >> out, 'REMARK %s --buffer-layer=%.6g %s' % (
    libtbx.env.dispatcher_name,
    cmd.options.buffer_layer,
    show_string(cmd.args[0]))
  print >> out, 'REMARK %s' % date_and_time()
  iotbx.pdb.write_whole_pdb_file(
    output_file=out,
    pdb_hierarchy=pdb_input.construct_hierarchy(),
    crystal_symmetry=boxed.crystal_symmetry(),
    ss_annotation=pdb_input.extract_secondary_structure(log=null_out()))
def run(args): if (len(args) == 0): args = ["--help"] from libtbx.option_parser import option_parser import libtbx.load_env command_line = (option_parser( usage="%s [options] pdb_file" % libtbx.env.dispatcher_name) .option(None, "--buffer_layer", action="store", type="float", default=5) ).process(args=args, nargs=1) import iotbx.pdb pdb_inp = iotbx.pdb.input(file_name=command_line.args[0]) atoms = pdb_inp.atoms() from cctbx import uctbx box = uctbx.non_crystallographic_unit_cell_with_the_sites_in_its_center( sites_cart=atoms.extract_xyz(), buffer_layer=command_line.options.buffer_layer) atoms.set_xyz(new_xyz=box.sites_cart) from libtbx.str_utils import show_string print 'REMARK %s --buffer-layer=%.6g %s' % ( libtbx.env.dispatcher_name, command_line.options.buffer_layer, show_string(command_line.args[0])) from libtbx.utils import date_and_time print 'REMARK %s' % date_and_time() print iotbx.pdb.format_cryst1_record(crystal_symmetry=box.crystal_symmetry()) print pdb_inp.construct_hierarchy().as_pdb_string(append_end=True),
def write_geo(label, geo, geo_file_name):
  """Dump geometry restraints with a header naming the source model.

  Relies on pdb_file_name, sites_cart and site_labels from the enclosing
  scope.
  """
  from libtbx.utils import date_and_time
  header_lines = [
    "# %sgeometry restraints for file:" % label,
    "# %s" % show_string(pdb_file_name),
    "# %s" % date_and_time(),
  ]
  header = "\n".join(header_lines) + "\n"
  geo.write_geo_file(
    sites_cart=sites_cart,
    site_labels=site_labels,
    file_name=geo_file_name,
    header=header)
def write_geo(label, geo, geo_file_name):
  """Write a .geo restraints file with an identifying comment header.

  Uses pdb_file_name, sites_cart and site_labels from the enclosing scope.
  """
  from libtbx.utils import date_and_time
  header = "# %sgeometry restraints for file:\n# %s\n# %s\n" % (
    label, show_string(pdb_file_name), date_and_time())
  geo.write_geo_file(
    sites_cart=sites_cart,
    site_labels=site_labels,
    file_name=geo_file_name,
    header=header)
def __init__(self,
      params,
      coeffs,
      atom_selection_manager=None,
      xray_structure=None):
  """Compute an FFT map from the given map coefficients and write it out.

  params is expected to provide: grid_resolution_factor, scale ("volume"
  or "sigma"), file_name, region ("selection" or "cell"), atom_selection,
  atom_selection_buffer and format ("xplor" or anything else -> CCP4).
  All constructor arguments are stored on self via adopt_init_args.
  """
  adopt_init_args(self, locals())
  fft_map = coeffs.fft_map(
    resolution_factor=self.params.grid_resolution_factor)
  # Map scaling mode: only "volume" or "sigma" is accepted.
  if (self.params.scale == "volume"):
    fft_map.apply_volume_scaling()
  elif (self.params.scale == "sigma"):
    fft_map.apply_sigma_scaling()
  else:
    raise RuntimeError
  # Title/REMARK lines recorded in the output map file.
  title_lines = [
    "REMARK file: %s" % show_string(os.path.basename(self.params.file_name))]
  title_lines.append("REMARK directory: %s"
    % show_string(os.path.dirname(self.params.file_name)))
  title_lines.append("REMARK %s" % date_and_time())
  assert self.params.region in ["selection", "cell"]
  if (self.params.region == "selection" and xray_structure is not None):
    # Restrict the output grid to a box around the selected atoms.
    map_iselection = None
    if atom_selection_manager is not None:
      map_iselection = self.atom_iselection()
    frac_min, frac_max = self.box_around_selection(
      iselection=map_iselection,
      buffer=self.params.atom_selection_buffer)
    n_real = fft_map.n_real()
    gridding_first = [ifloor(f * n) for f, n in zip(frac_min, n_real)]
    gridding_last = [iceil(f * n) for f, n in zip(frac_max, n_real)]
    title_lines.append('REMARK map around selection')
    title_lines.append('REMARK atom_selection=%s'
      % show_string(self.params.atom_selection))
    title_lines.append('REMARK atom_selection_buffer=%.6g'
      % self.params.atom_selection_buffer)
    # Without an explicit selection the whole structure is counted.
    if (map_iselection is None):
      sel_size = self.xray_structure.scatterers().size()
    else:
      sel_size = map_iselection.size()
    title_lines.append('REMARK number of atoms selected: %d' % sel_size)
  else:
    # None gridding means "cover the whole unit cell" for the writers.
    gridding_first = None
    gridding_last = None
    title_lines.append("REMARK map covering the unit cell")
  # Output format: XPLOR when requested, CCP4 otherwise.
  if params.format == "xplor":
    fft_map.as_xplor_map(
      file_name=self.params.file_name,
      title_lines=title_lines,
      gridding_first=gridding_first,
      gridding_last=gridding_last)
  else:
    fft_map.as_ccp4_map(
      file_name=self.params.file_name,
      gridding_first=gridding_first,
      gridding_last=gridding_last,
      labels=title_lines)
def __init__(self, params, coeffs,
             atom_selection_manager=None, xray_structure=None):
  """Compute an FFT map from coeffs and write it as XPLOR or CCP4.

  All constructor arguments are stored on self via adopt_init_args.
  """
  adopt_init_args(self, locals())
  density_map = coeffs.fft_map(
    resolution_factor=self.params.grid_resolution_factor)
  # Scaling mode: only "volume" or "sigma" is accepted.
  scale_mode = self.params.scale
  if scale_mode == "volume":
    density_map.apply_volume_scaling()
  elif scale_mode == "sigma":
    density_map.apply_sigma_scaling()
  else:
    raise RuntimeError
  # REMARK lines embedded in the output map file.
  remarks = []
  remarks.append("REMARK file: %s"
    % show_string(os.path.basename(self.params.file_name)))
  remarks.append("REMARK directory: %s"
    % show_string(os.path.dirname(self.params.file_name)))
  remarks.append("REMARK %s" % date_and_time())
  assert self.params.region in ["selection", "cell"]
  # None gridding means "cover the whole unit cell" for the writers.
  grid_first = None
  grid_last = None
  if self.params.region == "selection" and xray_structure is not None:
    sel = None
    if atom_selection_manager is not None:
      sel = self.atom_iselection()
    frac_min, frac_max = self.box_around_selection(
      iselection=sel,
      buffer=self.params.atom_selection_buffer)
    n_real = density_map.n_real()
    grid_first = [ifloor(f * n) for f, n in zip(frac_min, n_real)]
    grid_last = [iceil(f * n) for f, n in zip(frac_max, n_real)]
    remarks.append('REMARK map around selection')
    remarks.append('REMARK atom_selection=%s'
      % show_string(self.params.atom_selection))
    remarks.append('REMARK atom_selection_buffer=%.6g'
      % self.params.atom_selection_buffer)
    if sel is None:
      n_selected = self.xray_structure.scatterers().size()
    else:
      n_selected = sel.size()
    remarks.append('REMARK number of atoms selected: %d' % n_selected)
  else:
    remarks.append("REMARK map covering the unit cell")
  if params.format == "xplor":
    density_map.as_xplor_map(
      file_name=self.params.file_name,
      title_lines=remarks,
      gridding_first=grid_first,
      gridding_last=grid_last)
  else:
    density_map.as_ccp4_map(
      file_name=self.params.file_name,
      gridding_first=grid_first,
      gridding_last=grid_last,
      labels=remarks)
def write_mtz_file(self, file_name, mtz_history_buffer = None,
      r_free_flags=None):
  """Write the accumulated MTZ dataset to file_name.

  Optionally appends an R-free flag column ("FreeR_flag") first, and
  records a history entry with the timestamp and output file name.
  Returns True if a file was written, False when no dataset is present.
  """
  from cctbx.array_family import flex
  if self.mtz_dataset is None:
    return False
  if r_free_flags is not None:
    self.mtz_dataset.add_miller_array(
      r_free_flags, column_root_label="FreeR_flag")
  history = mtz_history_buffer
  if history is None:
    history = flex.std_string()
  history.append(date_and_time())
  history.append("> file name: %s" % os.path.basename(file_name))
  mtz_obj = self.mtz_dataset.mtz_object()
  mtz_obj.add_history(history)
  mtz_obj.write(file_name=file_name)
  return True
def run(
      args,
      command_name="phenix.reflection_file_converter",
      simply_return_all_miller_arrays=False):
  """Command-line reflection file converter.

  Reads one reflection data set (selected by --label) from the input
  file(s), optionally manipulates it (symmetry, change of basis, merging,
  resolution cutoffs, scaling, anomalous averaging, R-free flag handling)
  and writes the result in Scalepack, MTZ, CNS and/or SHELX format.

  Returns the processed miller array, or None on usage errors.  If
  simply_return_all_miller_arrays is True, returns the raw list of all
  arrays found in the input files instead of processing anything.
  """
  command_line = (option_parser(
    usage="%s [options] reflection_file ..." % command_name,
    description="Example: %s w1.sca --mtz ." % command_name)
    .enable_symmetry_comprehensive()
    .option(None, "--weak_symmetry",
      action="store_true",
      default=False,
      help="symmetry on command line is weaker than symmetry found in files")
    .enable_resolutions()
    .option(None, "--label",
      action="store",
      type="string",
      help="Substring of reflection data label or number",
      metavar="STRING")
    .option(None, "--non_anomalous",
      action="store_true",
      default=False,
      help="Averages Bijvoet mates to obtain a non-anomalous array")
    .option(None, "--r_free_label",
      action="store",
      type="string",
      help="Substring of reflection data label or number",
      metavar="STRING")
    .option(None, "--r_free_test_flag_value",
      action="store",
      type="int",
      help="Value in R-free array indicating assignment to free set.",
      metavar="FLOAT")
    .option(None, "--generate_r_free_flags",
      action="store_true",
      default=False,
      help="Generates a new array of random R-free flags"
           " (MTZ and CNS output only).")
    .option(None, "--use_lattice_symmetry_in_r_free_flag_generation",
      dest="use_lattice_symmetry_in_r_free_flag_generation",
      action="store_true",
      default=True,
      help="group twin/pseudo symmetry related reflections together"
           " in r-free set (this is the default).")
    .option(None, "--no_lattice_symmetry_in_r_free_flag_generation",
      dest="use_lattice_symmetry_in_r_free_flag_generation",
      action="store_false",
      help="opposite of --use-lattice-symmetry-in-r-free-flag-generation")
    .option(None, "--r_free_flags_fraction",
      action="store",
      default=0.10,
      type="float",
      help="Target fraction free/work reflections (default: 0.10).",
      metavar="FLOAT")
    .option(None, "--r_free_flags_max_free",
      action="store",
      default=2000,
      type="int",
      help="Maximum number of free reflections (default: 2000).",
      metavar="INT")
    .option(None, "--r_free_flags_format",
      choices=("cns", "ccp4", "shelx"),
      default="cns",
      help="Convention for generating R-free flags",
      metavar="cns|ccp4")
    .option(None, "--output_r_free_label",
      action="store",
      type="string",
      help="Label for newly generated R-free flags (defaults to R-free-flags)",
      default="R-free-flags",
      metavar="STRING")
    .option(None, "--random_seed",
      action="store",
      type="int",
      help="Seed for random number generator (affects generation of"
           " R-free flags).",
      metavar="INT")
    .option(None, "--change_of_basis",
      action="store",
      type="string",
      help="Change-of-basis operator: h,k,l or x,y,z"
           " or to_reference_setting, to_primitive_setting, to_niggli_cell,"
           " to_inverse_hand",
      metavar="STRING")
    .option(None, "--eliminate_invalid_indices",
      action="store_true",
      default=False,
      help="Remove indices which are invalid given the change of basis desired")
    .option(None, "--expand_to_p1",
      action="store_true",
      default=False,
      help="Generates all symmetrically equivalent reflections."
           " The space group symmetry is reset to P1."
           " May be used in combination with --change_to_space_group to"
           " lower the symmetry.")
    .option(None, "--change_to_space_group",
      action="store",
      type="string",
      help="Changes the space group and merges equivalent reflections"
           " if necessary",
      metavar="SYMBOL|NUMBER")
    .option(None, "--write_mtz_amplitudes",
      action="store_true",
      default=False,
      help="Converts intensities to amplitudes before writing MTZ format;"
           " requires --mtz_root_label")
    .option(None, "--write_mtz_intensities",
      action="store_true",
      default=False,
      help="Converts amplitudes to intensities before writing MTZ format;"
           " requires --mtz_root_label")
    .option(None, "--remove_negatives",
      action="store_true",
      default=False,
      help="Remove negative intensities or amplitudes from the data set")
    .option(None, "--massage_intensities",
      action="store_true",
      default=False,
      help="'Treat' negative intensities to get a positive amplitude."
           " |Fnew| = sqrt((Io+sqrt(Io**2 +2sigma**2))/2.0). Requires"
           " intensities as input and the flags --mtz,"
           " --write_mtz_amplitudes and --mtz_root_label.")
    .option(None, "--scale_max",
      action="store",
      type="float",
      help="Scales data such that the maximum is equal to the given value",
      metavar="FLOAT")
    .option(None, "--scale_factor",
      action="store",
      type="float",
      help="Multiplies data with the given factor",
      metavar="FLOAT")
    .option(None, "--sca",
      action="store",
      type="string",
      help="write data to Scalepack FILE ('--sca .' copies name of input file)",
      metavar="FILE")
    .option(None, "--mtz",
      action="store",
      type="string",
      help="write data to MTZ FILE ('--mtz .' copies name of input file)",
      metavar="FILE")
    .option(None, "--mtz_root_label",
      action="store",
      type="string",
      help="Root label for MTZ file (e.g. Fobs)",
      metavar="STRING")
    .option(None, "--cns",
      action="store",
      type="string",
      help="write data to CNS FILE ('--cns .' copies name of input file)",
      metavar="FILE")
    .option(None, "--shelx",
      action="store",
      type="string",
      help="write data to SHELX FILE ('--shelx .' copies name of input file)",
      metavar="FILE")
  ).process(args=args)
  co = command_line.options
  # Seed both Python's and flex's RNG so R-free flag generation is
  # reproducible.
  if (co.random_seed is not None):
    random.seed(co.random_seed)
    flex.set_random_seed(value=co.random_seed)
  # Mutually exclusive / interdependent option checks.
  if (co.write_mtz_amplitudes and co.write_mtz_intensities):
    print
    print "--write_mtz_amplitudes and --write_mtz_intensities" \
          " are mutually exclusive."
    print
    return None
  if (co.write_mtz_amplitudes or co.write_mtz_intensities):
    if (co.mtz_root_label is None):
      print
      print "--write_mtz_amplitudes and --write_mtz_intensities" \
            " require --mtz_root_label."
      print
      return None
  if (co.scale_max is not None and co.scale_factor is not None):
    print
    print "--scale_max and --scale_factor are mutually exclusive."
    print
    return None
  if (len(command_line.args) == 0):
    command_line.parser.show_help()
    return None
  # Read all reflection arrays from the input files (unmerged).
  all_miller_arrays = reflection_file_reader.collect_arrays(
    file_names=command_line.args,
    crystal_symmetry=None,
    force_symmetry=False,
    merge_equivalents=False,
    discard_arrays=False,
    verbose=1)
  if (simply_return_all_miller_arrays):
    return all_miller_arrays
  if (len(all_miller_arrays) == 0):
    print
    print "No reflection data found in input file%s." % (
      plural_s(len(command_line.args))[1])
    print
    return None
  # Pick the data array (and optionally the R-free array) by label.
  label_table = reflection_file_utils.label_table(
    miller_arrays=all_miller_arrays)
  selected_array = label_table.select_array(
    label=co.label, command_line_switch="--label")
  if (selected_array is None):
    return None
  r_free_flags = None
  r_free_info = None
  if (co.r_free_label is not None):
    r_free_flags = label_table.match_data_label(
      label=co.r_free_label,
      command_line_switch="--r_free_label")
    if (r_free_flags is None):
      return None
    r_free_info = str(r_free_flags.info())
    if (not r_free_flags.is_bool_array()):
      # Convert integer-style flags to bool using the (given or
      # automatically determined) test flag value.
      test_flag_value = reflection_file_utils.get_r_free_flags_scores(
        miller_arrays=[r_free_flags],
        test_flag_value=co.r_free_test_flag_value).test_flag_values[0]
      if (test_flag_value is None):
        if (co.r_free_test_flag_value is None):
          raise Sorry(
            "Cannot automatically determine r_free_test_flag_value."
            " Please use --r_free_test_flag_value to specify a value.")
        else:
          raise Sorry("Invalid --r_free_test_flag_value.")
      r_free_flags = r_free_flags.customized_copy(
        data=(r_free_flags.data() == test_flag_value))
  print "Selected data:"
  print " ", selected_array.info()
  print " Observation type:", selected_array.observation_type()
  print
  if (r_free_info is not None):
    print "R-free flags:"
    print " ", r_free_info
    print
  # Apply command-line symmetry (unless declared weaker than file symmetry).
  processed_array = selected_array.customized_copy(
    crystal_symmetry=selected_array.join_symmetry(
      other_symmetry=command_line.symmetry,
      force=not co.weak_symmetry)).set_observation_type(
        selected_array.observation_type())
  if (r_free_flags is not None):
    r_free_flags = r_free_flags.customized_copy(
      crystal_symmetry=processed_array)
  print "Input crystal symmetry:"
  crystal.symmetry.show_summary(processed_array, prefix=" ")
  print
  if (processed_array.unit_cell() is None):
    command_line.parser.show_help()
    print "Unit cell parameters unknown. Please use --symmetry or --unit_cell."
    print
    return None
  if (processed_array.space_group_info() is None):
    command_line.parser.show_help()
    print "Space group unknown. Please use --symmetry or --space_group."
    print
    return None
  if (r_free_flags is not None):
    r_free_flags = r_free_flags.customized_copy(
      crystal_symmetry=processed_array)
  # Optional change of basis, applied to the R-free flags as well.
  if (co.change_of_basis is not None):
    processed_array, cb_op = processed_array.apply_change_of_basis(
      change_of_basis=co.change_of_basis,
      eliminate_invalid_indices=co.eliminate_invalid_indices)
    if (r_free_flags is not None):
      r_free_flags = r_free_flags.change_basis(cb_op=cb_op)
  # Merge symmetry-equivalent reflections (data and flags separately).
  if (not processed_array.is_unique_set_under_symmetry()):
    print "Merging symmetry-equivalent values:"
    merged = processed_array.merge_equivalents()
    merged.show_summary(prefix=" ")
    print
    processed_array = merged.array()
    del merged
    processed_array.show_comprehensive_summary(prefix=" ")
    print
  if (r_free_flags is not None
      and not r_free_flags.is_unique_set_under_symmetry()):
    print "Merging symmetry-equivalent R-free flags:"
    merged = r_free_flags.merge_equivalents()
    merged.show_summary(prefix=" ")
    print
    r_free_flags = merged.array()
    del merged
    r_free_flags.show_comprehensive_summary(prefix=" ")
    print
  # Optional symmetry manipulations (not supported with R-free flags).
  if (co.expand_to_p1):
    print "Expanding symmetry and resetting space group to P1:"
    if (r_free_flags is not None):
      raise Sorry(
        "--expand_to_p1 not supported for arrays of R-free flags.")
    processed_array = processed_array.expand_to_p1()
    processed_array.show_comprehensive_summary(prefix=" ")
    print
  if (co.change_to_space_group is not None):
    if (r_free_flags is not None):
      raise Sorry(
        "--change_to_space_group not supported for arrays of R-free flags.")
    new_space_group_info = sgtbx.space_group_info(
      symbol=co.change_to_space_group)
    print "Change to space group:", new_space_group_info
    new_crystal_symmetry = crystal.symmetry(
      unit_cell=processed_array.unit_cell(),
      space_group_info=new_space_group_info,
      assert_is_compatible_unit_cell=False)
    if (not new_crystal_symmetry.unit_cell()
          .is_similar_to(processed_array.unit_cell())):
      print " *************"
      print " W A R N I N G"
      print " *************"
      print " Unit cell parameters adapted to new space group symmetry are"
      print " significantly different from input unit cell parameters:"
      print " Input unit cell parameters:", \
        processed_array.unit_cell()
      print " Adapted unit cell parameters:", \
        new_crystal_symmetry.unit_cell()
    processed_array = processed_array.customized_copy(
      crystal_symmetry=new_crystal_symmetry)
    print
    if (not processed_array.is_unique_set_under_symmetry()):
      print " Merging values symmetry-equivalent under new symmetry:"
      merged = processed_array.merge_equivalents()
      merged.show_summary(prefix=" ")
      print
      processed_array = merged.array()
      del merged
      processed_array.show_comprehensive_summary(prefix=" ")
      print
  # Optional anomalous -> non-anomalous conversion.
  if (processed_array.anomalous_flag() and co.non_anomalous):
    print "Converting data array from anomalous to non-anomalous."
    if (not processed_array.is_xray_intensity_array()):
      processed_array = processed_array.average_bijvoet_mates()
    else:
      # Intensities: average as amplitudes, then convert back.
      processed_array = processed_array.f_sq_as_f()
      processed_array = processed_array.average_bijvoet_mates()
      processed_array = processed_array.f_as_f_sq()
      processed_array.set_observation_type_xray_intensity()
  if (r_free_flags is not None
      and r_free_flags.anomalous_flag()
      and co.non_anomalous):
    print "Converting R-free flags from anomalous to non-anomalous."
    r_free_flags = r_free_flags.average_bijvoet_mates()
  # Resolution cutoffs and scaling.
  d_max = co.low_resolution
  d_min = co.resolution
  if (d_max is not None or d_min is not None):
    if (d_max is not None):
      print "Applying low resolution cutoff: d_max=%.6g" % d_max
    if (d_min is not None):
      print "Applying high resolution cutoff: d_min=%.6g" % d_min
    processed_array = processed_array.resolution_filter(
      d_max=d_max, d_min=d_min)
    print "Number of reflections:", processed_array.indices().size()
    print
  if (co.scale_max is not None):
    print "Scaling data such that the maximum value is: %.6g" % co.scale_max
    processed_array = processed_array.apply_scaling(target_max=co.scale_max)
    print
  if (co.scale_factor is not None):
    print "Multiplying data with the factor: %.6g" % co.scale_factor
    processed_array = processed_array.apply_scaling(factor=co.scale_factor)
    print
  # Negative-observation handling.
  if (([co.remove_negatives, co.massage_intensities]).count(True) == 2):
    raise Sorry(
      "It is not possible to use --remove_negatives and"
      " --massage_intensities at the same time.")
  if (co.remove_negatives):
    if processed_array.is_real_array():
      print "Removing negatives items"
      processed_array = processed_array.select(
        processed_array.data() > 0)
      if processed_array.sigmas() is not None:
        processed_array = processed_array.select(
          processed_array.sigmas() > 0)
    else:
      raise Sorry("--remove_negatives not applicable to complex data arrays.")
  if (co.massage_intensities):
    # Only valid for intensities that will be written as MTZ amplitudes.
    if processed_array.is_real_array():
      if processed_array.is_xray_intensity_array():
        if (co.mtz is not None):
          if (co.write_mtz_amplitudes):
            print "The supplied intensities will be used to estimate"
            print " amplitudes in the following way: "
            print " Fobs = Sqrt[ (Iobs + Sqrt(Iobs**2 + 2sigmaIobs**2))/2 ]"
            print " Sigmas are estimated in a similar manner."
            print
            processed_array = processed_array.enforce_positive_amplitudes()
          else:
            raise Sorry(
              "--write_mtz_amplitudes has to be specified when using"
              " --massage_intensities")
        else:
          raise Sorry("--mtz has to be used when using --massage_intensities")
      else:
        raise Sorry(
          "Intensities must be supplied when using the option"
          " --massage_intensities")
    else:
      raise Sorry(
        "--massage_intensities not applicable to complex data arrays.")
  if (not co.generate_r_free_flags):
    # Validate existing flags against the processed data array.
    if (r_free_flags is None):
      r_free_info = []
    else:
      if (r_free_flags.anomalous_flag() != processed_array.anomalous_flag()):
        if (processed_array.anomalous_flag()):
          is_not = ("", " not")
        else:
          is_not = (" not", "")
        raise Sorry(
          "The data array is%s anomalous but the R-free array is%s.\n"
          % is_not + " Please try --non_anomalous.")
      r_free_info = ["R-free flags source: " + r_free_info]
      if (not r_free_flags.indices().all_eq(processed_array.indices())):
        # Bring both arrays into the asymmetric unit and require a flag
        # for every reflection selected for output.
        processed_array = processed_array.map_to_asu()
        r_free_flags = r_free_flags.map_to_asu().common_set(processed_array)
        n_missing_r_free_flags = processed_array.indices().size() \
          - r_free_flags.indices().size()
        if (n_missing_r_free_flags != 0):
          raise Sorry("R-free flags not compatible with data array:"
            " missing flag for %d reflections selected for output."
              % n_missing_r_free_flags)
  else:
    if (r_free_flags is not None):
      raise Sorry(
        "--r_free_label and --generate_r_free_flags are mutually exclusive.")
    print "Generating a new array of R-free flags:"
    r_free_flags = processed_array.generate_r_free_flags(
      fraction=co.r_free_flags_fraction,
      max_free=co.r_free_flags_max_free,
      use_lattice_symmetry=co.use_lattice_symmetry_in_r_free_flag_generation,
      format=co.r_free_flags_format)
    # The value marking the test set depends on the output convention.
    test_flag_value = True
    if (co.r_free_flags_format == "ccp4"):
      test_flag_value = 0
    elif (co.r_free_flags_format == "shelx"):
      test_flag_value = -1
    r_free_as_bool = r_free_flags.customized_copy(
      data=r_free_flags.data()==test_flag_value)
    r_free_info = ["R-free flags generated by %s:" % command_name]
    r_free_info.append(" "+date_and_time())
    r_free_info.append(" fraction: %.6g" % co.r_free_flags_fraction)
    r_free_info.append(" max_free: %s" % str(co.r_free_flags_max_free))
    r_free_info.append(" size of work set: %d"
      % r_free_as_bool.data().count(False))
    r_free_info.append(" size of free set: %d"
      % r_free_as_bool.data().count(True))
    r_free_info_str = StringIO()
    r_free_as_bool.show_r_free_flags_info(prefix=" ", out=r_free_info_str)
    if (co.r_free_flags_format == "ccp4"):
      r_free_info.append(" convention: CCP4 (test=0, work=1-%d)"
        % flex.max(r_free_flags.data()))
    elif (co.r_free_flags_format == "shelx"):
      r_free_info.append(" convention: SHELXL (test=-1, work=1)")
    else:
      r_free_info.append(" convention: CNS/X-PLOR (test=1, work=0)")
    print "\n".join(r_free_info[2:4])
    print r_free_info[-1]
    print r_free_info_str.getvalue()
    print
  # Output files: Scalepack, MTZ, CNS, SHELX -- any combination.
  n_output_files = 0
  if (co.sca is not None):
    if (co.generate_r_free_flags):
      raise Sorry("Cannot write R-free flags to Scalepack file.")
    file_name = reflection_file_utils.construct_output_file_name(
      input_file_names=[selected_array.info().source],
      user_file_name=co.sca,
      file_type_label="Scalepack",
      file_extension="sca")
    print "Writing Scalepack file:", file_name
    iotbx.scalepack.merge.write(
      file_name=file_name,
      miller_array=processed_array)
    n_output_files += 1
    print
  if (co.mtz is not None):
    file_name = reflection_file_utils.construct_output_file_name(
      input_file_names=[selected_array.info().source],
      user_file_name=co.mtz,
      file_type_label="MTZ",
      file_extension="mtz")
    print "Writing MTZ file:", file_name
    # MTZ history records describing provenance and conversions.
    mtz_history_buffer = flex.std_string()
    mtz_history_buffer.append(date_and_time())
    mtz_history_buffer.append("> program: %s" % command_name)
    mtz_history_buffer.append("> input file name: %s"
      % os.path.basename(selected_array.info().source))
    mtz_history_buffer.append("> input directory: %s"
      % os.path.dirname(os.path.abspath(selected_array.info().source)))
    mtz_history_buffer.append("> input labels: %s"
      % selected_array.info().label_string())
    mtz_output_array = processed_array
    if (co.write_mtz_amplitudes):
      if (not mtz_output_array.is_xray_amplitude_array()):
        print " Converting intensities to amplitudes."
        mtz_output_array = mtz_output_array.f_sq_as_f()
        mtz_history_buffer.append("> Intensities converted to amplitudes.")
    elif (co.write_mtz_intensities):
      if (not mtz_output_array.is_xray_intensity_array()):
        print " Converting amplitudes to intensities."
        mtz_output_array = mtz_output_array.f_as_f_sq()
        mtz_history_buffer.append("> Amplitudes converted to intensities.")
    column_root_label = co.mtz_root_label
    if (column_root_label is None):
      # XXX 2013-03-29: preserve original root label by default
      # XXX 2014-12-16: skip trailing "(+)" in root_label if anomalous
      column_root_label = selected_array.info().labels[0]
    column_root_label=remove_anomalous_suffix_if_necessary(
      miller_array=selected_array,
      column_root_label=column_root_label)
    mtz_dataset = mtz_output_array.as_mtz_dataset(
      column_root_label=column_root_label)
    del mtz_output_array
    if (r_free_flags is not None):
      mtz_dataset.add_miller_array(
        miller_array=r_free_flags,
        column_root_label=co.output_r_free_label)
      for line in r_free_info:
        mtz_history_buffer.append("> " + line)
    mtz_history_buffer.append("> output file name: %s"
      % os.path.basename(file_name))
    mtz_history_buffer.append("> output directory: %s"
      % os.path.dirname(os.path.abspath(file_name)))
    mtz_object = mtz_dataset.mtz_object()
    mtz_object.add_history(mtz_history_buffer)
    mtz_object.write(file_name=file_name)
    n_output_files += 1
    print
  if (co.cns is not None):
    file_name = reflection_file_utils.construct_output_file_name(
      input_file_names=[selected_array.info().source],
      user_file_name=co.cns,
      file_type_label="CNS",
      file_extension="cns")
    print "Writing CNS file:", file_name
    processed_array.export_as_cns_hkl(
      file_object=open(file_name, "w"),
      file_name=file_name,
      info=["source of data: "+str(selected_array.info())] + r_free_info,
      r_free_flags=r_free_flags)
    n_output_files += 1
    print
  if (co.shelx is not None):
    if (co.generate_r_free_flags):
      raise Sorry("Cannot write R-free flags to SHELX file.")
    file_name = reflection_file_utils.construct_output_file_name(
      input_file_names=[selected_array.info().source],
      user_file_name=co.shelx,
      file_type_label="SHELX",
      file_extension="shelx")
    print "Writing SHELX file:", file_name
    processed_array.as_amplitude_array().export_as_shelx_hklf(
      open(file_name, "w"))
    n_output_files += 1
    print
  if (n_output_files == 0):
    command_line.parser.show_help()
    print "Please specify at least one output file format,",
    print "e.g. --mtz, --sca, etc."
    print
    return None
  return processed_array
def run(args):
  """Batch driver: refine structures read from pickle files.

  Command-line arguments are either phil assignments (name=value), pickle
  files, or directories that are scanned for *.pickle files and qi_*
  quick-info files.  Supports chunked / multiprocessing execution; each
  selected pickle file is handed to process(), and exceptions are caught
  and counted so one failure does not stop the batch.
  """
  from iotbx.option_parser import option_parser as iotbx_option_parser
  import libtbx.utils
  show_times = libtbx.utils.show_times(time_start="now")
  command_call = ["iotbx.python", __file__]
  command_line = (iotbx_option_parser(
    usage=" ".join(command_call) + " [options] directory|file...")
    .enable_chunk(easy_all=True)
    .enable_multiprocessing()
  ).process(args=args, min_nargs=1)
  # When running as the multiprocessing parent, the children do the work.
  if (command_line.run_multiprocessing_chunks_if_applicable(
        command_call=command_call)):
    show_times()
    return
  co = command_line.options
  #
  print "TIME BEGIN cod_refine:", date_and_time()
  print
  #
  # Split args into phil assignments and file/directory names.
  master_phil = get_master_phil()
  argument_interpreter = master_phil.command_line_argument_interpreter()
  phil_objects = []
  remaining_args = []
  for arg in command_line.args:
    if (arg.find("=") >= 0):
      phil_objects.append(argument_interpreter.process(arg=arg))
    else:
      remaining_args.append(arg)
  work_phil = master_phil.fetch(sources=phil_objects)
  work_phil.show()
  print
  params = work_phil.extract()
  #
  # Collect pickle files; qi_XXXXXXX files hold one-line quick infos
  # keyed by COD id.  NOTE(review): eval() of qi file contents assumes
  # trusted input directories.
  qi_dict = {}
  all_pickles = []
  for arg in remaining_args:
    if (op.isdir(arg)):
      for node in sorted(os.listdir(arg)):
        if (node.endswith(".pickle")):
          all_pickles.append(op.join(arg, node))
        elif (node.startswith("qi_") and len(node) == 10):
          qi = open(op.join(arg, node)).read().splitlines()
          if (len(qi) == 1):
            cod_id = node[3:]
            quick_info = eval(qi[0])
            assert cod_id not in qi_dict
            qi_dict[cod_id] = quick_info
    elif (op.isfile(arg)):
      all_pickles.append(arg)
    else:
      raise RuntimeError("Not a file or directory: %s" % arg)
  print "Number of pickle files:", len(all_pickles)
  print "Number of quick_infos:", len(qi_dict)
  # Optionally sort work items by estimated cost (n_atoms * n_refl).
  sort_choice = params.sorting_of_pickle_files
  if (len(qi_dict) != 0 and sort_choice is not None):
    print "Sorting pickle files by n_atoms * n_refl:", sort_choice
    assert sort_choice in ["down", "up"]
    def sort_pickle_files():
      # i_sign keeps the original order stable within equal cost keys.
      if (sort_choice == "down"):
        i_sign = -1
      else:
        i_sign = 1
      buffer = []
      for i, path in enumerate(all_pickles):
        cod_id = op.basename(path).split(".", 1)[0]
        qi = qi_dict.get(cod_id)
        # Entries without quick info sort last (2**31 sentinel).
        if (qi is None):
          nn = 2**31
        else:
          nn = qi[0] * qi[1] * qi[2]
        buffer.append((nn, i_sign * i, path))
      buffer.sort()
      if (i_sign < 0):
        buffer.reverse()
      result = []
      for elem in buffer:
        result.append(elem[-1])
      return result
    all_pickles = sort_pickle_files()
  print
  #
  # Optional random subset selection (reproducible via fixed seed).
  rss = params.random_subset.size
  if (rss is not None and rss > 0):
    seed = params.random_subset.seed
    print "Selecting subset of %d pickle files using random seed %d" % (
      rss, seed)
    mt = flex.mersenne_twister(seed=seed)
    perm = mt.random_permutation(size=len(all_pickles))[:rss]
    flags = flex.bool(len(all_pickles), False).set_selected(perm, True)
    all_pickles = flex.select(all_pickles, permutation=flags.iselection())
  print
  #
  from libtbx.path import makedirs_race
  if (params.wdir_root is not None):
    makedirs_race(path=params.wdir_root)
  if (params.pickle_refined_dir is not None):
    makedirs_race(path=params.pickle_refined_dir)
  #
  # Process this chunk's share of the work; count (but survive)
  # non-keyboard exceptions.
  n_caught = 0
  for i_pickle, pickle_file_name in enumerate(all_pickles):
    if (i_pickle % command_line.chunk.n != command_line.chunk.i):
      continue
    tm = user_plus_sys_time()
    try:
      process(params, pickle_file_name)
    except KeyboardInterrupt:
      print >> sys.stderr, "CAUGHT EXCEPTION: KeyboardInterrupt"
      traceback.print_exc()
      print >> sys.stderr
      sys.stderr.flush()
      return
    except Exception:
      sys.stdout.flush()
      print >> sys.stderr, "CAUGHT EXCEPTION: %s" % pickle_file_name
      traceback.print_exc()
      print >> sys.stderr
      sys.stderr.flush()
      n_caught += 1
    else:
      print "done_with: %s (%.2f seconds)" % (pickle_file_name, tm.elapsed())
      print
      sys.stdout.flush()
  print
  print "Number of exceptions caught:", n_caught
  #
  show_times()
  print
  print "TIME END cod_refine:", date_and_time()
def run(args):
  """Command-line driver: pair MTZ and PDB files and process each pair.

  args -- sys.argv-style list; accepts explicit .mtz/.pdb/.ent file
  pairs, bare PDB ids (resolved against $PDB_MIRROR_PDB and
  $CCI_PDBMTZ), and phil "name=value" assignments (min_nargs=1).

  NOTE(review): relies on module-level names not visible in this chunk
  (process, op, os, sys, traceback, user_plus_sys_time, date_and_time,
  show_string) -- confirm against file header. Python-2 syntax.
  """
  from iotbx.option_parser import option_parser as iotbx_option_parser
  import libtbx.utils
  show_times = libtbx.utils.show_times(time_start="now")
  command_call = ["iotbx.python", __file__]
  command_line = (iotbx_option_parser(
    usage=" ".join(command_call) + " [options] directory|file...")
    .enable_chunk(easy_all=True)
    .enable_multiprocessing()
  ).process(args=args, min_nargs=1)
  if (command_line.run_multiprocessing_chunks_if_applicable(
      command_call=command_call)):
    # Parent process: children did the work; just report timings.
    show_times()
    return
  co = command_line.options
  #
  print "TIME BEGIN pdb_dev:", date_and_time()
  print
  libtbx.utils.host_and_user().show()
  print
  sys.stdout.flush()
  #
  # Reuse the cod_refine phil master, with fft algorithm and bulk
  # solvent correction enabled by default.
  from cctbx.omz import cod_refine
  master_phil = cod_refine.get_master_phil(
    max_atoms=None,
    f_calc_options_algorithm="direct *fft",
    bulk_solvent_correction=True)
  argument_interpreter = master_phil.command_line_argument_interpreter()
  phil_objects = []
  remaining_args = []
  for arg in command_line.args:
    if (arg.find("=") >= 0):
      phil_objects.append(argument_interpreter.process(arg=arg))
    else:
      remaining_args.append(arg)
  work_phil = master_phil.fetch(sources=phil_objects)
  work_phil.show()
  print
  params = work_phil.extract()
  #
  # Build (mtz, pdb) pairs from the positional arguments.
  mtz_pdb_pairs = []
  arg_iter = iter(remaining_args)
  pdb_v3_mirror_dir = os.environ.get("PDB_MIRROR_PDB")
  assert pdb_v3_mirror_dir is None or op.isdir(pdb_v3_mirror_dir)
  cci_pdbmtz_path = os.environ.get("CCI_PDBMTZ")
  assert cci_pdbmtz_path is None or op.isdir(cci_pdbmtz_path)
  for arg in arg_iter:
    def get_next(expected_exts):
      # Pull the partner file name from the argument stream, enforcing
      # the expected extension(s).
      def raise_bad_file(what, fn=None):
        msg = "%s file name (%s expected)" % (what, " or ".join(expected_exts))
        if (fn is None):
          msg += "."
        else:
          msg += ": " + show_string(fn)
        raise RuntimeError(msg)
      try:
        arg = arg_iter.next()
      except StopIteration:
        raise_bad_file("Missing")
      if (not arg.endswith(tuple(expected_exts))):
        raise_bad_file("Unexpected", arg)
      return arg
    if (op.isfile(arg) and arg.endswith((".mtz", ".pdb", ".ent"))):
      # Explicit file pair given on the command line (either order).
      if (arg.endswith(".mtz")):
        fn_mtz = arg
        fn_pdb = get_next([".pdb", ".ent"])
      else:
        fn_pdb = arg
        fn_mtz = get_next([".mtz"])
    else:
      # Bare id: look next to it, then in the configured mirrors.
      fn_mtz = arg+".mtz"
      def raise_mtz_but_no_pdb():
        raise RuntimeError(
          "MTZ file found but no PDB file: %s" % show_string(fn_mtz))
      if (op.isfile(fn_mtz)):
        for ext in [".pdb", ".ent"]:
          fn_pdb = arg+ext
          if (op.isfile(fn_pdb)):
            break
        else:
          raise_mtz_but_no_pdb()
      else:
        fn_mtz = op.join(cci_pdbmtz_path, arg+".mtz")
        if (not op.isfile(fn_mtz)):
          raise RuntimeError(
            "MTZ file not found: %s" % show_string(fn_mtz))
        # PDB v3 mirror layout: <mirror>/<middle two chars>/pdbXXXX.ent.gz
        fn_pdb = op.join(pdb_v3_mirror_dir, arg[1:3], "pdb"+arg+".ent.gz")
        if (not op.isfile(fn_pdb)):
          raise_mtz_but_no_pdb()
    mtz_pdb_pairs.append((fn_mtz, fn_pdb))
  #
  # Main loop: each chunk process handles every chunk.n-th pair.
  # KeyboardInterrupt aborts; other exceptions are counted and skipped.
  n_caught = 0
  for i_pair,mtz_pdb_pair in enumerate(mtz_pdb_pairs):
    if (i_pair % command_line.chunk.n != command_line.chunk.i): continue
    tm = user_plus_sys_time()
    try:
      process(params, mtz_pdb_pair)
    except KeyboardInterrupt:
      print >> sys.stderr, "CAUGHT EXCEPTION: KeyboardInterrupt"
      traceback.print_exc()
      print >> sys.stderr
      sys.stderr.flush()
      return
    except Exception:
      sys.stdout.flush()
      print >> sys.stderr, "CAUGHT EXCEPTION: %s" % ", ".join(mtz_pdb_pair)
      traceback.print_exc()
      print >> sys.stderr
      sys.stderr.flush()
      n_caught += 1
    else:
      print "done_with: %s, %s (%.2f seconds)" % (
        mtz_pdb_pair + (tm.elapsed(),))
      print
      sys.stdout.flush()
  print
  print "Number of exceptions caught:", n_caught
  #
  show_times()
  print
  print "TIME END pdb_dev:", date_and_time()
  sys.stdout.flush()
def run(args):
  """Command-line driver: relative scaling of two-wavelength MAD data.

  Reads two anomalous data sets (wavelength1/wavelength2), pre-scales
  them, checks for reindexing, scales them relative to each other,
  estimates f"/f' ratios, and writes an FA-value mtz file.

  NOTE(review): relies on module-level names not visible in this chunk
  (master_params, multi_out, StringIO, Sorry, iotbx.phil,
  reflection_file_reader, crystal_symmetry_from_any, crystal,
  reflection_file_utils, pre_scale, pair_analyses, fa_estimation,
  date_and_time, os, sys) -- confirm against file header.
  Python-2 syntax.
  """
  if len(args)==0:
    master_params.show(expert_level=100)
  elif ( "--help" in args ):
    print "no help available"
  elif ( "--h" in args ):
    print "no help available"
  elif ( "--show_defaults" in args ):
    master_params.show(expert_level=0)
  elif ( "--show_defaults_all" in args ):
    master_params.show(expert_level=10)
  else:
    ## Set up a multiplexed log (stdout unless --quiet, plus a buffer).
    log = multi_out()
    if (not "--quiet" in args):
      log.register(label="stdout", file_object=sys.stdout)
    string_buffer = StringIO()
    string_buffer_plots = StringIO()
    log.register(label="log_buffer", file_object=string_buffer)
    log_plots = StringIO()
    print >> log,"#phil __OFF__"
    print >> log
    print >> log, date_and_time()
    print >> log
    print >> log
    ## Sort the arguments into phil files, reflection files, and
    ## command-line phil assignments.
    phil_objects = []
    argument_interpreter = master_params.command_line_argument_interpreter(
      home_scope="scaling")
    reflection_file = None
    for arg in args:
      command_line_params = None
      arg_is_processed = False
      if arg == '--quiet':
        arg_is_processed = True
        ## The associated action with this keyword is implemented above
      if (os.path.isfile(arg)): ## is this a file name?
        ## Check if this is a phil file
        try:
          command_line_params = iotbx.phil.parse(file_name=arg)
        except KeyboardInterrupt:
          raise
        except Exception :
          pass
        if command_line_params is not None:
          phil_objects.append(command_line_params)
          arg_is_processed = True
        ## Check if this file is a reflection file
        if command_line_params is None:
          reflection_file = reflection_file_reader.any_reflection_file(
            file_name=arg, ensure_read_access=False)
        if (reflection_file is not None):
          reflection_file = arg
          arg_is_processed = True
      ## If it is not a file, it must be a phil command
      else:
        try:
          command_line_params = argument_interpreter.process(arg=arg)
          if command_line_params is not None:
            phil_objects.append(command_line_params)
            arg_is_processed = True
        except KeyboardInterrupt:
          raise
        except Exception :
          pass
      if not arg_is_processed:
        print >> log, "##----------------------------------------------##"
        print >> log, "## Unknown phil-file or phil-command:", arg
        print >> log, "##----------------------------------------------##"
        print >> log
        raise Sorry("Unknown file format or phil command: %s" % arg)
    effective_params = master_params.fetch(sources=phil_objects)
    params = effective_params.extract()
    ## Now please read in the reflections files
    ## get symmetry and cell data first please
    ## By default, the native cell and symmetry are used
    ## as reference
    crystal_symmetry_nat = None
    ## NOTE(review): stray debug print of the input file name below --
    ## consider routing it to log.
    print params.scaling.input.xray_data.wavelength1.file_name
    crystal_symmetry_nat = crystal_symmetry_from_any.extract_from(
      file_name=params.scaling.input.xray_data.wavelength1.file_name)
    if params.scaling.input.xray_data.space_group is None:
      params.scaling.input.xray_data.space_group =\
        crystal_symmetry_nat.space_group_info()
      print >> log, "Using symmetry of native data"
    if params.scaling.input.xray_data.unit_cell is None:
      params.scaling.input.xray_data.unit_cell =\
        crystal_symmetry_nat.unit_cell()
      print >> log, "Using cell of native data"
    ## Check if a unit cell is defined
    if params.scaling.input.xray_data.space_group is None:
      raise Sorry("No space group defined")
    if params.scaling.input.xray_data.unit_cell is None:
      raise Sorry("No unit cell defined")
    crystal_symmetry = crystal_symmetry = crystal.symmetry(
      unit_cell = params.scaling.input.xray_data.unit_cell,
      space_group_symbol = str(
        params.scaling.input.xray_data.space_group) )
    ## Echo the effective parameters as a phil block.
    effective_params = master_params.fetch(sources=phil_objects)
    new_params = master_params.format(python_object=params)
    print >> log, "Effective parameters"
    print >> log, "#phil __ON__"
    new_params.show(out=log,expert_level=params.scaling.input.expert_level)
    print >> log, "#phil __END__"
    print >> log
    ## define a xray data server
    xray_data_server = reflection_file_utils.reflection_file_server(
      crystal_symmetry = crystal_symmetry,
      force_symmetry = True,
      reflection_files=[])
    ## Read in native data and make appropriate selections
    miller_array_w1 = None
    miller_array_w1 = xray_data_server.get_xray_data(
      file_name = params.scaling.input.xray_data.wavelength1.file_name,
      labels = params.scaling.input.xray_data.wavelength1.labels,
      ignore_all_zeros = True,
      parameter_scope = 'scaling.input.SIR_scale.xray_data.native' )
    info_native = miller_array_w1.info()
    ## Drop the (0,0,0) reflection and non-positive data.
    miller_array_w1=miller_array_w1.map_to_asu().select(
      miller_array_w1.indices()!=(0,0,0) )
    miller_array_w1 = miller_array_w1.select(
      miller_array_w1.data() > 0 )
    ## Convert to amplitudes
    if (miller_array_w1.is_xray_intensity_array()):
      miller_array_w1 = miller_array_w1.f_sq_as_f()
    elif (miller_array_w1.is_complex_array()):
      miller_array_w1 = abs(miller_array_w1)
    if not miller_array_w1.is_real_array():
      raise Sorry("miller_array_native is not a real array")
    miller_array_w1.set_info(info = info_native)
    ## Read in derivative data and make appropriate selections
    miller_array_w2 = None
    miller_array_w2 = xray_data_server.get_xray_data(
      file_name = params.scaling.input.xray_data.wavelength2.file_name,
      labels = params.scaling.input.xray_data.wavelength2.labels,
      ignore_all_zeros = True,
      parameter_scope = 'scaling.input.SIR_scale.xray_data.derivative' )
    info_w2 = miller_array_w2.info()
    miller_array_w2=miller_array_w2.map_to_asu().select(
      miller_array_w2.indices()!=(0,0,0) )
    miller_array_w2 = miller_array_w2.select(
      miller_array_w2.data() > 0 )
    ## Convert to amplitudes
    if (miller_array_w2.is_xray_intensity_array()):
      miller_array_w2 = miller_array_w2.f_sq_as_f()
    elif (miller_array_w2.is_complex_array()):
      miller_array_w2 = abs(miller_array_w2)
    if not miller_array_w2.is_real_array():
      raise Sorry("miller_array_derivative is not a real array")
    miller_array_w2.set_info(info = info_w2)
    ## Make sure we have anomalous diffs in both files
    assert miller_array_w1.anomalous_flag()
    assert miller_array_w2.anomalous_flag()
    ## Print info
    print >> log
    print >> log, "Wavelength 1"
    print >> log, "============"
    miller_array_w1.show_comprehensive_summary(f=log)
    print >> log
    w1_pre_scale = pre_scale.pre_scaler(
      miller_array_w1,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_w1 = w1_pre_scale.x1.deep_copy()
    del w1_pre_scale
    print >> log
    print >> log, "Wavelength 2"
    print >> log, "============"
    miller_array_w2.show_comprehensive_summary(f=log)
    print >> log
    w2_pre_scale = pre_scale.pre_scaler(
      miller_array_w2,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_w2 = w2_pre_scale.x1.deep_copy()
    del w2_pre_scale
    print >> log
    print >> log, "Checking for possible reindexing schemes"
    print >> log, "----------------------------------------"
    print >> log
    print >> log, "Reindexing operator derived as described in:"
    print >> log, "Grosse-Kunstleve, Afonine, Sauter & Adams. (2005)."
    print >> log, " IUCr Computing Commission Newsletter 5."
    print >> log
    reindex_object = pair_analyses.reindexing(
      set_a=miller_array_w1,
      set_b=miller_array_w2,
      out=log)
    miller_array_w2 = reindex_object.select_and_transform()
    miller_array_w2.map_to_asu()
    print >> log
    print >> log, "Relative scaling of 2-wavelength mad data"
    print >> log, "-----------------------------------------"
    print >> log
    scaler = fa_estimation.combined_scaling(
      miller_array_w1,
      miller_array_w2,
      params.scaling.input.scaling_strategy.iso_protocol)
    miller_array_w1 = scaler.x1.deep_copy()
    miller_array_w2 = scaler.x2.deep_copy()
    del scaler
    print >> log
    print >> log, "Estimating f\" and f' ratios"
    print >> log, "----------------------------"
    print >> log
    # now things are scaled see if we can guestimate the ratio
    fdpratio = pair_analyses.f_double_prime_ratio(
      miller_array_w1,
      miller_array_w2)
    fpfdpratio = pair_analyses.delta_f_prime_f_double_prime_ratio(
      miller_array_w1,
      miller_array_w2)
    k1 = fdpratio.ratio
    k2 = fpfdpratio.ratio
    if k1 is not None:
      print >> log
      print >> log, " The estimate of f\"(w1)/f\"(w2) is %3.2f"\
        %(fdpratio.ratio)
    if k2 is not None:
      print >> log, " The estimate of (f'(w1)-f'(w2))/f\"(w2) is %3.2f"\
        %(fpfdpratio.ratio)
      print >> log
    print >> log, " The quality of these estimates depends to a large extend"
    print >> log, " on the quality of the data. If user supplied values"
    print >> log, " of f\" and f' are given, they will be used instead "
    print >> log, " of the estimates."
    print >> log
    ## User-supplied f" values override the estimated k1 ratio.
    if params.scaling.input.xray_data.wavelength1.f_double_prime is not None:
      if params.scaling.input.xray_data.wavelength2.f_double_prime is not None:
        k1 = (params.scaling.input.xray_data.wavelength1.f_double_prime/
          params.scaling.input.xray_data.wavelength2.f_double_prime)
        print >> log, " Using user specified f\" values"
        print >> log, "   user specified f\"(w1)/f\"(w2) is %3.2f"\
          %(k1)
        print >> log
    ## User-supplied f' and f" values override the estimated k2 ratio.
    if params.scaling.input.xray_data.wavelength1.f_prime is not None:
      if params.scaling.input.xray_data.wavelength2.f_prime is not None:
        if params.scaling.input.xray_data.wavelength2.f_double_prime is not None:
          k2 = (params.scaling.input.xray_data.wavelength1.f_prime-
            params.scaling.input.xray_data.wavelength2.f_prime)\
            /params.scaling.input.xray_data.wavelength2.f_double_prime
          print >> log, " Using user specified f\" and f' values"
          print >> log, "   user specified f\"(w1)/f\"(w2) is %3.2f"\
            %(k2)
          print >> log
    fa_gen = fa_estimation.twmad_fa_driver(miller_array_w1,
                                           miller_array_w2,
                                           k1,
                                           k2,
                                           params.scaling.input.fa_estimation)
    print >> log
    print >> log, "writing mtz file"
    print >> log, "----------------"
    print >> log
    ## Please write out the abs_delta_f array
    fa = fa_gen.fa_values
    mtz_dataset = fa.as_mtz_dataset(
      column_root_label='F'+params.scaling.input.output.outlabel)
    mtz_dataset.mtz_object().write(
      file_name=params.scaling.input.output.hklout)
def run(self):
  """Top-level cryo_fit2 driver (program-template style method).

  Reads the user model and real-space map via self.data_manager,
  reports CC_overall before fitting, sanitizes the input model
  (archaic RNA names, unusual residues, alternate conformers), and
  writes custom geometry .eff files whose names are appended to
  sys.argv for downstream processing.

  Fixes vs. original:
  - bare ``except:`` narrowed to ``except Exception:`` so
    KeyboardInterrupt/SystemExit are not swallowed;
  - ``exit(1)`` replaced by ``sys.exit(1)`` (the ``exit`` builtin is
    injected by the ``site`` module and is not guaranteed to exist in
    non-interactive/frozen runs).

  NOTE(review): relies on module-level helpers not visible in this
  chunk (multi_out, date_and_time, add_CRYST1_to_pdb_file,
  calculate_overall_cc, write_error_message_for_overall_cc,
  report_map_model_cc, remove_R_prefix_in_RNA, clean_unusual_residue,
  leave_one_conformer, assign_ss_params_to_H_E,
  assign_nucleic_acid_sigmas, write_custom_geometry, mmtbx.utils) --
  confirm against file header.
  """
  time_total_start = time.time()
  args = sys.argv[1:]
  log = multi_out()
  out = sys.stdout
  log.register("stdout", out)
  log_file_name = "cryo_fit2.log"
  # 'w' mode: an existing log file is overwritten (use "a" to append).
  logfile = open( log_file_name, "w" )
  log.register("logfile", logfile)
  logfile.write(str(date_and_time()))

  print('A user input model name: %s' % self.data_manager.get_default_model_name(), file=self.logger)
  model_inp = self.data_manager.get_model()

  print('A user input map file name: %s' % self.data_manager.get_default_real_map_name(), file=self.logger)
  map_inp = self.data_manager.get_real_map()

  ######### <begin> calculates CC_overall before cryo_fit2
  add_CRYST1_to_pdb_file(self, logfile, map_inp, self.data_manager.get_default_model_name())
  try:
    cc_before_cryo_fit2 = round(
      calculate_overall_cc(map_data=map_inp.map_data(), model=model_inp,
        resolution=self.params.resolution), 4)
    # Pavel thinks that cc_box should be pretty much similar as this
    # cc_before_cryo_fit2. Doo Nam confirmed that even without CRYST1
    # in pdb file, cc value here does not match cc_box (but close).
    write_this = "\n\nCC_overall before cryo_fit2 (both exploration and final MD): " + str(
      cc_before_cryo_fit2) + "\n"
    print('%s' % (write_this))
    logfile.write(str(write_this))
  except Exception:
    # Narrowed from a bare except: still best-effort (the CC report is
    # informational), but no longer swallows KeyboardInterrupt/SystemExit.
    write_error_message_for_overall_cc(logfile)

  if (self.params.cc_only == True):
    crystal_symmetry = mmtbx.utils.check_and_set_crystal_symmetry(
      models=[model_inp], map_inps=[map_inp])
    report_map_model_cc(self, map_inp, model_inp, crystal_symmetry, logfile)
    logfile.close()
    sys.exit(1)
  ######### <end> calculates CC_overall before cryo_fit2

  write_this = "\nPreparing cryo_fit2...\n"
  print(write_this)
  logfile.write(write_this)

  # Rewrite archaic RNA residue names (RA/RU/RT/RG/RC); the user must
  # rerun with the rewritten file.
  old_style_RNA, removed_R_prefix_in_RNA_pdb_file_name = remove_R_prefix_in_RNA(
    self.data_manager.get_default_model_name())
  if (old_style_RNA == True):
    write_this = '''Archaic style of nucleic acids (e.g.
RA, RU, RT, RG, RC) were detected in user's pdb file. phenix can't run with this type of naming.
cryo_fit2 replaced these with A,U,T,G,C and rewrote into ''' + removed_R_prefix_in_RNA_pdb_file_name + '''
Please rerun cryo_fit2 with this re-written pdb file\n'''
    print(write_this)
    logfile.write(write_this)
    logfile.close()
    sys.exit(1)

  # Remove residues real_space_refine cannot handle; rerun required.
  cleaned_pdb_file_name, cleaned_unusual_residue = clean_unusual_residue(
    self.data_manager.get_default_model_name())
  if (cleaned_unusual_residue == True):
    write_this = '''
Unusual residue names like 34G that real_space_refine can't deal were detected in user's pdb file.\nCryo_fit2 removed these and rewrote into ''' + cleaned_pdb_file_name + '''\nPlease rerun cryo_fit2 with this re-written pdb file\n'''
    print(write_this)
    logfile.write(write_this)
    logfile.close()
    sys.exit(1)

  leave_one_conformer(logfile, self.data_manager.get_default_model_name())

  ############# (begin) Assign sigma/slack for H/E
  if ((self.params.HE_sigma != 0.05) or (self.params.HE_slack != 0.0)
      or (self.params.HE_angle_sigma_scale != 1) or (self.params.HE_top_out == True)):
    generated_eff_file_name = assign_ss_params_to_H_E(logfile, self.data_manager.get_default_model_name(), \
      self.params.HE_sigma, self.params.HE_slack, self.params.HE_angle_sigma_scale,\
      self.params.HE_top_out)
    if (generated_eff_file_name != False):
      # Downstream code picks the .eff file up from sys.argv.
      sys.argv.append(generated_eff_file_name)
  ############# (end) Assign sigma/slack for H/E

  ############# (begin) Assign sigmas for nucleic_acids
  if ((self.params.parallelity_sigma != 0.0335) or (self.params.planarity_sigma != 0.176)
      or (self.params.stacking_pair_sigma != 0.027)):
    generated_eff_file_name_w_nucleic_acid_sigmas = assign_nucleic_acid_sigmas(logfile, \
      self.data_manager.get_default_model_name(), self.params.parallelity_sigma,
      self.params.planarity_sigma, self.params.stacking_pair_sigma)
    if (generated_eff_file_name_w_nucleic_acid_sigmas != False):
      sys.argv.append(generated_eff_file_name_w_nucleic_acid_sigmas)
  ############# (end) Assign sigmas for nucleic_acids

  if (self.params.write_custom_geom_only == True):
    generated_eff_file_name = write_custom_geometry(logfile, self.data_manager.get_default_model_name(), \
      self.params.stronger_ss_sigma, self.params.stronger_ss_slack)
    sys.exit(1)

  ''' # seems wrong to assign sigma and slack ? keep for now
  ############# (begin) deal with Doonam's stronger_ss
  if (self.params.stronger_ss == True):
    generated_eff_file_name = write_custom_geometry(logfile, self.data_manager.get_default_model_name(), \
      self.params.stronger_ss_sigma, self.params.stronger_ss_slack)
    sys.argv.append(generated_eff_file_name)
  ############# (end) deal with Doonam's stronger_ss
  '''

  logfile.close()
def run(args):
  """Command-line driver: relative scaling of two-wavelength MAD data
  (Python-3 print-function variant).

  Reads two anomalous data sets (wavelength1/wavelength2), pre-scales
  them, checks for reindexing, scales them relative to each other,
  estimates f"/f' ratios, and writes an FA-value mtz file.

  NOTE(review): relies on module-level names not visible in this chunk
  (master_params, multi_out, StringIO, Sorry, iotbx.phil,
  reflection_file_reader, crystal_symmetry_from_any, crystal,
  reflection_file_utils, pre_scale, pair_analyses, fa_estimation,
  date_and_time, os, sys) -- confirm against file header.
  """
  if len(args)==0:
    master_params.show(expert_level=100)
  elif ( "--help" in args ):
    print("no help available")
  elif ( "--h" in args ):
    print("no help available")
  elif ( "--show_defaults" in args ):
    master_params.show(expert_level=0)
  elif ( "--show_defaults_all" in args ):
    master_params.show(expert_level=10)
  else:
    ## Set up a multiplexed log (stdout unless --quiet, plus a buffer).
    log = multi_out()
    if (not "--quiet" in args):
      log.register(label="stdout", file_object=sys.stdout)
    string_buffer = StringIO()
    string_buffer_plots = StringIO()
    log.register(label="log_buffer", file_object=string_buffer)
    log_plots = StringIO()
    print("#phil __OFF__", file=log)
    print(file=log)
    print(date_and_time(), file=log)
    print(file=log)
    print(file=log)
    ## Sort the arguments into phil files, reflection files, and
    ## command-line phil assignments.
    phil_objects = []
    argument_interpreter = master_params.command_line_argument_interpreter(
      home_scope="scaling")
    reflection_file = None
    for arg in args:
      command_line_params = None
      arg_is_processed = False
      if arg == '--quiet':
        arg_is_processed = True
        ## The associated action with this keyword is implemented above
      if (os.path.isfile(arg)): ## is this a file name?
        ## Check if this is a phil file
        try:
          command_line_params = iotbx.phil.parse(file_name=arg)
        except KeyboardInterrupt:
          raise
        except Exception :
          pass
        if command_line_params is not None:
          phil_objects.append(command_line_params)
          arg_is_processed = True
        ## Check if this file is a reflection file
        if command_line_params is None:
          reflection_file = reflection_file_reader.any_reflection_file(
            file_name=arg, ensure_read_access=False)
        if (reflection_file is not None):
          reflection_file = arg
          arg_is_processed = True
      ## If it is not a file, it must be a phil command
      else:
        try:
          command_line_params = argument_interpreter.process(arg=arg)
          if command_line_params is not None:
            phil_objects.append(command_line_params)
            arg_is_processed = True
        except KeyboardInterrupt:
          raise
        except Exception :
          pass
      if not arg_is_processed:
        print("##----------------------------------------------##", file=log)
        print("## Unknown phil-file or phil-command:", arg, file=log)
        print("##----------------------------------------------##", file=log)
        print(file=log)
        raise Sorry("Unknown file format or phil command: %s" % arg)
    effective_params = master_params.fetch(sources=phil_objects)
    params = effective_params.extract()
    ## Now please read in the reflections files
    ## get symmetry and cell data first please
    ## By default, the native cell and symmetry are used
    ## as reference
    crystal_symmetry_nat = None
    ## NOTE(review): stray debug print of the input file name below --
    ## consider routing it to log.
    print(params.scaling.input.xray_data.wavelength1.file_name)
    crystal_symmetry_nat = crystal_symmetry_from_any.extract_from(
      file_name=params.scaling.input.xray_data.wavelength1.file_name)
    if params.scaling.input.xray_data.space_group is None:
      params.scaling.input.xray_data.space_group =\
        crystal_symmetry_nat.space_group_info()
      print("Using symmetry of native data", file=log)
    if params.scaling.input.xray_data.unit_cell is None:
      params.scaling.input.xray_data.unit_cell =\
        crystal_symmetry_nat.unit_cell()
      print("Using cell of native data", file=log)
    ## Check if a unit cell is defined
    if params.scaling.input.xray_data.space_group is None:
      raise Sorry("No space group defined")
    if params.scaling.input.xray_data.unit_cell is None:
      raise Sorry("No unit cell defined")
    crystal_symmetry = crystal_symmetry = crystal.symmetry(
      unit_cell = params.scaling.input.xray_data.unit_cell,
      space_group_symbol = str(
        params.scaling.input.xray_data.space_group) )
    ## Echo the effective parameters as a phil block.
    effective_params = master_params.fetch(sources=phil_objects)
    new_params = master_params.format(python_object=params)
    print("Effective parameters", file=log)
    print("#phil __ON__", file=log)
    new_params.show(out=log,expert_level=params.scaling.input.expert_level)
    print("#phil __END__", file=log)
    print(file=log)
    ## define a xray data server
    xray_data_server = reflection_file_utils.reflection_file_server(
      crystal_symmetry = crystal_symmetry,
      force_symmetry = True,
      reflection_files=[])
    ## Read in native data and make appropriate selections
    miller_array_w1 = None
    miller_array_w1 = xray_data_server.get_xray_data(
      file_name = params.scaling.input.xray_data.wavelength1.file_name,
      labels = params.scaling.input.xray_data.wavelength1.labels,
      ignore_all_zeros = True,
      parameter_scope = 'scaling.input.SIR_scale.xray_data.native' )
    info_native = miller_array_w1.info()
    ## Drop the (0,0,0) reflection and non-positive data.
    miller_array_w1=miller_array_w1.map_to_asu().select(
      miller_array_w1.indices()!=(0,0,0) )
    miller_array_w1 = miller_array_w1.select(
      miller_array_w1.data() > 0 )
    ## Convert to amplitudes
    if (miller_array_w1.is_xray_intensity_array()):
      miller_array_w1 = miller_array_w1.f_sq_as_f()
    elif (miller_array_w1.is_complex_array()):
      miller_array_w1 = abs(miller_array_w1)
    if not miller_array_w1.is_real_array():
      raise Sorry("miller_array_native is not a real array")
    miller_array_w1.set_info(info = info_native)
    ## Read in derivative data and make appropriate selections
    miller_array_w2 = None
    miller_array_w2 = xray_data_server.get_xray_data(
      file_name = params.scaling.input.xray_data.wavelength2.file_name,
      labels = params.scaling.input.xray_data.wavelength2.labels,
      ignore_all_zeros = True,
      parameter_scope = 'scaling.input.SIR_scale.xray_data.derivative' )
    info_w2 = miller_array_w2.info()
    miller_array_w2=miller_array_w2.map_to_asu().select(
      miller_array_w2.indices()!=(0,0,0) )
    miller_array_w2 = miller_array_w2.select(
      miller_array_w2.data() > 0 )
    ## Convert to amplitudes
    if (miller_array_w2.is_xray_intensity_array()):
      miller_array_w2 = miller_array_w2.f_sq_as_f()
    elif (miller_array_w2.is_complex_array()):
      miller_array_w2 = abs(miller_array_w2)
    if not miller_array_w2.is_real_array():
      raise Sorry("miller_array_derivative is not a real array")
    miller_array_w2.set_info(info = info_w2)
    ## Make sure we have anomalous diffs in both files
    assert miller_array_w1.anomalous_flag()
    assert miller_array_w2.anomalous_flag()
    ## Print info
    print(file=log)
    print("Wavelength 1", file=log)
    print("============", file=log)
    miller_array_w1.show_comprehensive_summary(f=log)
    print(file=log)
    w1_pre_scale = pre_scale.pre_scaler(
      miller_array_w1,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_w1 = w1_pre_scale.x1.deep_copy()
    del w1_pre_scale
    print(file=log)
    print("Wavelength 2", file=log)
    print("============", file=log)
    miller_array_w2.show_comprehensive_summary(f=log)
    print(file=log)
    w2_pre_scale = pre_scale.pre_scaler(
      miller_array_w2,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_w2 = w2_pre_scale.x1.deep_copy()
    del w2_pre_scale
    print(file=log)
    print("Checking for possible reindexing schemes", file=log)
    print("----------------------------------------", file=log)
    print(file=log)
    print("Reindexing operator derived as described in:", file=log)
    print("Grosse-Kunstleve, Afonine, Sauter & Adams. (2005).", file=log)
    print(" IUCr Computing Commission Newsletter 5.", file=log)
    print(file=log)
    reindex_object = pair_analyses.reindexing(
      set_a=miller_array_w1,
      set_b=miller_array_w2,
      out=log)
    miller_array_w2 = reindex_object.select_and_transform()
    miller_array_w2.map_to_asu()
    print(file=log)
    print("Relative scaling of 2-wavelength mad data", file=log)
    print("-----------------------------------------", file=log)
    print(file=log)
    scaler = fa_estimation.combined_scaling(
      miller_array_w1,
      miller_array_w2,
      params.scaling.input.scaling_strategy.iso_protocol)
    miller_array_w1 = scaler.x1.deep_copy()
    miller_array_w2 = scaler.x2.deep_copy()
    del scaler
    print(file=log)
    print("Estimating f\" and f' ratios", file=log)
    print("----------------------------", file=log)
    print(file=log)
    # now things are scaled see if we can guestimate the ratio
    fdpratio = pair_analyses.f_double_prime_ratio(
      miller_array_w1,
      miller_array_w2)
    fpfdpratio = pair_analyses.delta_f_prime_f_double_prime_ratio(
      miller_array_w1,
      miller_array_w2)
    k1 = fdpratio.ratio
    k2 = fpfdpratio.ratio
    if k1 is not None:
      print(file=log)
      print(" The estimate of f\"(w1)/f\"(w2) is %3.2f"\
        %(fdpratio.ratio), file=log)
    if k2 is not None:
      print(" The estimate of (f'(w1)-f'(w2))/f\"(w2) is %3.2f"\
        %(fpfdpratio.ratio), file=log)
      print(file=log)
    print(" The quality of these estimates depends to a large extend", file=log)
    print(" on the quality of the data. If user supplied values", file=log)
    print(" of f\" and f' are given, they will be used instead ", file=log)
    print(" of the estimates.", file=log)
    print(file=log)
    ## User-supplied f" values override the estimated k1 ratio.
    if params.scaling.input.xray_data.wavelength1.f_double_prime is not None:
      if params.scaling.input.xray_data.wavelength2.f_double_prime is not None:
        k1 = (params.scaling.input.xray_data.wavelength1.f_double_prime/
          params.scaling.input.xray_data.wavelength2.f_double_prime)
        print(" Using user specified f\" values", file=log)
        print("   user specified f\"(w1)/f\"(w2) is %3.2f"\
          %(k1), file=log)
        print(file=log)
    ## User-supplied f' and f" values override the estimated k2 ratio.
    if params.scaling.input.xray_data.wavelength1.f_prime is not None:
      if params.scaling.input.xray_data.wavelength2.f_prime is not None:
        if params.scaling.input.xray_data.wavelength2.f_double_prime is not None:
          k2 = (params.scaling.input.xray_data.wavelength1.f_prime-
            params.scaling.input.xray_data.wavelength2.f_prime)\
            /params.scaling.input.xray_data.wavelength2.f_double_prime
          print(" Using user specified f\" and f' values", file=log)
          print("   user specified f\"(w1)/f\"(w2) is %3.2f"\
            %(k2), file=log)
          print(file=log)
    fa_gen = fa_estimation.twmad_fa_driver(miller_array_w1,
                                           miller_array_w2,
                                           k1,
                                           k2,
                                           params.scaling.input.fa_estimation)
    print(file=log)
    print("writing mtz file", file=log)
    print("----------------", file=log)
    print(file=log)
    ## Please write out the abs_delta_f array
    fa = fa_gen.fa_values
    mtz_dataset = fa.as_mtz_dataset(
      column_root_label='F'+params.scaling.input.output.outlabel)
    mtz_dataset.mtz_object().write(
      file_name=params.scaling.input.output.hklout)
def run(args):
  """Command-line driver: process a collection of .pickle result files.

  args -- sys.argv-style list; accepts directories, pickle files, and
  phil "name=value" assignments (min_nargs=1).

  NOTE(review): relies on module-level names not visible in this chunk
  (get_master_phil, process, op, os, sys, flex, traceback,
  user_plus_sys_time, date_and_time) -- confirm against file header.
  Python-2 syntax (print statement).
  """
  from iotbx.option_parser import option_parser as iotbx_option_parser
  import libtbx.utils
  show_times = libtbx.utils.show_times(time_start="now")
  command_call = ["iotbx.python", __file__]
  # Parse options; chunking/multiprocessing may re-launch child processes.
  command_line = (iotbx_option_parser(
    usage=" ".join(command_call) + " [options] directory|file...")
    .enable_chunk(easy_all=True)
    .enable_multiprocessing()
  ).process(args=args, min_nargs=1)
  if (command_line.run_multiprocessing_chunks_if_applicable(
      command_call=command_call)):
    # Parent process: children did the work; just report timings.
    show_times()
    return
  co = command_line.options
  #
  print "TIME BEGIN cod_refine:", date_and_time()
  print
  #
  # Split remaining args into phil assignments ("name=value") and paths.
  master_phil = get_master_phil()
  argument_interpreter = master_phil.command_line_argument_interpreter()
  phil_objects = []
  remaining_args = []
  for arg in command_line.args:
    if (arg.find("=") >= 0):
      phil_objects.append(argument_interpreter.process(arg=arg))
    else:
      remaining_args.append(arg)
  work_phil = master_phil.fetch(sources=phil_objects)
  work_phil.show()
  print
  params = work_phil.extract()
  #
  # Collect pickle files; "qi_XXXXXXX" files hold one-line quick-info
  # tuples keyed by COD id.
  # NOTE(review): eval() of file content assumes a trusted directory --
  # verify provenance of qi_* files.
  qi_dict = {}
  all_pickles = []
  for arg in remaining_args:
    if (op.isdir(arg)):
      for node in sorted(os.listdir(arg)):
        if (node.endswith(".pickle")):
          all_pickles.append(op.join(arg, node))
        elif (node.startswith("qi_") and len(node) == 10):
          qi = open(op.join(arg, node)).read().splitlines()
          if (len(qi) == 1):
            cod_id = node[3:]
            quick_info = eval(qi[0])
            assert cod_id not in qi_dict
            qi_dict[cod_id] = quick_info
    elif (op.isfile(arg)):
      all_pickles.append(arg)
    else:
      raise RuntimeError("Not a file or directory: %s" % arg)
  print "Number of pickle files:", len(all_pickles)
  print "Number of quick_infos:", len(qi_dict)
  # Optional sort of the work list by the product of the quick-info
  # fields (entries without quick-info sort last via the 2**31 sentinel).
  sort_choice = params.sorting_of_pickle_files
  if (len(qi_dict) != 0 and sort_choice is not None):
    print "Sorting pickle files by n_atoms * n_refl:", sort_choice
    assert sort_choice in ["down", "up"]
    def sort_pickle_files():
      # i_sign keeps the original order stable within equal keys when
      # the final reverse() is applied for "down".
      if (sort_choice == "down"): i_sign = -1
      else: i_sign = 1
      buffer = []
      for i,path in enumerate(all_pickles):
        cod_id = op.basename(path).split(".",1)[0]
        qi = qi_dict.get(cod_id)
        if (qi is None):
          nn = 2**31
        else:
          nn = qi[0] * qi[1] * qi[2]
        buffer.append((nn, i_sign*i, path))
      buffer.sort()
      if (i_sign < 0):
        buffer.reverse()
      result = []
      for elem in buffer:
        result.append(elem[-1])
      return result
    all_pickles = sort_pickle_files()
    print
  #
  # Optional deterministic random subset of the work list.
  rss = params.random_subset.size
  if (rss is not None and rss > 0):
    seed = params.random_subset.seed
    print "Selecting subset of %d pickle files using random seed %d" % (
      rss, seed)
    mt = flex.mersenne_twister(seed=seed)
    perm = mt.random_permutation(size=len(all_pickles))[:rss]
    flags = flex.bool(len(all_pickles), False).set_selected(perm, True)
    all_pickles = flex.select(all_pickles, permutation=flags.iselection())
    print
  #
  # Race-safe mkdir (multiple chunk processes may run concurrently).
  from libtbx.path import makedirs_race
  if (params.wdir_root is not None):
    makedirs_race(path=params.wdir_root)
  if (params.pickle_refined_dir is not None):
    makedirs_race(path=params.pickle_refined_dir)
  #
  # Main loop: each chunk process handles every chunk.n-th file.
  # KeyboardInterrupt aborts; other exceptions are counted and skipped.
  n_caught = 0
  for i_pickle,pickle_file_name in enumerate(all_pickles):
    if (i_pickle % command_line.chunk.n != command_line.chunk.i): continue
    tm = user_plus_sys_time()
    try:
      process(params, pickle_file_name)
    except KeyboardInterrupt:
      print >> sys.stderr, "CAUGHT EXCEPTION: KeyboardInterrupt"
      traceback.print_exc()
      print >> sys.stderr
      sys.stderr.flush()
      return
    except Exception:
      sys.stdout.flush()
      print >> sys.stderr, "CAUGHT EXCEPTION: %s" % pickle_file_name
      traceback.print_exc()
      print >> sys.stderr
      sys.stderr.flush()
      n_caught += 1
    else:
      print "done_with: %s (%.2f seconds)" % (pickle_file_name, tm.elapsed())
      print
      sys.stdout.flush()
  print
  print "Number of exceptions caught:", n_caught
  #
  show_times()
  print
  print "TIME END cod_refine:", date_and_time()
def run(args):
  """Scale "after_burn" against "before_burn" X-ray data and write delta F's.

  Command-line driver: handles the help/show-defaults flags, otherwise
  parses phil files/commands and reflection files, pre-scales both data
  sets, performs combined isotropic scaling, generates delta f's (with
  the NSR bias from the iso protocol) and writes the |delta F| array to
  an MTZ file.

  :param args: sequence of command-line argument strings
  """
  if len(args) == 0:
    master_params.show(expert_level=0)
  elif ("--help" in args):
    print("no help available")
  elif ("--h" in args):
    print("no help available")
  elif ("--show_defaults" in args):
    master_params.show(expert_level=0)
  elif ("--show_defaults_all" in args):
    master_params.show(expert_level=10)
  else:
    log = multi_out()
    if "--quiet" not in args:
      log.register(label="stdout", file_object=sys.stdout)
    string_buffer = StringIO()
    string_buffer_plots = StringIO()
    log.register(label="log_buffer", file_object=string_buffer)
    log_plots = StringIO()
    print("#phil __OFF__", file=log)
    print(file=log)
    print(date_and_time(), file=log)
    print(file=log)
    print(file=log)
    phil_objects = []
    argument_interpreter = master_params.command_line_argument_interpreter(
      home_scope="scaling")
    reflection_file = None
    for arg in args:
      command_line_params = None
      arg_is_processed = False
      if arg == '--quiet':
        arg_is_processed = True
        ## The associated action with this keyword is implemented above
      if (os.path.isfile(arg)):  ## is this a file name?
        ## Check if this is a phil file
        try:
          command_line_params = iotbx.phil.parse(file_name=arg)
        except KeyboardInterrupt:
          raise
        except Exception:
          pass
        if command_line_params is not None:
          phil_objects.append(command_line_params)
          arg_is_processed = True
        ## Check if this file is a reflection file
        if command_line_params is None:
          reflection_file = reflection_file_reader.any_reflection_file(
            file_name=arg, ensure_read_access=False)
          if (reflection_file is not None):
            reflection_file = arg
            arg_is_processed = True
      ## If it is not a file, it must be a phil command
      else:
        try:
          command_line_params = argument_interpreter.process(arg=arg)
          if command_line_params is not None:
            phil_objects.append(command_line_params)
            arg_is_processed = True
        except KeyboardInterrupt:
          raise
        except Exception:
          pass
      if not arg_is_processed:
        print("##----------------------------------------------##", file=log)
        print("## Unknown phil-file or phil-command:", arg, file=log)
        print("##----------------------------------------------##", file=log)
        print(file=log)
        raise Sorry("Unknown file format or phil command: %s" % arg)
    effective_params = master_params.fetch(sources=phil_objects)
    params = effective_params.extract()
    ## Now read in the reflection files.
    ## Get symmetry and cell data first; by default the after_burn cell
    ## and symmetry are used as reference.
    crystal_symmetry_nat = crystal_symmetry_from_any.extract_from(
      file_name=params.scaling.input.xray_data.after_burn.file_name)
    if params.scaling.input.xray_data.space_group is None:
      params.scaling.input.xray_data.space_group =\
        crystal_symmetry_nat.space_group_info()
      print("Using symmetry of after_burn data", file=log)
    if params.scaling.input.xray_data.unit_cell is None:
      params.scaling.input.xray_data.unit_cell =\
        crystal_symmetry_nat.unit_cell()
      print("Using cell of after_burn data", file=log)
    ## Check that space group and unit cell are defined
    if params.scaling.input.xray_data.space_group is None:
      raise Sorry("No space group defined")
    if params.scaling.input.xray_data.unit_cell is None:
      raise Sorry("No unit cell defined")
    crystal_symmetry = crystal.symmetry(
      unit_cell=params.scaling.input.xray_data.unit_cell,
      space_group_symbol=str(params.scaling.input.xray_data.space_group))
    effective_params = master_params.fetch(sources=phil_objects)
    new_params = master_params.format(python_object=params)
    print("Effective parameters", file=log)
    print("#phil __ON__", file=log)
    new_params.show(out=log,
      expert_level=params.scaling.input.expert_level)
    print("#phil __END__", file=log)
    print(file=log)
    ## Define an xray data server
    xray_data_server = reflection_file_utils.reflection_file_server(
      crystal_symmetry=crystal_symmetry,
      force_symmetry=True,
      reflection_files=[])
    ## Read in after_burn ("native") data and make appropriate selections
    miller_array_native = xray_data_server.get_xray_data(
      file_name=params.scaling.input.xray_data.after_burn.file_name,
      labels=params.scaling.input.xray_data.after_burn.labels,
      ignore_all_zeros=True,
      parameter_scope='scaling.input.SIR_scale.xray_data.after_burn')
    info_native = miller_array_native.info()
    ## Drop the (0,0,0) reflection and non-positive observations
    miller_array_native = miller_array_native.map_to_asu().select(
      miller_array_native.indices() != (0,0,0))
    miller_array_native = miller_array_native.select(
      miller_array_native.data() > 0)
    ## Convert to amplitudes
    if (miller_array_native.is_xray_intensity_array()):
      miller_array_native = miller_array_native.f_sq_as_f()
    elif (miller_array_native.is_complex_array()):
      miller_array_native = abs(miller_array_native)
    if not miller_array_native.is_real_array():
      raise Sorry("miller_array_native is not a real array")
    miller_array_native.set_info(info=info_native)
    ## Read in before_burn ("derivative") data and make appropriate selections
    miller_array_derivative = xray_data_server.get_xray_data(
      file_name=params.scaling.input.xray_data.before_burn.file_name,
      labels=params.scaling.input.xray_data.before_burn.labels,
      ignore_all_zeros=True,
      parameter_scope='scaling.input.SIR_scale.xray_data.before_burn')
    info_derivative = miller_array_derivative.info()
    miller_array_derivative = miller_array_derivative.map_to_asu().select(
      miller_array_derivative.indices() != (0,0,0))
    miller_array_derivative = miller_array_derivative.select(
      miller_array_derivative.data() > 0)
    ## Convert to amplitudes
    if (miller_array_derivative.is_xray_intensity_array()):
      miller_array_derivative = miller_array_derivative.f_sq_as_f()
    elif (miller_array_derivative.is_complex_array()):
      miller_array_derivative = abs(miller_array_derivative)
    if not miller_array_derivative.is_real_array():
      raise Sorry("miller_array_derivative is not a real array")
    miller_array_derivative.set_info(info=info_derivative)
    ## As this is a SIR case, we will remove any anomalous pairs
    if miller_array_derivative.anomalous_flag():
      miller_array_derivative = miller_array_derivative.average_bijvoet_mates()\
        .set_observation_type( miller_array_derivative )
    if miller_array_native.anomalous_flag():
      miller_array_native = miller_array_native.average_bijvoet_mates()\
        .set_observation_type( miller_array_native )
    ## Print info
    print(file=log)
    print("Native data", file=log)
    print("===========", file=log)
    miller_array_native.show_comprehensive_summary(f=log)
    print(file=log)
    native_pre_scale = pre_scale.pre_scaler(
      miller_array_native,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_native = native_pre_scale.x1.deep_copy()
    del native_pre_scale
    print(file=log)
    print("Derivative data", file=log)
    print("===============", file=log)
    miller_array_derivative.show_comprehensive_summary(f=log)
    print(file=log)
    derivative_pre_scale = pre_scale.pre_scaler(
      miller_array_derivative,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_derivative = derivative_pre_scale.x1.deep_copy()
    del derivative_pre_scale
    scaler = fa_estimation.combined_scaling(
      miller_array_native,
      miller_array_derivative,
      params.scaling.input.scaling_strategy.iso_protocol)
    miller_array_native = scaler.x1.deep_copy()
    miller_array_derivative = scaler.x2.deep_copy()
    del scaler
    print(file=log)
    print("Making delta f's", file=log)
    print("----------------", file=log)
    print(file=log)
    delta_gen = pair_analyses.delta_generator(
      miller_array_native,
      miller_array_derivative,
      params.scaling.input.scaling_strategy.iso_protocol.nsr_bias)
    print(file=log)
    print("writing mtz file", file=log)
    print("----------------", file=log)
    print(file=log)
    ## some assertions to make sure nothing went weird
    assert miller_array_native.observation_type() is not None
    assert miller_array_derivative.observation_type() is not None
    assert delta_gen.abs_delta_f.observation_type() is not None
    ## Write out the abs_delta_f array
    mtz_dataset = delta_gen.abs_delta_f.as_mtz_dataset(
      column_root_label='F'+params.scaling.input.output.outlabel)
    mtz_dataset.mtz_object().write(
      file_name=params.scaling.input.output.hklout)
def run(args):
  """Scale native against derivative X-ray data and write delta F's (SIR).

  Command-line driver: handles the help/show-defaults flags, otherwise
  parses phil files/commands and reflection files, pre-scales both data
  sets, performs combined isotropic scaling, generates delta f's and
  writes the |delta F| array to an MTZ file.

  :param args: sequence of command-line argument strings
  """
  if len(args) == 0:
    master_params.show(expert_level=0)
  elif ("--help" in args):
    print("no help available")
  elif ("--h" in args):
    print("no help available")
  elif ("--show_defaults" in args):
    master_params.show(expert_level=0)
  elif ("--show_defaults_all" in args):
    master_params.show(expert_level=10)
  else:
    log = multi_out()
    if "--quiet" not in args:
      log.register(label="stdout", file_object=sys.stdout)
    string_buffer = StringIO()
    string_buffer_plots = StringIO()
    log.register(label="log_buffer", file_object=string_buffer)
    log_plots = StringIO()
    print("#phil __OFF__", file=log)
    print(file=log)
    print(date_and_time(), file=log)
    print(file=log)
    print(file=log)
    phil_objects = []
    argument_interpreter = master_params.command_line_argument_interpreter(
      home_scope="scaling")
    reflection_file = None
    for arg in args:
      command_line_params = None
      arg_is_processed = False
      if arg == '--quiet':
        arg_is_processed = True
        ## The associated action with this keyword is implemented above
      if (os.path.isfile(arg)):  ## is this a file name?
        ## Check if this is a phil file
        try:
          command_line_params = iotbx.phil.parse(file_name=arg)
        except KeyboardInterrupt:
          raise
        except Exception:
          pass
        if command_line_params is not None:
          phil_objects.append(command_line_params)
          arg_is_processed = True
        ## Check if this file is a reflection file
        if command_line_params is None:
          reflection_file = reflection_file_reader.any_reflection_file(
            file_name=arg, ensure_read_access=False)
          if (reflection_file is not None):
            reflection_file = arg
            arg_is_processed = True
      ## If it is not a file, it must be a phil command
      else:
        try:
          command_line_params = argument_interpreter.process(arg=arg)
          if command_line_params is not None:
            phil_objects.append(command_line_params)
            arg_is_processed = True
        except KeyboardInterrupt:
          raise
        except Exception:
          pass
      if not arg_is_processed:
        print("##----------------------------------------------##", file=log)
        print("## Unknown phil-file or phil-command:", arg, file=log)
        print("##----------------------------------------------##", file=log)
        print(file=log)
        raise Sorry("Unknown file format or phil command: %s" % arg)
    effective_params = master_params.fetch(sources=phil_objects)
    params = effective_params.extract()
    ## Now read in the reflection files.
    ## Get symmetry and cell data first; by default the native cell and
    ## symmetry are used as reference.
    crystal_symmetry_nat = crystal_symmetry_from_any.extract_from(
      file_name=params.scaling.input.xray_data.native.file_name)
    if params.scaling.input.xray_data.space_group is None:
      params.scaling.input.xray_data.space_group =\
        crystal_symmetry_nat.space_group_info()
      print("Using symmetry of native data", file=log)
    if params.scaling.input.xray_data.unit_cell is None:
      params.scaling.input.xray_data.unit_cell =\
        crystal_symmetry_nat.unit_cell()
      print("Using cell of native data", file=log)
    ## Check that space group and unit cell are defined
    if params.scaling.input.xray_data.space_group is None:
      raise Sorry("No space group defined")
    if params.scaling.input.xray_data.unit_cell is None:
      raise Sorry("No unit cell defined")
    crystal_symmetry = crystal.symmetry(
      unit_cell=params.scaling.input.xray_data.unit_cell,
      space_group_symbol=str(params.scaling.input.xray_data.space_group))
    effective_params = master_params.fetch(sources=phil_objects)
    new_params = master_params.format(python_object=params)
    print("Effective parameters", file=log)
    print("#phil __ON__", file=log)
    new_params.show(out=log,
      expert_level=params.scaling.input.expert_level)
    print("#phil __END__", file=log)
    print(file=log)
    ## define a xray data server
    xray_data_server = reflection_file_utils.reflection_file_server(
      crystal_symmetry=crystal_symmetry,
      force_symmetry=True,
      reflection_files=[])
    ## Read in native data and make appropriate selections
    miller_array_native = xray_data_server.get_xray_data(
      file_name=params.scaling.input.xray_data.native.file_name,
      labels=params.scaling.input.xray_data.native.labels,
      ignore_all_zeros=True,
      parameter_scope='scaling.input.SIR_scale.xray_data.native')
    info_native = miller_array_native.info()
    ## Drop the (0,0,0) reflection and non-positive observations
    miller_array_native = miller_array_native.map_to_asu().select(
      miller_array_native.indices() != (0, 0, 0))
    miller_array_native = miller_array_native.select(
      miller_array_native.data() > 0)
    ## Convert to amplitudes
    if (miller_array_native.is_xray_intensity_array()):
      miller_array_native = miller_array_native.f_sq_as_f()
    elif (miller_array_native.is_complex_array()):
      miller_array_native = abs(miller_array_native)
    if not miller_array_native.is_real_array():
      raise Sorry("miller_array_native is not a real array")
    miller_array_native.set_info(info=info_native)
    ## Read in derivative data and make appropriate selections
    miller_array_derivative = xray_data_server.get_xray_data(
      file_name=params.scaling.input.xray_data.derivative.file_name,
      labels=params.scaling.input.xray_data.derivative.labels,
      ignore_all_zeros=True,
      parameter_scope='scaling.input.SIR_scale.xray_data.derivative')
    info_derivative = miller_array_derivative.info()
    miller_array_derivative = miller_array_derivative.map_to_asu().select(
      miller_array_derivative.indices() != (0, 0, 0))
    miller_array_derivative = miller_array_derivative.select(
      miller_array_derivative.data() > 0)
    ## Convert to amplitudes
    if (miller_array_derivative.is_xray_intensity_array()):
      miller_array_derivative = miller_array_derivative.f_sq_as_f()
    elif (miller_array_derivative.is_complex_array()):
      miller_array_derivative = abs(miller_array_derivative)
    if not miller_array_derivative.is_real_array():
      raise Sorry("miller_array_derivative is not a real array")
    miller_array_derivative.set_info(info=info_derivative)
    ## As this is a SIR case, we will remove any anomalous pairs
    if miller_array_derivative.anomalous_flag():
      miller_array_derivative = miller_array_derivative.average_bijvoet_mates()\
        .set_observation_type( miller_array_derivative )
    if miller_array_native.anomalous_flag():
      miller_array_native = miller_array_native.average_bijvoet_mates()\
        .set_observation_type( miller_array_native )
    ## Print info
    print(file=log)
    print("Native data", file=log)
    print("===========", file=log)
    miller_array_native.show_comprehensive_summary(f=log)
    print(file=log)
    native_pre_scale = pre_scale.pre_scaler(
      miller_array_native,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_native = native_pre_scale.x1.deep_copy()
    del native_pre_scale
    print(file=log)
    print("Derivative data", file=log)
    print("===============", file=log)
    miller_array_derivative.show_comprehensive_summary(f=log)
    print(file=log)
    derivative_pre_scale = pre_scale.pre_scaler(
      miller_array_derivative,
      params.scaling.input.scaling_strategy.pre_scaler_protocol,
      params.scaling.input.basic)
    miller_array_derivative = derivative_pre_scale.x1.deep_copy()
    del derivative_pre_scale
    scaler = fa_estimation.combined_scaling(
      miller_array_native,
      miller_array_derivative,
      params.scaling.input.scaling_strategy.iso_protocol)
    miller_array_native = scaler.x1.deep_copy()
    miller_array_derivative = scaler.x2.deep_copy()
    del scaler
    print(file=log)
    print("Making delta f's", file=log)
    print("----------------", file=log)
    print(file=log)
    delta_gen = pair_analyses.delta_generator(miller_array_native,
                                              miller_array_derivative)
    print(file=log)
    print("writing mtz file", file=log)
    print("----------------", file=log)
    print(file=log)
    ## some assertions to make sure nothing went weird
    assert miller_array_native.observation_type() is not None
    assert miller_array_derivative.observation_type() is not None
    assert delta_gen.abs_delta_f.observation_type() is not None
    ## Write out the abs_delta_f array
    mtz_dataset = delta_gen.abs_delta_f.as_mtz_dataset(
      column_root_label='F' + params.scaling.input.output.outlabel)
    mtz_dataset.mtz_object().write(
      file_name=params.scaling.input.output.hklout)
def run(args):
  """Re-refine PDB entries given as mtz/pdb pairs or entry ids.

  Command-line driver: parses chunk/multiprocessing options and phil
  "key=value" arguments, resolves the remaining arguments into
  (mtz, pdb) file pairs (explicit file paths, or entry ids looked up in
  $CCI_PDBMTZ and $PDB_MIRROR_PDB), then calls ``process`` (defined
  elsewhere in this file) on each pair while counting caught exceptions.

  :param args: sequence of command-line argument strings
  """
  from iotbx.option_parser import option_parser as iotbx_option_parser
  import libtbx.utils
  show_times = libtbx.utils.show_times(time_start="now")
  command_call = ["iotbx.python", __file__]
  command_line = (iotbx_option_parser(
    usage=" ".join(command_call) + " [options] directory|file...").enable_chunk(
      easy_all=True).enable_multiprocessing()).process(args=args, min_nargs=1)
  # In multiprocessing mode the work is re-dispatched to child processes;
  # the parent only reports timings and returns.
  if (command_line.run_multiprocessing_chunks_if_applicable(
        command_call=command_call)):
    show_times()
    return
  co = command_line.options  # NOTE(review): not used in this function body
  #
  print("TIME BEGIN pdb_dev:", date_and_time())
  print()
  libtbx.utils.host_and_user().show()
  print()
  sys.stdout.flush()
  #
  from cctbx.omz import cod_refine
  master_phil = cod_refine.get_master_phil(
    max_atoms=None,
    f_calc_options_algorithm="direct *fft",
    bulk_solvent_correction=True)
  argument_interpreter = master_phil.command_line_argument_interpreter()
  phil_objects = []
  remaining_args = []
  for arg in command_line.args:
    # "key=value" arguments are phil assignments; everything else is a
    # file path or entry id to be resolved below.
    if (arg.find("=") >= 0):
      phil_objects.append(argument_interpreter.process(arg=arg))
    else:
      remaining_args.append(arg)
  work_phil = master_phil.fetch(sources=phil_objects)
  work_phil.show()
  print()
  params = work_phil.extract()
  #
  # Resolve remaining args into (mtz, pdb) pairs. A manual iterator is
  # used because an .mtz argument consumes the NEXT argument as its pdb
  # counterpart (and vice versa).
  mtz_pdb_pairs = []
  arg_iter = iter(remaining_args)
  pdb_v3_mirror_dir = os.environ.get("PDB_MIRROR_PDB")
  assert pdb_v3_mirror_dir is None or op.isdir(pdb_v3_mirror_dir)
  cci_pdbmtz_path = os.environ.get("CCI_PDBMTZ")
  assert cci_pdbmtz_path is None or op.isdir(cci_pdbmtz_path)
  for arg in arg_iter:
    def get_next(expected_exts):
      # Pull the next argument off arg_iter and require one of the
      # expected file extensions.
      def raise_bad_file(what, fn=None):
        msg = "%s file name (%s expected)" % (
          what, " or ".join(expected_exts))
        if (fn is None):
          msg += "."
        else:
          msg += ": " + show_string(fn)
        raise RuntimeError(msg)
      try:
        arg = next(arg_iter)
      except StopIteration:
        raise_bad_file("Missing")
      if (not arg.endswith(tuple(expected_exts))):
        raise_bad_file("Unexpected", arg)
      return arg
    if (op.isfile(arg) and arg.endswith((".mtz", ".pdb", ".ent"))):
      # Explicit file pair: whichever of .mtz / .pdb|.ent comes first,
      # the partner must follow as the next argument.
      if (arg.endswith(".mtz")):
        fn_mtz = arg
        fn_pdb = get_next([".pdb", ".ent"])
      else:
        fn_pdb = arg
        fn_mtz = get_next([".mtz"])
    else:
      # Entry id: look for id.mtz + id.pdb/.ent beside it, else fall back
      # to $CCI_PDBMTZ for the mtz and the $PDB_MIRROR_PDB layout
      # (<2-char subdir>/pdb<id>.ent.gz) for the pdb.
      fn_mtz = arg + ".mtz"
      def raise_mtz_but_no_pdb():
        raise RuntimeError(
          "MTZ file found but no PDB file: %s" % show_string(fn_mtz))
      if (op.isfile(fn_mtz)):
        for ext in [".pdb", ".ent"]:
          fn_pdb = arg + ext
          if (op.isfile(fn_pdb)):
            break
        else:
          raise_mtz_but_no_pdb()
      else:
        fn_mtz = op.join(cci_pdbmtz_path, arg + ".mtz")
        if (not op.isfile(fn_mtz)):
          raise RuntimeError(
            "MTZ file not found: %s" % show_string(fn_mtz))
        fn_pdb = op.join(pdb_v3_mirror_dir, arg[1:3], "pdb" + arg + ".ent.gz")
        if (not op.isfile(fn_pdb)):
          raise_mtz_but_no_pdb()
    mtz_pdb_pairs.append((fn_mtz, fn_pdb))
  #
  # Main loop: process only this chunk's share; KeyboardInterrupt aborts,
  # any other exception is reported and counted so the batch continues.
  n_caught = 0
  for i_pair, mtz_pdb_pair in enumerate(mtz_pdb_pairs):
    if (i_pair % command_line.chunk.n != command_line.chunk.i):
      continue
    tm = user_plus_sys_time()
    try:
      process(params, mtz_pdb_pair)
    except KeyboardInterrupt:
      print("CAUGHT EXCEPTION: KeyboardInterrupt", file=sys.stderr)
      traceback.print_exc()
      print(file=sys.stderr)
      sys.stderr.flush()
      return
    except Exception:
      sys.stdout.flush()
      print("CAUGHT EXCEPTION: %s" % ", ".join(mtz_pdb_pair), file=sys.stderr)
      traceback.print_exc()
      print(file=sys.stderr)
      sys.stderr.flush()
      n_caught += 1
    else:
      print("done_with: %s, %s (%.2f seconds)" % (
        mtz_pdb_pair + (tm.elapsed(), )))
      print()
      sys.stdout.flush()
  print()
  print("Number of exceptions caught:", n_caught)
  #
  show_times()
  print()
  print("TIME END pdb_dev:", date_and_time())
  sys.stdout.flush()