def run(file_name, pdb_code):
  pdb_inp = iotbx.pdb.input(file_name=file_name)
  pdb_hierarchy = pdb_inp.construct_hierarchy()
  n_atoms = pdb_hierarchy.atoms().size()
  if (n_atoms > 10000): return None
  if (len(list(pdb_hierarchy.models())) > 1): return None
  fraction_of_nonH_incomplete = complete_model(pdb_hierarchy=pdb_hierarchy)
  cs = pdb_inp.crystal_symmetry()
  resolution = get_resolution(pdb_inp=pdb_inp)
  super_cell = expand(
    pdb_hierarchy=pdb_hierarchy,
    crystal_symmetry=cs,
    create_restraints_manager=False)
  symmetry_ss_bonds = find_ss_across_symmetry(super_cell=super_cell)
  result_occupancies = get_altloc_counts(pdb_hierarchy=pdb_hierarchy)
  ligands = get_non_standard_items(pdb_hierarchy=pdb_hierarchy)
  result = group_args(
    number_of_atoms=pdb_hierarchy.atoms().size(),
    number_of_atoms_super_sphere=super_cell.ph_super_sphere.atoms().size(),
    occupancies=result_occupancies,
    unit_cell=cs.unit_cell().parameters(),
    space_group_symbol=cs.space_group().type().lookup_symbol(),
    resolution=resolution,
    data_type=pdb_inp.get_experiment_type(),
    ligands=ligands,
    symmetry_ss_bonds=symmetry_ss_bonds,
    fraction_of_nonH_incomplete=fraction_of_nonH_incomplete)
  easy_pickle.dump(pdb_code + ".pkl", result)
def rebuild_pickle_files(data_dir, file_prefix, target_db, amino_acids):
  from libtbx import easy_pickle
  from libtbx.str_utils import show_string
  from mmtbx.rotamer.n_dim_table import NDimTable
  os.chdir(data_dir)
  print("Processing data files in %s:" % show_string(data_dir))
  for aa, aafile in amino_acids.items():
    data_file = file_prefix + aafile + ".data"
    pickle_file = file_prefix + aafile + ".pickle"
    pair_info = target_db.pair_info(
      source_path=data_file,
      target_path=pickle_file,
      path_prefix=data_dir)
    print(" %s -> %s:" % (data_file, pickle_file), end=' ')
    if not pair_info.needs_update:
      print("already up to date.")
    else:
      print("converting ...", end=' ')
      sys.stdout.flush()
      pair_info.start_building_target()
      ndt = NDimTable.createFromText(data_file)
      easy_pickle.dump(file_name=pickle_file, obj=ndt)
      pair_info.done_building_target()
      print("done.")
      sys.stdout.flush()
  target_db.write()
def run(self):
  # Returns False if something goes wrong
  good_init, msg = self.init.run()
  if not good_init:
    if msg:
      print(msg)
    util.iota_exit()
  if self.init.args.full:
    stage = 'all'
  else:
    stage = 'import'
  # Save init and image iterable for potential UI recovery
  from libtbx import easy_pickle
  easy_pickle.dump(self.init.init_file, self.init)
  easy_pickle.dump(self.init.iter_file, self.init.input_list)
  abort_file = os.path.join(self.init.int_base, '.abort')
  processor = XProcessAll(
    init=self.init,
    iterable=self.init.input_list,
    stage=stage,
    abort_file=abort_file)
  processor.start()
def run(self):
  '''Parse the options.'''
  from dials.util.options import flatten_experiments, flatten_reflections
  # Parse the command line arguments
  params, options = self.parser.parse_args(show_diff_phil=True)
  self.params = params
  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)
  assert len(reflections) == len(experiments) == 1
  reflections = reflections[0]
  exp = experiments[0]

  from dials.algorithms.indexing import index_reflections
  from dials.algorithms.indexing.indexer import indexer_base

  reflections['id'] = flex.int(len(reflections), -1)
  reflections['imageset_id'] = flex.int(len(reflections), 0)

  reflections = indexer_base.map_spots_pixel_to_mm_rad(
    reflections, exp.detector, exp.scan)

  indexer_base.map_centroids_to_reciprocal_space(
    reflections, exp.detector, exp.beam, exp.goniometer)

  index_reflections(reflections, experiments, params.d_min, tolerance=0.3)

  indexed_reflections = reflections.select(
    reflections['miller_index'] != (0, 0, 0))
  print("Indexed %d reflections out of %d" % (
    len(indexed_reflections), len(reflections)))
  easy_pickle.dump("indexedstrong.pickle", indexed_reflections)
def plot_venn(params):
  roots = []
  tags = []
  for path in params.input_path:
    roots.append(os.path.abspath(os.path.join(path, 'out')))
    tags.append(path.strip().split('/')[-1])
  if params.ts_from_cbf:
    results = get_indexed_ts_from_cbf(roots)
  else:
    results = get_indexed_ts(roots)
  print('DONE WITH TIMESTAMPS')
  # "message" was undefined in the original; a plausible error text is
  # supplied here so the Sorry is actually raisable.
  message = 'matplotlib_venn must be installed to plot Venn diagrams'
  if len(results) == 2:
    try:
      from matplotlib_venn import venn2 as venn_plotter
    except ImportError:
      raise Sorry(message)
  elif len(results) == 3:
    try:
      from matplotlib_venn import venn3 as venn_plotter
    except ImportError:
      raise Sorry(message)
  else:
    raise Sorry(
      'matplotlib_venn does not currently support plotting anything other '
      'than 2 or 3 sets')
  print('NOW PLOTTING')
  fig_object = plt.figure()
  venn_plotter(results, set_labels=tags)
  print('DONE PLOTTING')
  if params.pickle_plot:
    from libtbx.easy_pickle import dump
    dump('%s' % params.pickle_filename, fig_object)
  if params.show_plot:
    plt.show()
def run(args):
  import os
  to_pickle = "--pickle" in args
  for file_name in args:
    if (file_name.startswith("--")): continue
    print(file_name + ":")
    f = open(file_name, "r")
    t0 = os.times()
    reflection_file = cns_reflection_file(f)
    tn = os.times()
    t_parse = tn[0] + tn[1] - t0[0] - t0[1]
    f.close()
    reflection_file.show_summary()
    print()
    crystal_symmetry = crystal.symmetry((), "P 1")
    miller_arrays = reflection_file.as_miller_arrays(crystal_symmetry)
    for miller_array in miller_arrays:
      miller_array.show_summary()
      print()
    if (to_pickle):
      pickle_file_name = os.path.split(file_name)[1] + ".pickle"
      t0 = os.times()
      easy_pickle.dump(pickle_file_name, reflection_file)
      tn = os.times()
      t_dump = tn[0] + tn[1] - t0[0] - t0[1]
      t0 = os.times()
      easy_pickle.load(pickle_file_name)
      tn = os.times()
      t_load = tn[0] + tn[1] - t0[0] - t0[1]
      print("parse: %.2f, dump: %.2f, load: %.2f" % (t_parse, t_dump, t_load))
    print()
  t = os.times()
  print("u+s,u,s: %.2f %.2f %.2f" % (t[0] + t[1], t[0], t[1]))
def exercise(file_name):
  path = libtbx.env.find_in_repositories("mmtbx/idealized_aa_residues/data")
  pdb_inp = iotbx.pdb.input(file_name=path + "/" + file_name)
  pdb_hierarchy = pdb_inp.construct_hierarchy()
  xrs = pdb_inp.xray_structure_simple()
  residue = pdb_hierarchy.only_residue()
  clusters = mmtbx.refinement.real_space.aa_residue_axes_and_clusters(
    residue=residue,
    mon_lib_srv=mon_lib_srv,
    backbone_sample=False).clusters
  ri = mmtbx.refinement.real_space.fit_residue.get_rotamer_iterator(
    mon_lib_srv=mon_lib_srv,
    residue=residue)
  if (len(clusters) == 0): return
  for rotamer, rotamer_sites_cart in ri:
    residue.atoms().set_xyz(rotamer_sites_cart)
    xrs = xrs.replace_sites_cart(rotamer_sites_cart)
    states = mmtbx.utils.states(xray_structure=xrs, pdb_hierarchy=pdb_hierarchy)
    t0 = time.time()
    states, good_angles, nested_loop = torsion_search_nested(
      residue=residue,
      clusters=clusters,
      rotamer_eval=rotamer_eval,
      states=states)
    tt = time.time() - t0
    states.write(file_name="%s_all-coarse_step10.pdb" % file_name[:-4])
    break
  print("file_name, n_clusters, n_good_angles, total:", file_name,
        len(clusters), len(good_angles), len(nested_loop), tt)
  easy_pickle.dump(
    file_name="%s-coarse_step10_favored.pickle" % file_name[:-4],
    obj=good_angles)
def run(file_name):
  import time
  from libtbx import easy_pickle
  from dials.array_family import flex
  t0 = time.time()
  refl = flex.reflection_table.from_pickle(file_name)
  t1 = time.time()
  print("Time reflection_table.from_pickle(): %.3f" % (t1 - t0))
  refl.as_pickle("tmp.pickle")
  t2 = time.time()
  print("Time reflection_table.as_pickle(): %.3f" % (t2 - t1))
  d = dict((k, refl[k]) for k in refl.keys())
  t3 = time.time()
  easy_pickle.dump("tmp.pickle", d)
  t4 = time.time()
  print("Time pickle dict: %.3f" % (t4 - t3))
  for k, v in d.items():
    t0 = time.time()
    easy_pickle.dump("tmp.pickle", v)
    t1 = time.time()
    print("Column %s (%s): %.3f" % (k, type(v), t1 - t0))
def run(self): """Parse the options.""" from dials.util.options import flatten_experiments, flatten_reflections # Parse the command line arguments params, options = self.parser.parse_args(show_diff_phil=True) self.params = params experiments = flatten_experiments(params.input.experiments) reflections = flatten_reflections(params.input.reflections) assert len(reflections) == len(experiments) == 1 reflections = reflections[0] exp = experiments[0] from dials.algorithms.indexing import index_reflections from dials.algorithms.indexing.indexer import Indexer reflections["id"] = flex.int(len(reflections), -1) reflections["imageset_id"] = flex.int(len(reflections), 0) reflections = Indexer.map_spots_pixel_to_mm_rad( reflections, exp.detector, exp.scan) Indexer.map_centroids_to_reciprocal_space(reflections, exp.detector, exp.beam, exp.goniometer) index_reflections(reflections, experiments, params.d_min, tolerance=0.3) indexed_reflections = reflections.select( reflections["miller_index"] != (0, 0, 0)) print("Indexed %d reflections out of %d" % (len(indexed_reflections), len(reflections))) easy_pickle.dump("indexedstrong.pickle", indexed_reflections)
def run_indexing(datablock, strong_spots, crystal_model, rmsds):
  cwd = os.path.abspath(os.curdir)
  tmp_dir = os.path.abspath(open_tmp_directory(suffix="test_dials_index"))
  os.chdir(tmp_dir)
  sweep_path = os.path.join(tmp_dir, "datablock.json")
  pickle_path = os.path.join(tmp_dir, "strong.pickle")
  dump.datablock(datablock, sweep_path)
  easy_pickle.dump(pickle_path, strong_spots)
  from dials.test.algorithms.indexing.tst_index import run_one_indexing
  space_group_info = crystal_model.get_space_group()
  symmetry = crystal.symmetry(
    unit_cell=crystal_model.get_unit_cell(),
    space_group=crystal_model.get_space_group())
  imageset = datablock[0].extract_imagesets()[0]
  pixel_size = imageset.get_detector()[0].get_pixel_size()
  phi_width = imageset.get_scan().get_oscillation()[1] * math.pi / 180
  expected_rmsds = [
    1.1 * rmsds[0] * pixel_size[0],
    1.1 * rmsds[1] * pixel_size[1],
    1.1 * rmsds[2] * phi_width]
  run_one_indexing(
    pickle_path=pickle_path,
    sweep_path=sweep_path,
    extra_args=[],
    expected_unit_cell=symmetry.minimum_cell().unit_cell(),
    expected_rmsds=expected_rmsds,
    #expected_hall_symbol=crystal_model.get_space_group().type().hall_symbol(),
    expected_hall_symbol=' P 1')
def run_indexing(datablock, strong_spots, crystal_model, rmsds):
  sweep_path = "datablock.json"
  pickle_path = "strong.pickle"
  dump.datablock(datablock, sweep_path)
  easy_pickle.dump(pickle_path, strong_spots)

  space_group_info = crystal_model.get_space_group()
  symmetry = crystal.symmetry(
    unit_cell=crystal_model.get_unit_cell(),
    space_group=crystal_model.get_space_group())

  imageset = datablock[0].extract_imagesets()[0]
  pixel_size = imageset.get_detector()[0].get_pixel_size()
  phi_width = imageset.get_scan().get_oscillation()[1] * math.pi / 180

  expected_rmsds = [
    1.1 * rmsds[0] * pixel_size[0],
    1.1 * rmsds[1] * pixel_size[1],
    1.1 * rmsds[2] * phi_width,
  ]

  run_one_indexing(
    pickle_path=pickle_path,
    sweep_path=sweep_path,
    extra_args=[],
    expected_unit_cell=symmetry.minimum_cell().unit_cell(),
    expected_rmsds=expected_rmsds,
    expected_hall_symbol=' P 1')
def save_param_file(self,
                    file_name,
                    sources=None,
                    extra_phil="",
                    diff_only=False,
                    save_state=False,
                    replace_path=None):
  if sources is None:
    sources = []
  if extra_phil != "":
    self.merge_phil(phil_string=extra_phil, rebuild_index=False)
  final_phil = self.master_phil.fetch(
    sources=[self.working_phil] + list(sources))
  if diff_only:
    output_phil = self.master_phil.fetch_diff(source=final_phil)
  else:
    output_phil = final_phil
  if (replace_path is not None):
    substitute_directory_name(
      phil_object=output_phil,
      path_name=replace_path,
      sub_name="LIBTBX_BASE_DIR")
  try:
    f = smart_open.for_writing(file_name, "w")
  except IOError as e:
    raise Sorry(str(e))
  else:
    if (replace_path is not None):
      f.write("LIBTBX_BASE_DIR = \"%s\"\n" % replace_path)
    output_phil.show(out=f)
    f.close()
  if save_state:
    cache_file = "%s_cache.pkl" % file_name
    easy_pickle.dump(cache_file, self)
def select_cctbx(self):
  """ Selects best grid search result using the Selector class """
  if os.path.isfile(self.abort_file):
    self.fail = 'aborted'
    return self
  if self.fail is None:
    from iota.components.iota_cctbx import Selector
    selector = Selector(
      self.grid,
      self.final,
      self.params.cctbx.selection.prefilter.flag_on,
      self.params.cctbx.selection.prefilter.target_uc_tolerance,
      self.params.cctbx.selection.prefilter.target_pointgroup,
      self.params.cctbx.selection.prefilter.target_unit_cell,
      self.params.cctbx.selection.prefilter.min_reflections,
      self.params.cctbx.selection.prefilter.min_resolution,
      self.params.cctbx.selection.select_by)
    self.fail, self.final, log_entry = selector.select()
    self.status = 'selection'
    self.log_info.append(log_entry)
  # Save results into a pickle file
  ep.dump(self.obj_file, self)
  return self
def construct_frames_from_files(refl_name, json_name, outname=None, outdir=None):
  importer = Importer(
    [refl_name, json_name],
    read_experiments=True,
    read_reflections=True,
    check_format=False)
  if importer.unhandled:
    print("unable to process:", importer.unhandled)
  reflections_l = flatten_reflections(importer.reflections)[0]
  experiments_l = flatten_experiments(importer.experiments)
  frames = []
  if outdir is None:
    outdir = '.'
  if outname is None:
    outname = 'int-%d' + refl_name.split('.pickle')[0] + '_extracted.pickle'
  elif '%' not in outname:
    outname = outname.split(".pickle")[0] + "_%d.pickle"
  for i in range(len(experiments_l)):
    refl = reflections_l.select(reflections_l['id'] == i)
    if len(refl) == 0: continue
    expt = experiments_l[i]
    frame = ConstructFrame(refl, expt).make_frame()
    name = outname % i
    easy_pickle.dump(os.path.join(outdir, name), frame)
def write(self):
  assert self.file_name is not None
  easy_pickle.dump(
    file_name=self.file_name_during_write,
    obj=self.pair_infos)
  if (os.path.exists(self.file_name)):
    os.remove(self.file_name)
  os.rename(self.file_name_during_write, self.file_name)
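# The method above illustrates a write-then-rename pattern: the pickle is
# dumped to a scratch name first and only swapped into place once the dump
# has completed, so a crash mid-write never leaves a truncated pickle under
# the real file name. A minimal standalone sketch of the same idea follows;
# the helper name and ".tmp" suffix are hypothetical, not from the original:
def safe_dump(file_name, obj):
  import os
  from libtbx import easy_pickle
  tmp_name = file_name + ".tmp"
  easy_pickle.dump(tmp_name, obj)  # write to the scratch file first
  if os.path.exists(file_name):
    os.remove(file_name)           # required on Windows before rename
  os.rename(tmp_name, file_name)   # atomic swap on POSIX filesystems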
def exercise(file_name):
  path = libtbx.env.find_in_repositories("mmtbx/idealized_aa_residues/data")
  pdb_inp = iotbx.pdb.input(file_name=path + "/" + file_name)
  pdb_hierarchy = pdb_inp.construct_hierarchy()
  xrs = pdb_inp.xray_structure_simple()
  residue = pdb_hierarchy.only_residue()
  clusters = mmtbx.refinement.real_space.aa_residue_axes_and_clusters(
    residue=residue,
    mon_lib_srv=mon_lib_srv,
    backbone_sample=False).clusters
  ri = mmtbx.refinement.real_space.fit_residue.get_rotamer_iterator(
    mon_lib_srv=mon_lib_srv,
    residue=residue)
  if (len(clusters) == 0): return
  for rotamer, rotamer_sites_cart in ri:
    residue.atoms().set_xyz(rotamer_sites_cart)
    xrs = xrs.replace_sites_cart(rotamer_sites_cart)
    states = mmtbx.utils.states(xray_structure=xrs, pdb_hierarchy=pdb_hierarchy)
    t0 = time.time()
    states, good_angles, nested_loop = torsion_search_nested(
      residue=residue,
      clusters=clusters,
      rotamer_eval=rotamer_eval,
      states=states)
    tt = time.time() - t0
    states.write(file_name="%s_all-coarse_step10.pdb" % file_name[:-4])
    break
  print("file_name, n_clusters, n_good_angles, total:", file_name,
        len(clusters), len(good_angles), len(nested_loop), tt)
  easy_pickle.dump(
    file_name="%s-coarse_step10.pickle" % file_name[:-4],
    obj=good_angles)
def do_work(img_no):
  n_fails = 0
  while True:
    try:
      raw_data = data.get_raw_data(img_no)
      break
    except (KeyError, ValueError):
      n_fails += 1
      print("Fail to read, attempt number", n_fails)
      if n_fails > 100:
        raise Exception("Couldn't read the data")
      import time
      time.sleep(n_fails * 0.1)
  imgdict = cspad_tbx.dpack(
    data=raw_data,
    distance=distance,
    pixel_size=pixel_size,
    wavelength=wavelength,
    beam_center_x=beam_x,
    beam_center_y=beam_y,
    ccd_image_saturation=overload,
    saturated_value=overload,
    address="Sacla.MPCCD.8tile",
    active_areas=active_areas)
  imgdict = crop_image_pickle(
    imgdict,
    preserve_active_areas_even_though_cropping_would_invalidate_them=True)
  dest_path = os.path.join(dest_dir, dest_base + "_%06d.pickle" % img_no)
  print("Saving image", img_no, "to", dest_path)
  easy_pickle.dump(dest_path, imgdict)
def run_call_back(flags, space_group_info, params):
  structure_shake = random_structure.xray_structure(
    space_group_info,
    elements=("N", "C", "O", "S", "Yb"),
    volume_per_atom=200,
    min_distance=2.0,
    general_positions_only=params.general_positions_only,
    random_u_iso=True)
  structure_ideal = structure_shake.deep_copy_scatterers()
  structure_shake.shake_sites_in_place(rms_difference=params.shake_sites_rmsd)
  structure_shake.shake_adp(spread=params.shake_adp_spread)
  #
  run_id = ""
  if (params.pickle_root_name is not None):
    run_id += params.pickle_root_name + "_"
  run_id += str(space_group_info).replace(" ", "").replace("/", "_").lower()
  if (params.pickle_root_name is not None):
    pickle_file_name = run_id + "_ideal_shake.pickle"
    print("writing file:", pickle_file_name)
    easy_pickle.dump(
      file_name=pickle_file_name,
      obj=(structure_ideal, structure_shake))
    print()
    sys.stdout.flush()
  #
  ls_result = run_refinement(
    structure_ideal=structure_ideal,
    structure_shake=structure_shake,
    params=params,
    run_id=run_id)
  if (ls_result is not None and params.pickle_root_name is not None):
    pickle_file_name = run_id + "_ls_history.pickle"
    print("writing file:", pickle_file_name)
    easy_pickle.dump(file_name=pickle_file_name, obj=ls_result.history)
    print()
    sys.stdout.flush()
def write_integration_pickles(self, integrated, experiments):
  if self.write_pickle:
    from libtbx import easy_pickle
    # "frame" must be quoted here; the original passed an undefined name
    # to hasattr().
    if not hasattr(self, 'frame'):
      self.construct_frame(integrated, experiments)
    easy_pickle.dump(self.params.output.integration_pickle, self.frame)
def exercise():
  params = runtime_utils.process_master_phil.extract()
  i = 0
  while True:
    output_dir = os.path.join(os.getcwd(), "simple_run%d" % i)
    if os.path.exists(output_dir):
      i += 1
    else:
      os.makedirs(output_dir)
      break
  run = runtime_utils.simple_run(output_dir)
  params.output_dir = output_dir
  params.buffer_stdout = False
  params.tmp_dir = output_dir
  # driver = runtime_utils.detached_process_driver(output_dir, run)
  params.run_file = os.path.join(output_dir, "run.pkl")
  eff_file = os.path.join(output_dir, "run.eff")
  working_phil = runtime_utils.process_master_phil.format(python_object=params)
  with open(eff_file, "w") as f:
    working_phil.show(out=f)
  easy_pickle.dump(params.run_file, run)
  easy_run.call("libtbx.start_process %s &" % eff_file)  #params.run_file)
  client = runtime_utils.simple_client(params)
  client.run()
  assert (client.out.getvalue() == """\
current is 44444.444444
current is 50000.000000
current is 57142.857143
current is 66666.666667
""")
  assert client.n_cb >= 5  # this is variable!
  assert ([cb.message for cb in client._accumulated_callbacks] ==
          ['run 0', 'run 1', 'run 2', 'run 3'])
def main(argv=None):
  if (argv is None):
    argv = sys.argv
  outpath = "average_prutt_00001.pickle"  # XXX Should be argument!
  img_sum = None
  dist_sum = 0
  nrg_sum = 0
  nmemb = 0
  for arg in argv[1:]:  # XXX ugly hack!
    if (False):  # XXX Should be argument?
      img_sum, dist_sum, nrg_sum, nmemb = img_add(
        arg, img_sum, dist_sum, nrg_sum, nmemb)
    else:
      img_sum, dist_sum, nrg_sum, nmemb = spot_add(
        arg, img_sum, dist_sum, nrg_sum, nmemb)
  if (nmemb == 0):
    return (0)
  # XXX Post-mortem--avoid overflows! But breaks distance and energy!
  #nmemb = 1.0 * img_sum.max() / (2**14 - 16)
  easy_pickle.dump(
    outpath,
    dict(beamEnrg=1.0 / nmemb * nrg_sum,
         distance=1.0 / nmemb * dist_sum,
         image=1.0 / nmemb * img_sum))  # XXX implicit cast?
  print("Wrote average of %d images to '%s'" % (nmemb, outpath))
  return (0)
def __init__(self, output_dirname, runs, pickle_pattern=None):
  avg_basename = "avg_"
  stddev_basename = "stddev"
  self.adu_offset = 0
  self.histogram = None
  self.nmemb = 0
  for i_run, run in enumerate(runs):
    run_scratch_dir = run
    result = finalise_one_run(run_scratch_dir, pickle_pattern=pickle_pattern)
    if result.histogram is None: continue
    if self.histogram is None:
      self.histogram = result.histogram
    else:
      self.histogram = update_histograms(self.histogram, result.histogram)
    self.nmemb += result.nmemb
  if (output_dirname is not None and avg_basename is not None):
    if (not os.path.isdir(output_dirname)):
      os.makedirs(output_dirname)
    pickle_path = os.path.join(output_dirname, "hist.pickle")
    easy_pickle.dump(pickle_path, self.histogram)
  print("Total number of images used from %i runs: %i" % (
    i_run + 1, self.nmemb))
def endjob(self, obj1, obj2=None):
  """The endjob() function writes the accumulated pixel histograms to disk.

  @param evt Event object (psana only)
  @param env Environment object
  """
  if obj2 is None:
    env = obj1
  else:
    evt = obj1
    env = obj2
  super(pixel_histograms, self).endjob(env)
  d = {
    "nmemb": self.nmemb,
    "histogram": self.histograms,
  }
  pickle_path = os.path.join(
    self.pickle_dirname,
    self.pickle_basename + str(env.subprocess()) + ".pickle")
  easy_pickle.dump(pickle_path, d)
  self.logger.info("Pickle written to %s" % self.pickle_dirname)
  if (self.nfail == 0):
    self.logger.info("%d images processed" % self.nmemb)
  else:
    self.logger.warning(
      "%d images processed, %d failed" % (self.nmemb, self.nfail))
def run(files, gain, prefix):
  from libtbx import easy_pickle
  for file_name in files:
    f = easy_pickle.load(file_name)
    old_miller = f['observations'][0]
    new_miller = old_miller.customized_copy(sigmas=gain * old_miller.sigmas())
    f['observations'][0] = new_miller
    easy_pickle.dump(prefix + file_name, f)
def run(args, cutoff, max_n_terms, six_term=False, params=None,
        plots_dir="kissel_fits_plots", verbose=0):
  if (params is None):
    params = cctbx.eltbx.gaussian_fit.fit_parameters(max_n_terms=max_n_terms)
  chunk_n = 1
  chunk_i = 0
  if (len(args) > 0 and len(args[0].split(",")) == 2):
    chunk_n, chunk_i = [int(i) for i in args[0].split(",")]
    args = args[1:]
  if (not six_term):
    if (not os.path.isdir(plots_dir)):
      print("No plots because target directory does not exist (mkdir %s)." %
            plots_dir)
      plots_dir = None
    if (chunk_n > 1):
      assert plots_dir is not None
  i_chunk = 0
  for file_name in args:
    flag = i_chunk % chunk_n == chunk_i
    i_chunk += 1
    if (not flag): continue
    results = {}
    results["fit_parameters"] = params
    tab = kissel_io.read_table(file_name)
    more_selection = tab.itvc_sampling_selection()
    fit_selection = more_selection & (tab.x <= cutoff + 1.e-6)
    null_fit = scitbx.math.gaussian.fit(
      tab.x.select(fit_selection),
      tab.y.select(fit_selection),
      tab.sigmas.select(fit_selection),
      xray_scattering.gaussian(0, False))
    null_fit_more = scitbx.math.gaussian.fit(
      tab.x.select(more_selection),
      tab.y.select(more_selection),
      tab.sigmas.select(more_selection),
      xray_scattering.gaussian(0, False))
    if (not six_term):
      results[tab.element] = cctbx.eltbx.gaussian_fit.incremental_fits(
        label=tab.element,
        null_fit=null_fit,
        params=params,
        plots_dir=plots_dir,
        verbose=verbose)
    else:
      best_min = scitbx.math.gaussian_fit.fit_with_golay_starts(
        label=tab.element,
        null_fit=null_fit,
        null_fit_more=null_fit_more,
        params=params)
      g = best_min.final_gaussian_fit
      results[tab.element] = [xray_scattering.fitted_gaussian(
        stol=g.table_x()[-1], gaussian_sum=g)]
    sys.stdout.flush()
    pickle_file_name = "%s_fits.pickle" % identifier(tab.element)
    easy_pickle.dump(pickle_file_name, results)
def pickle(self, pickle_file, pickle_object, overwrite=True):
  """Takes an object and pickles it"""
  if os.path.exists(pickle_file) and not overwrite:
    self.log('NOT PICKLING: {!s}'.format(
      os.path.relpath(pickle_file, start=self.out_dir)))
  else:
    self.log('Pickling Object: {!s}'.format(
      os.path.relpath(pickle_file, start=self.out_dir)))
    easy_pickle.dump(pickle_file, pickle_object)
def save_hit(self):
  #self.set_ssx()
  self.result_folder = self.options['output_directory']
  self.num = self.options['num']
  # "is not 'none'" in the original compared object identity, which is
  # effectively always true for the result of .lower(); "!=" is the
  # intended test. The nesting of the else branch is reconstructed from
  # the flattened source.
  if self.options['roi'].lower() != 'none':
    if ('eiger' in self.options['detector'].lower()
        and 'h5' in self.options['file_extension']):
      self.data = self.h5[self.group][self.index, ::]
      self.data[self.data >= self.ovl] = 0
    else:
      self.data = self.img.data

  # Conversion to edf
  if 'edf' in self.options['output_formats']:
    OutputFileName = os.path.join(
      self.result_folder, 'EDF_%s' % self.num.zfill(3), "%s.edf" % self.root)
    edfout = fabio.edfimage.edfimage(data=self.data.astype(np.float32))
    edfout.write(OutputFileName)
  if 'cbf' in self.options['output_formats']:
    OutputFileName = os.path.join(
      self.result_folder, 'CBF_%s' % self.num.zfill(3), "%s.cbf" % self.root)
    cbfout = fabio.cbfimage.cbfimage(data=self.data.astype(np.float32))
    cbfout.write(OutputFileName)

  # Conversion to H5
  if 'hdf5' in self.options['output_formats']:
    OutputFileName = os.path.join(
      self.result_folder,
      'HDF5_%s_%s' % (self.options['filename_root'], self.num.zfill(3)),
      "%s.h5" % self.root)
    OutputFile = h5py.File(OutputFileName, 'w')
    OutputFile.create_dataset(
      "data", data=self.data, compression="gzip", dtype=self.type)
    if self.options['bragg_search']:
      OutputFile.create_dataset(
        "processing/hitfinder/peakinfo", data=self.peaks.astype(np.int))
    OutputFile.close()

  # Conversion to Pickle
  if cctbx and 'pickles' in self.options['output_formats']:
    # def get_ovl(det):
    if 'pilatus' in self.detector.name.lower():
      ovl = 1048500
    if 'eiger' in self.detector.name.lower():
      ovl = self.ovl
    pixels = flex.int(self.data.astype(np.int32))
    pixel_size = self.detector.pixel1
    data = dpack(
      data=pixels,
      distance=self.options['distance'],
      pixel_size=pixel_size,
      wavelength=self.options['wavelength'],
      beam_center_x=self.options['beam_y'] * pixel_size,
      beam_center_y=self.options['beam_x'] * pixel_size,
      ccd_image_saturation=ovl,
      saturated_value=ovl)
    #data = crop_image_pickle(data)
    OutputFileName = os.path.join(
      self.result_folder,
      'PICKLES_%s_%s' % (self.options['filename_root'], self.num.zfill(3)),
      "%s.pickle" % self.root)
    easy_pickle.dump(OutputFileName, data)
def target_and_gradients(self, x):
  self.update(x=x)
  f, g = self.restraints_manager.target_and_gradients(
    sites_cart=flex.vec3_double(self.x))
  if (self.dump_gradients is not None):
    from libtbx import easy_pickle
    easy_pickle.dump(self.dump_gradients, g)
    STOP()  # intentional hard stop after dumping gradients for debugging
  return f, g.as_double()
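# A minimal sketch of how such a gradient dump might be inspected offline.
# The file name here is a hypothetical example, not taken from the original:
from libtbx import easy_pickle

g = easy_pickle.load("gradients.pkl")  # the flex.vec3_double dumped above
print(g.size(), "gradient vectors")
for i in range(min(3, g.size())):      # peek at the first few entries
  print(g[i])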
def save_image(
    command_line,
    imgpath,
    scan,
    raw_data,
    distance,
    pixel_size,
    wavelength,
    beam_x,
    beam_y,
    overload,
    timestamp,
    image_number=None):
  if image_number is None:
    destpath = os.path.join(
      os.path.dirname(imgpath),
      os.path.splitext(os.path.basename(imgpath))[0] + ".pickle")
  else:
    destpath = os.path.join(
      os.path.dirname(imgpath),
      os.path.splitext(os.path.basename(imgpath))[0]
        + "%05d.pickle" % image_number)
  if command_line.options.skip_converted and os.path.isfile(destpath):
    if command_line.options.verbose:
      print("Skipping %s, file exists" % imgpath)
    return
  data = dpack(
    data=raw_data,
    distance=distance,
    pixel_size=pixel_size,
    wavelength=wavelength,
    beam_center_x=beam_x,
    beam_center_y=beam_y,
    ccd_image_saturation=overload,
    saturated_value=overload,
    timestamp=timestamp)
  if scan is not None:
    osc_start, osc_range = scan.get_oscillation()
    if osc_start != osc_range:
      data["OSC_START"] = osc_start
      data["OSC_RANGE"] = osc_range
      data["TIME"] = scan.get_exposure_times()[0]
  if command_line.options.crop:
    data = crop_image_pickle(data)
  if command_line.options.verbose:
    print("Writing", destpath)
  easy_pickle.dump(destpath, data)
def run(params):
  counter = 0
  reference = None
  root = params.input_path
  fig_object = plt.figure()
  good_total = fail_total = 0
  for filename in os.listdir(root):
    if os.path.splitext(filename)[1] != '.log': continue
    if 'rank' not in filename: continue
    fail_timepoints = []
    good_timepoints = []
    rank = int(filename.split('_')[1].split('.')[0])
    counter += 1
    print(filename, rank)
    for line in open(os.path.join(root, filename)):
      if not line.startswith('idx------finis-------->'): continue
      try:
        _, _, _, _, ts, _, elapsed = line.strip().split()
        ts = float(ts)
      except ValueError:
        continue
      if reference is None:
        reference = ts - float(elapsed)
      status = 'done'
      if status in ['stop', 'done', 'fail']:
        if status == 'done':
          good_timepoints.append(ts - reference)
        else:
          fail_timepoints.append(ts - reference)
        ok = True
      else:
        ok = False
    plt.plot(fail_timepoints, [rank] * len(fail_timepoints), 'b.')
    plt.plot(good_timepoints, [rank] * len(good_timepoints), 'g.')
    fail_total += len(fail_timepoints)
    good_total += len(good_timepoints)
    if not ok:
      plt.plot([ts - reference], [rank], 'rx')
    #if counter > 100: break
  fail_deltas = [fail_timepoints[i + 1] - fail_timepoints[i]
                 for i in range(len(fail_timepoints) - 1)]
  good_deltas = [good_timepoints[i + 1] - good_timepoints[i]
                 for i in range(len(good_timepoints) - 1)]
  if fail_deltas:
    print("Five number summary of %d fail image processing times:" % fail_total,
          five_number_summary(flex.double(fail_deltas)))
  if good_deltas:
    print("Five number summary of %d good image processing times:" % good_total,
          five_number_summary(flex.double(good_deltas)))
  for i in range(params.num_nodes):
    plt.plot([0, params.wall_time],
             [i * params.num_cores_per_node - 0.5,
              i * params.num_cores_per_node - 0.5], 'r-')
  plt.xlabel('Wall time (sec)')
  plt.ylabel('MPI Rank Number')
  plt.title(params.plot_title)
  if params.pickle_plot:
    from libtbx.easy_pickle import dump
    dump('%s' % params.pickle_filename, fig_object)
  if params.show_plot:
    plt.show()
def predict_spots_from_rayonix_crystal_model(self, experiments, observed):
  """Reads in the indexed rayonix model, predicts spots using the crystal
  model on the jungfrau detector"""
  # Make sure the experimental model for the rayonix is supplied, and also
  # the experimental geometry of the jungfrau.
  assert self.params.LS49.path_to_rayonix_crystal_models is not None, \
    'Rayonix crystal model path is empty. Needs to be specified'
  assert self.params.LS49.path_to_jungfrau_detector_model is not None, \
    'Jungfrau detector model path is empty. Needs to be specified'
  # Assuming jungfrau cbfs are named like 'jungfrauhit_20180501133315870'
  ts = self.tag.split('_')[-1]
  # Load rayonix experimental model
  rayonix_fname = os.path.join(
    self.params.LS49.path_to_rayonix_crystal_models,
    'idx-%s_integrated_experiments.json' % ts)
  rayonix_expt = ExperimentListFactory.from_json_file(
    rayonix_fname, check_format=False)
  jungfrau_det = ExperimentListFactory.from_json_file(
    self.params.LS49.path_to_jungfrau_detector_model, check_format=False)
  # Reset stuff here. Should have
  # a. Jungfrau detector geometry
  # b. Rayonix indexed crystal model
  from dials.algorithms.refinement.prediction.managed_predictors import \
    ExperimentsPredictorFactory
  from dials.algorithms.indexing import index_reflections
  experiments[0].detector = jungfrau_det[0].detector
  experiments[0].crystal = rayonix_expt[0].crystal
  if False:
    observed['id'] = flex.int(len(observed), -1)
    observed['imageset_id'] = flex.int(len(observed), 0)
    observed.centroid_px_to_mm(experiments[0].detector, experiments[0].scan)
    observed.map_centroids_to_reciprocal_space(
      experiments[0].detector, experiments[0].beam, experiments[0].goniometer)
    index_reflections(observed, experiments)
    ref_predictor = ExperimentsPredictorFactory.from_experiments(experiments)
    ref_predictor(observed)
    observed['id'] = flex.int(len(observed), 0)
    from libtbx.easy_pickle import dump
    dump('my_observed_prediction_%s.pickle' % self.tag, observed)
    dumper = ExperimentListDumper(experiments)
    dumper.as_json('my_observed_prediction_%s.json' % self.tag)
  predictor = StillsReflectionPredictor(experiments[0])
  ubx = predictor.for_ub(experiments[0].crystal.get_A())
  ubx['id'] = flex.int(len(ubx), 0)
  n_predictions = len(ubx)
  n_observed = len(observed)
  if len(observed) > 3 and len(ubx) >= len(observed):
    from libtbx.easy_pickle import dump
    dump('my_prediction_%s.pickle' % self.tag, ubx)
    dumper = ExperimentListDumper(experiments)
    dumper.as_json('my_prediction_%s.json' % self.tag)
  #from IPython import embed; embed(); exit()
  exit()
def write_integration_pickles(self):
  '''
  This is streamlined vs. the code in stills_indexer, since the filename
  convention is set up upstream.
  '''
  from libtbx import easy_pickle
  from xfel.command_line.frame_extractor import ConstructFrame
  self.frame = ConstructFrame(self.integrated, self.experiments[0]).make_frame()
  self.frame["pixel_size"] = self.experiments[0].detector[0].get_pixel_size()[0]
  easy_pickle.dump(self.phil.output.integration_pickle, self.frame)
def run(args):
  command_line = (option_parser(
    usage="iotbx.reflection_file_reader [options] reflection_file ...",
    description="Example: iotbx.reflection_file_reader w1.sca w2.mtz w3.cns")
    .enable_symmetry_comprehensive()
    .option(None, "--weak_symmetry",
      action="store_true",
      default=False,
      help="symmetry on command line is weaker than symmetry found in files")
    .option(None, "--show_data",
      action="store_true",
      default=False,
      help="show Miller indices and data of all arrays")
    .option(None, "--pickle",
      action="store",
      type="string",
      help="write all data to FILE ('--pickle .' copies name of input file)",
      metavar="FILE")
  ).process(args=args)
  if (len(command_line.args) == 0):
    command_line.parser.show_help()
    return
  if (command_line.options.show_data):
    verbose = 3
  else:
    verbose = 2
  all_miller_arrays = collect_arrays(
    file_names=command_line.args,
    crystal_symmetry=command_line.symmetry,
    force_symmetry=not command_line.options.weak_symmetry,
    discard_arrays=command_line.options.pickle is None,
    verbose=verbose,
    report_out=sys.stdout)
  if (all_miller_arrays is not None and len(all_miller_arrays) > 0):
    if (len(all_miller_arrays) == 1):
      all_miller_arrays = all_miller_arrays[0]
    pickle_file_name = command_line.options.pickle
    if (pickle_file_name == "."):
      if (len(command_line.args) > 1):
        raise Sorry(
          "Ambiguous name for pickle file (more than one input file).")
      pickle_file_name = os.path.basename(command_line.args[0])
      if (pickle_file_name.lower().endswith(".pickle")):
        raise Sorry("Input file is already a pickle file.")
    if (not pickle_file_name.lower().endswith(".pickle")):
      pickle_file_name += ".pickle"
    print()
    print("Writing all Miller arrays to file:", pickle_file_name)
    easy_pickle.dump(pickle_file_name, all_miller_arrays)
    print()
def run(self):
  map_inp = None
  miller_array = None
  print('Using model: %s' % self.data_manager.get_default_model_name(),
        file=self.logger)
  model = self.data_manager.get_model()
  if self.data_manager.has_map_coefficients():
    miller_arrays = self.data_manager.get_miller_arrays()
    miller_array = self.find_label(miller_arrays=miller_arrays)
    print('Using miller array: %s' % miller_array.info().label_string(),
          file=self.logger)
  elif self.data_manager.has_real_maps():
    print('Using map: %s' % self.data_manager.get_default_real_map_name(),
          file=self.logger)
    map_inp = self.data_manager.get_real_map()
    print("CCP4 map statistics:", file=self.logger)
    map_inp.show_summary(out=self.logger, prefix=" ")
  if (self.params.output_base is None):
    pdb_base = os.path.basename(self.data_manager.get_default_model_name())
    self.params.output_base = os.path.splitext(pdb_base)[0] + "_emringer"
  if not self.params.quiet:
    plots_dir = self.params.output_base + "_plots"
    if (not os.path.isdir(plots_dir)):
      os.makedirs(plots_dir)
  task_obj = mmtbx.ringer.emringer.emringer(
    model=model,
    miller_array=miller_array,
    map_inp=map_inp,
    params=self.params,
    out=self.logger)
  task_obj.validate()
  task_obj.run()
  self.results = task_obj.get_results()
  ringer_result = self.results.ringer_result
  if not self.params.quiet:
    # save as pickle
    easy_pickle.dump("%s.pkl" % self.params.output_base, ringer_result)
    print('Wrote %s.pkl' % self.params.output_base, file=self.logger)
    # save as CSV
    csv = "\n".join([r.format_csv() for r in ringer_result])
    open("%s.csv" % self.params.output_base, "w").write(csv)
    print('Wrote %s.csv' % self.params.output_base, file=self.logger)
  scoring_result = self.results.scoring_result
  scoring_result.show_summary(out=self.logger)
def run(args):
  parser = argparse.ArgumentParser()
  parser.add_argument("--prefix", help="result path", default="myTestDB",
                      type=str)
  parser.add_argument("-path", help="db path", type=str)
  parser.add_argument("--np", help="number of point covering [0,1]",
                      default=50, type=int)
  # Note: argparse's type=bool treats any non-empty string (including
  # "False") as True, so this flag only behaves as intended when left at
  # its default.
  parser.add_argument("--fix_dx", help="Whether keeping default dx=0.7A or not",
                      default=True, type=bool)
  parser.add_argument("--nmax", help="nmax", default=20, type=int)
  parser.add_argument("--qmax", help="rmax", default=0.3, type=float)
  args = parser.parse_args()
  path = args.path
  print("filepath:", path)
  nmax = args.nmax
  np = args.np
  fix_dx = args.fix_dx
  prefix = args.prefix
  print("prefix:", prefix)
  pdb_dir = os.listdir(path)
  files = [f for f in pdb_dir if f.endswith('pdb')]
  sorted_files = sorted(
    files, key=lambda oneFileName: int(oneFileName.split(".")[0]))
  nlm_coefs = []
  codes = []
  for file in sorted_files:
    code = file.split('\n')[0].split('.')[0]
    file = path + file
    mom_obj, vox_obj, pdb = pdb2zernike.zernike_moments(
      file, nmax=nmax, np=np, fix_dx=fix_dx, coef_out=False,
      calc_intensity=False)
    if (mom_obj is None):
      print(code, "NOT processed, please check the file")
      continue
    codes.append(code)
    nlm_coefs.append(mom_obj.moments().coefs().deep_copy())
    print(code, "processed.")
  easy_pickle.dump(prefix + ".nlm", nlm_coefs)
  easy_pickle.dump(prefix + ".codes", codes)
def _main(args, out=sys.stdout):
  """
  Main entry point to this script.

  Parameters
  ----------
  args : list of str
      List of arguments, should not include the first argument with the
      executable name.
  out : file, optional
  """
  usage_string = """\
phenix.python -m mmtbx.ions.svm.dump_sites model.pdb data.mtz [options ...]

Utility to dump information about the properties of water and ion sites in
a model. These properties include local environment, electron density maps,
and atomic properties.
"""
  cmdline = load_model_and_data(
    args=args,
    master_phil=master_phil(),
    out=out,
    process_pdb_file=True,
    create_fmodel=True,
    prefer_anomalous=True,
    set_wavelength_from_model_header=True,
    set_inelastic_form_factors="sasaki",
    usage_string=usage_string)
  params = cmdline.params
  params.use_svm = True
  make_header("Inspecting sites", out=out)
  manager = ions.identify.create_manager(
    pdb_hierarchy=cmdline.pdb_hierarchy,
    fmodel=cmdline.fmodel,
    geometry_restraints_manager=cmdline.geometry,
    wavelength=params.input.wavelength,
    params=params,
    verbose=params.debug,
    nproc=params.nproc,
    log=out)
  manager.show_current_scattering_statistics(out=out)
  sites = dump_sites(manager)
  out_name = os.path.splitext(params.input.pdb.file_name[0])[0] + "_sites.pkl"
  print("Dumping to", out_name, file=out)
  easy_pickle.dump(out_name, sites)
def callback_other(self, data):
  if not data.cached:
    return
  if data.accumulate:
    self._accumulated_callbacks.append(data)
    touch_file(self.info_lock)
    easy_pickle.dump(self.info_file, self._accumulated_callbacks)
    os.remove(self.info_lock)
  else:
    touch_file(self.state_lock)
    easy_pickle.dump(self.state_file, data)
    os.remove(self.state_lock)
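# The touch/dump/remove sequence above is a simple lock-file convention: the
# lock file exists only while the pickle is being (re)written. A consumer on
# the other side might poll like this -- a hedged sketch with hypothetical
# file names; the real reader lives elsewhere in libtbx:
import os
import time
from libtbx import easy_pickle

def read_when_unlocked(data_file, lock_file, delay=0.05):
  # wait until no writer holds the lock, then load the pickle
  while os.path.exists(lock_file):
    time.sleep(delay)
  return easy_pickle.load(data_file)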
def run(args):
  to_pickle = "--pickle" in args
  for file_name in args:
    if (file_name.startswith("--")): continue
    s = reader(open(file_name, "r"))
    miller_array = s.as_miller_array(info="From file: " + file_name)
    miller_array.show_summary()
    if (to_pickle):
      pickle_file_name = os.path.split(file_name)[1] + ".pickle"
      print("Writing:", pickle_file_name)
      easy_pickle.dump(pickle_file_name, miller_array)
    print()
def write_integration_pickles(self, integrated, experiments, callback=None):
  """
  Write a serialized python dictionary with integrated intensities and other
  information suitable for use by cxi.merge or prime.postrefine.

  @param integrated Reflection table with integrated intensities
  @param experiments Experiment list. One integration pickle for each
         experiment will be created.
  @param callback Deriving classes can use callback to make further
         modifications to the dictionary before it is serialized. Callback
         should be a function with this signature:
         def functionname(params, outfile, frame), where params is the phil
         scope, outfile is the path to the pickle that will be saved, and
         frame is the python dictionary to be serialized.
  """
  try:
    picklefilename = self.params.output.integration_pickle
  except AttributeError:
    return

  if self.params.output.integration_pickle is not None:
    from libtbx import easy_pickle
    import os
    from xfel.command_line.frame_extractor import ConstructFrame
    from dials.array_family import flex

    # Split everything into separate experiments for pickling
    for e_number in range(len(experiments)):
      experiment = experiments[e_number]
      e_selection = integrated['id'] == e_number
      reflections = integrated.select(e_selection)

      frame = ConstructFrame(reflections, experiment).make_frame()
      frame["pixel_size"] = experiment.detector[0].get_pixel_size()[0]

      if not hasattr(self, 'tag') or self.tag is None:
        try:
          # if the data was a file on disc, get the path
          event_timestamp = os.path.splitext(
            experiments[0].imageset.paths()[0])[0]
        except NotImplementedError:
          # if the data is in memory only, check if the reader set a
          # timestamp on the format object
          event_timestamp = \
            experiment.imageset.reader().get_format(0).timestamp
        event_timestamp = os.path.basename(event_timestamp)
        if event_timestamp.find("shot-") == 0:
          # micromanage the file name
          event_timestamp = os.path.splitext(event_timestamp)[0]
      else:
        event_timestamp = self.tag
      if hasattr(self.params.output, "output_dir"):
        outfile = os.path.join(
          self.params.output.output_dir,
          self.params.output.integration_pickle % (e_number, event_timestamp))
      else:
        outfile = os.path.join(
          os.path.dirname(self.params.output.integration_pickle),
          self.params.output.integration_pickle % (e_number, event_timestamp))

      if callback is not None:
        callback(self.params, outfile, frame)

      easy_pickle.dump(outfile, frame)
def make_pickle(motif):
  pwd = os.getcwd()
  fingerprints_dir = libtbx.env.find_in_repositories(
    "cctbx_project/mmtbx/cablam/fingerprints")
  if fingerprints_dir is None:
    raise Sorry("""\
Problem locating cablam fingerprints dir""")
  os.chdir(fingerprints_dir)
  filename = motif.motif_name + ".pickle"
  print("Converting", motif.motif_name, "to pickle file . . .")
  easy_pickle.dump(file_name=filename, obj=motif)
  print(". . . Done")
  os.chdir(pwd)
def run_function_as_detached_process_in_dialog(
    parent,
    thread_function,
    title,
    message,
    tmp_dir,
    callback=None,
    project_id=None,
    job_id=None):
  if (tmp_dir is None):
    tmp_dir = os.getcwd()
  params = runtime_utils.process_master_phil.extract()
  params.tmp_dir = tmp_dir
  if (job_id is None):
    job_id = str(os.getpid()) + "_" + str(int(random.random() * 1000))
  params.prefix = str(job_id)
  target = runtime_utils.detached_process_driver(target=thread_function)
  run_file = os.path.join(tmp_dir, "libtbx_run_%s.pkl" % job_id)
  easy_pickle.dump(run_file, target)
  params.run_file = run_file
  eff_file = os.path.join(tmp_dir, "libtbx_run_%s.eff" % job_id)
  runtime_utils.write_params(params, eff_file)
  dlg = ProcessDialog(
    parent=parent,
    message=message,
    caption=title,
    callback=callback)
  setup_process_gui_events(
    window=dlg,
    OnExcept=dlg.OnError,
    OnAbort=dlg.OnAbort,
    OnComplete=dlg.OnComplete)
  agent = event_agent(
    window=dlg,
    project_id=project_id,
    job_id=job_id)
  process = detached_process(params, proxy=agent)
  cb = event_agent(dlg, project_id=project_id, job_id=job_id)
  easy_run.call("libtbx.start_process \"%s\" &" % eff_file)
  result = None
  abort = False
  if (dlg.run(process) == wx.ID_OK):
    result = dlg.get_result()
  elif dlg.exception_raised():
    dlg.handle_error()
  elif (dlg.was_aborted()):
    abort = True
  wx.CallAfter(dlg.Destroy)
  if (abort):
    raise Abort()
  return result
def run(args, out=sys.stdout):
  import mmtbx.command_line
  cmdline = mmtbx.command_line.load_model_and_data(
    args=args,
    master_phil=master_phil(),
    process_pdb_file=False,
    out=out,
    usage_string="mmtbx.fmodel_simple model.pdb data.mtz [options]")
  fmodel = cmdline.fmodel
  fmodel_info = fmodel.info()
  fmodel_info.show_rfactors_targets_scales_overall(out=out)
  easy_pickle.dump(cmdline.params.output_file, fmodel)
  print("Wrote fmodel to %s" % cmdline.params.output_file, file=out)
  return fmodel
def __call__(self):
  if (self.log_file is not None):
    log = open(self.log_file, "w")
    new_out = multi_out()
    new_out.register("log", log)
    new_out.register("stdout", sys.stdout)
    sys.stdout = new_out
    self._out = new_out
  result = self.run()
  easy_pickle.dump(self.file_name, result)
  if (self._out is not None) and (not getattr(self._out, "closed", False)):
    self._out.flush()
    # FIXME
    #self._out.close()
  return result
def generate_random_f_calc(space_group_info, n_elements=10, d_min=1.5):
  structure = random_structure.xray_structure(
    space_group_info,
    elements=["Si"] * n_elements,
    volume_per_atom=1000,
    min_distance=3.,
    general_positions_only=False)
  structure.show_summary().show_scatterers()
  print()
  f_calc = structure.structure_factors(
    d_min=d_min, anomalous_flag=False).f_calc()
  f_calc.show_summary()
  print()
  print("Writing file: map_coeff.pickle")
  easy_pickle.dump("map_coeff.pickle", f_calc)
  print()
def run(args):
  assert len(args) == 3
  d1 = easy_pickle.load(args[0])
  d2 = easy_pickle.load(args[1])
  image_1 = d1["DATA"]
  image_2 = d2["DATA"]
  assert image_1.all() == image_2.all()
  diff_image = image_1 - image_2
  d = cspad_tbx.dpack(
    data=diff_image,
    timestamp=cspad_tbx.evt_timestamp(),
    distance=1)
  easy_pickle.dump(args[2], d)
def dump_den_network(self):
  den_dump = {}
  self.get_selection_strings()
  for chain in self.den_atom_pairs.keys():
    den_dump[chain] = []
    for pair in self.den_atom_pairs[chain]:
      i_seq_1 = pair[0]
      i_seq_2 = pair[1]
      select_1 = self.selection_string_hash[i_seq_1]
      select_2 = self.selection_string_hash[i_seq_2]
      dump_pair = (select_1, select_2)
      den_dump[chain].append(dump_pair)
  output_prefix = "den"
  easy_pickle.dump("%s.pkl" % output_prefix, den_dump)
def run(args):
  for f in args:
    try:
      file_object = smart_open.for_reading(file_name=f)
      miller_arrays = iotbx.cif.reader(
        file_object=file_object).as_miller_arrays()
    except KeyboardInterrupt:
      raise
    except Exception as e:
      print("Error extracting miller arrays from file: %s:" % show_string(f))
      print(" ", str(e))
      continue
    for miller_array in miller_arrays:
      miller_array.show_comprehensive_summary()
      print()
    r, _ = op.splitext(op.basename(f))
    easy_pickle.dump(file_name=r + '_miller_arrays.pickle', obj=miller_arrays)
def run(args, out=sys.stdout):
  from mmtbx.disorder import analyze_model
  import mmtbx.validation.molprobity
  import mmtbx.command_line
  cmdline = mmtbx.command_line.load_model_and_data(
    args=args,
    master_phil=master_phil(),
    require_data=False,
    create_fmodel=True,
    process_pdb_file=True,
    usage_string="mmtbx.analyze_static_disorder model.pdb",
    out=out)
  hierarchy = cmdline.pdb_hierarchy
  params = cmdline.params
  validation = mmtbx.validation.molprobity.molprobity(
    pdb_hierarchy=hierarchy,
    xray_structure=cmdline.xray_structure,
    fmodel=cmdline.fmodel,
    crystal_symmetry=cmdline.crystal_symmetry,
    geometry_restraints_manager=cmdline.geometry,
    header_info=None,
    keep_hydrogens=False,
    outliers_only=False,
    nuclear=False)
  segments = []
  make_header("Analyzing model", out=out)
  if (params.ignore_inconsistent_occupancy):
    print("Discontinuous occupancies will be ignored.", file=out)
  process = analyze_model.process_pdb_hierarchy(
    pdb_hierarchy=hierarchy,
    validation=validation,
    ignore_inconsistent_occupancy=params.ignore_inconsistent_occupancy,
    log=out)
  make_sub_header("MolProbity validation", out=out)
  validation.show_summary(out=out)
  make_sub_header("Disorder analysis", out=out)
  if (process.n_disordered == 0):
    print("No alternate conformations found.", file=out)
  else:
    process.show(out=out, verbose=params.verbose)
  if (params.pickle):
    file_name = os.path.basename(
      os.path.splitext(params.input.pdb.file_name[0])[0]) + ".pkl"
    easy_pickle.dump(file_name, process)
  return process
def run():
  quartz_structure = xray.structure(
    special_position_settings=crystal.special_position_settings(
      crystal_symmetry=crystal.symmetry(
        unit_cell=(5.01, 5.01, 5.47, 90, 90, 120),
        space_group_symbol="P6222")),
    scatterers=flex.xray_scatterer([
      xray.scatterer(
        label="Si",
        site=(1/2., 1/2., 1/3.),
        u=0.2),
      xray.scatterer(
        label="O",
        site=(0.197, -0.197, 0.83333),
        u=0)]))
  quartz_structure.show_summary().show_scatterers()
  from libtbx import easy_pickle
  easy_pickle.dump("beach", quartz_structure)
  quartz_structure = easy_pickle.load("beach")
  for scatterer in quartz_structure.scatterers():
    print("%s:" % scatterer.label, "%8.4f %8.4f %8.4f" % scatterer.site)
    site_symmetry = quartz_structure.site_symmetry(scatterer.site)
    print("  point group type:", site_symmetry.point_group_type())
    print("  special position operator:",
          site_symmetry.special_op_simplified())
  for table in ["xray", "electron"]:
    print("Scattering type table:", table)
    reg = quartz_structure.scattering_type_registry(table=table)
    reg.show_summary()
  f_calc = quartz_structure.structure_factors(d_min=2).f_calc()
  f_calc.show_summary().show_array()
  f_calc.d_spacings().show_array()
  low_resolution_only = f_calc.select(f_calc.d_spacings().data() > 2.5)
  low_resolution_only.show_array()
  print()
def process(work_params, i_calc):
  from cctbx.miller import reindexing
  reindexing_assistant = reindexing.assistant(
    lattice_group=work_params.lattice_symmetry.group(),
    intensity_group=work_params.intensity_symmetry.group(),
    miller_indices=i_calc.p1_anom.indices())
  reindexing_assistant.show_summary()
  print()
  image_mdls = build_images(work_params, i_calc.p1_anom, reindexing_assistant)
  show_vm_info("After build_images():")
  if (work_params.pickle_image_models):
    file_name = "%s_image_mdls.pickle" % work_params.base36_timestamp
    from libtbx import easy_pickle
    easy_pickle.dump(
      file_name=file_name,
      obj=(work_params, i_calc, reindexing_assistant, image_mdls))
    show_vm_info("After %s:" % file_name)
  process_core(work_params, i_calc.p1_anom, reindexing_assistant, image_mdls)