def run(args):
    """Run the fable read/write exercises selected on the command line.

    args: exercise-name suffixes (e.g. "open"); an empty list runs all.
    Fix: Python 2 `print "OK"` statement (SyntaxError under Python 3);
    the sibling block in this file already uses print().
    """
    from libtbx.option_parser import option_parser
    command_line = (option_parser(
        usage="fable.python %s [options]" % __file__)
        .option(None, "--ifort", action="store_true", default=False)
        .option(None, "--verbose", action="store_true", default=False)
    ).process(args=args)
    keys = set(command_line.args)
    # Discover available exercises from the module namespace.
    exercises = set()
    for key in globals().keys():
        if key.startswith("exercise_"):
            exercises.add(key[9:])  # strip the "exercise_" prefix
    assert len(keys) == 0 or keys.issubset(exercises)
    co = command_line.options
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    if len(keys) == 0 or "open" in keys:
        exercise_open(opts=co)
    if len(keys) == 0 or "mixed_read_write" in keys:
        exercise_mixed_read_write(opts=co)
    if len(keys) == 0 or "read_from_non_existing_file" in keys:
        exercise_read_from_non_existing_file(opts=co)
    print("OK")
def run(args):
    """Exercise tardy joint dynamics.

    args: [] for a quick silent run, or [n_trials, n_dynamics_steps].
    Fix: Python 2 `print "OK"` (SyntaxError under Python 3; the Py3 twin
    of this block uses print()); dead `if (1):` guards folded into a loop.
    """
    assert len(args) in [0, 2]
    if len(args) == 0:
        n_trials = 3
        n_dynamics_steps = 30
        out = null_out()
    else:
        n_trials = max(1, int(args[0]))
        n_dynamics_steps = max(1, int(args[1]))
        out = sys.stdout
    show_times_at_exit()
    # Same call for every joint type; originally five identical blocks.
    for exercise in (exercise_six_dof,
                     exercise_six_dof2,
                     exercise_spherical,
                     exercise_revolute,
                     exercise_revolute2):
        exercise(out=out, n_trials=n_trials, n_dynamics_steps=n_dynamics_steps)
    print("OK")
def run(args):
    """Exercise fable C++ code generation with several file-splitting options.

    Fix: Python 2 `print "OK"`; also removes `all = True`, which shadowed
    the builtin `all` (the `if (0 or all):` guards were always true).
    """
    assert args in [[], ["--verbose"]]
    verbose = (args == ["--verbose"])
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    exercise(verbose,
             file_names_cpp=["top.cpp", "functions.cpp"],
             number_of_function_files=1)
    exercise(verbose,
             file_names_cpp=["top.cpp", "subs.cpp"],
             separate_files_separate_namespace={"subs": ["sub1", "sub2"]})
    exercise(verbose,
             file_names_cpp=["top.cpp", "subs.cpp", "functions.cpp"],
             number_of_function_files=1,
             separate_files_separate_namespace={"subs": ["sub1", "sub2"]})
    exercise(verbose,
             file_names_cpp=["top.cpp", "subs.cpp"],
             separate_files_main_namespace={"subs": ["sub1", "sub2"]})
    exercise(verbose,
             file_names_cpp=["top.cpp", "subs.cpp", "functions.cpp"],
             number_of_function_files=1,
             separate_files_main_namespace={"subs": ["sub1", "sub2"]})
    print("OK")
def run(args):
    """Run the fable read/write exercises named on the command line.

    An empty argument list runs every exercise; otherwise each argument
    must be the suffix of an exercise_* function in this module.
    """
    from libtbx.option_parser import option_parser
    parser = option_parser(usage="fable.python %s [options]" % __file__)
    parser = parser.option(None, "--ifort", action="store_true", default=False)
    parser = parser.option(None, "--verbose", action="store_true", default=False)
    command_line = parser.process(args=args)
    requested = set(command_line.args)
    available = {name[9:] for name in globals() if name.startswith("exercise_")}
    assert not requested or requested.issubset(available)
    co = command_line.options
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    if not requested or "open" in requested:
        exercise_open(opts=co)
    if not requested or "mixed_read_write" in requested:
        exercise_mixed_read_write(opts=co)
    if not requested or "read_from_non_existing_file" in requested:
        exercise_read_from_non_existing_file(opts=co)
    print("OK")
def run():
  # Drive a SCons build: locate a bundled SCons engine, import it, and
  # hand control to SCons.Script.main().  (Python 2 code: `print >>`.)
  debug_import = "--debug=import" in sys.argv[1:]
  def show_traceback():
    # Show the import traceback only when --debug=import was given.
    if (debug_import):
      import traceback
      print >> sys.stderr
      traceback.print_exc()
      print >> sys.stderr
  engine_path = find_scons_engine_path()
  if (engine_path is not None):
    sys.path.insert(0, engine_path)
    try:
      import SCons
    except ImportError:
      # Candidate engine path did not work; remove it from sys.path again.
      show_traceback()
      del sys.path[0]
  try:
    import SCons.Script
  except ImportError:
    # No usable SCons anywhere: explain where to install a distribution.
    show_traceback()
    msg = ["SCons is not available.",
      " A possible solution is to unpack a SCons distribution in",
      " one of these directories:"]
    for path in libtbx.env.repository_paths:
      msg.append(" " + show_string(abs(path)))
    msg.extend([
      " SCons distributions are available at this location:",
      " http://www.scons.org/",
      " It may be necessary to rename the unpacked distribution, e.g.:",
      " mv scons-0.96.1 scons"])
    raise Sorry("\n".join(msg))
  import SCons.Script.Main
  if (hasattr(SCons.Script.Main, "fetch_win32_parallel_msg")):
    # Suppress SCons' win32 parallel-build warning when this version has it.
    SCons.Script.Main.fetch_win32_parallel_msg = dummy_fetch_win32_parallel_msg
  show_times_at_exit()
  SCons.Script.main()
def run(args):
    """Compile the valid fable test programs matching the given patterns.

    Prints "Done." (not "OK") when any compilation failed, so a grep for
    "OK" does not report false success.
    Fix: Python 2 `print` statements (SyntaxError under Python 3).
    """
    from libtbx.option_parser import option_parser
    command_line = (option_parser(
        usage="fable.python %s [options] regex_pattern ..." % __file__)
        .enable_multiprocessing()
        .option(None, "--dry_run", action="store_true", default=False)
        .option(None, "--valgrind", action="store_true", default=False)
        .option(None, "--ifort", action="store_true", default=False)
        .option(None, "--keep_going", action="store_true", default=False)
        .option(None, "--pch", action="store_true", default=False)
        .option(None, "--verbose", action="store_true", default=False)
    ).process(args=args)
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    n_failures = exercise_compile_valid(
        regex_patterns=command_line.args,
        opts=command_line.options)
    if n_failures != 0:
        print("Done.")
    else:
        print("OK")
def run(args):
    """Compile the valid fable test programs matching the given patterns.

    Prints "Done." instead of "OK" when any compilation failed.
    Fix: Python 2 `print` statements (SyntaxError under Python 3).
    """
    from libtbx.option_parser import option_parser
    command_line = (option_parser(
        usage="fable.python %s [options] regex_pattern ..." % __file__)
        .enable_multiprocessing()
        .option(None, "--dry_run", action="store_true", default=False)
        .option(None, "--valgrind", action="store_true", default=False)
        .option(None, "--ifort", action="store_true", default=False)
        .option(None, "--keep_going", action="store_true", default=False)
        .option(None, "--pch", action="store_true", default=False)
        .option(None, "--verbose", action="store_true", default=False)
    ).process(args=args)
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    n_failures = exercise_compile_valid(
        regex_patterns=command_line.args,
        opts=command_line.options)
    if n_failures != 0:
        print("Done.")
    else:
        print("OK")
def run(args):
    """Exercise each tardy joint type's dynamics.

    args: [] for a quick silent run, or [n_trials, n_dynamics_steps].
    """
    assert len(args) in [0, 2]
    if args:
        n_trials = max(1, int(args[0]))
        n_dynamics_steps = max(1, int(args[1]))
        out = sys.stdout
    else:
        n_trials, n_dynamics_steps = 3, 30
        out = null_out()
    show_times_at_exit()
    for joint_exercise in (exercise_six_dof,
                           exercise_six_dof2,
                           exercise_spherical,
                           exercise_revolute,
                           exercise_revolute2):
        joint_exercise(
            out=out, n_trials=n_trials, n_dynamics_steps=n_dynamics_steps)
    print("OK")
def run(args):
    """Simulate shots and report completeness/redundancy statistics.

    Fix: Python 2 bare `print` statement (SyntaxError under Python 3).
    NOTE(review): the phil string's line breaks were reconstructed in the
    conventional layout (attribute lines indented) — confirm against VCS.
    """
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    from rstbx.simage import create
    work_params = create.process_args(
        args=args,
        extra_phil_str="""\
use_symmetry = False
  .type = bool
number_of_shots = None
  .type = int
min_count_target = None
  .type = int
usable_partiality_threshold = 0.1
  .type = float
kirian_delta_vs_ewald_proximity = False
  .type = bool
multiprocessing = False
  .type = bool
xy_prefix = None
  .type = str
plot = completeness redundancy
  .type = choice
""")
    i_calc = create.build_i_calc(work_params)
    i_calc.p1_anom.show_comprehensive_summary()
    print()  # blank separator line
    sys.stdout.flush()
    stats = simulate(work_params, i_calc)
    stats.report(plot=work_params.plot, xy_prefix=work_params.xy_prefix)
def run(args):
    """Simulate shots and report completeness/redundancy statistics.

    Fix: Python 2 bare `print` statement (SyntaxError under Python 3).
    NOTE(review): phil string line breaks reconstructed conventionally.
    """
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    from rstbx.simage import create
    work_params = create.process_args(
        args=args,
        extra_phil_str="""\
use_symmetry = False
  .type = bool
number_of_shots = None
  .type = int
min_count_target = None
  .type = int
usable_partiality_threshold = 0.1
  .type = float
kirian_delta_vs_ewald_proximity = False
  .type = bool
multiprocessing = False
  .type = bool
xy_prefix = None
  .type = str
plot = completeness redundancy
  .type = choice
""")
    i_calc = create.build_i_calc(work_params)
    i_calc.p1_anom.show_comprehensive_summary()
    print()  # blank separator line
    sys.stdout.flush()
    stats = simulate(work_params, i_calc)
    stats.report(plot=work_params.plot, xy_prefix=work_params.xy_prefix)
def run(args):
    """Run every simage smoke test; no command-line arguments accepted."""
    assert len(args) == 0
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    for check in (exercise_image_simple,
                  exercise_combine_rgb_images,
                  exercise_create,
                  exercise_explore_completeness,
                  exercise_solver):
        check()
    print("OK")
def run(args):
    """Run every simage smoke test; no command-line arguments accepted.

    Fix: Python 2 `print "OK"` (SyntaxError under Python 3); the Py3 twin
    of this block in the same file already uses print().
    """
    assert len(args) == 0
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    exercise_image_simple()
    exercise_combine_rgb_images()
    exercise_create()
    exercise_explore_completeness()
    exercise_solver()
    print("OK")
def run(args):
    """Quick dynamics and minimization check for each simulation factory.

    args: [] for a quick silent run, or [n_dynamics_steps].
    Fix: Python 2 `print "OK"` (SyntaxError under Python 3).
    """
    assert len(args) in [0, 1]
    if len(args) == 0:
        n_dynamics_steps = 30
        out = null_out()
    else:
        n_dynamics_steps = max(1, int(args[0]))
        out = sys.stdout
    show_times_at_exit()
    for sim_factory in simulation_factories:
        sim = sim_factory()
        exercise_dynamics_quick(
            out=out, sim=sim, n_dynamics_steps=n_dynamics_steps)
        exercise_minimization_quick(out=out, sim=sim)
    print("OK")
def run(args):
    """Quick dynamics and minimization check for each simulation factory.

    args: [] for a quick silent run, or [n_dynamics_steps].
    Fix: Python 2 `print "OK"` (SyntaxError under Python 3).
    """
    assert len(args) in [0, 1]
    if len(args) == 0:
        n_dynamics_steps = 30
        out = null_out()
    else:
        n_dynamics_steps = max(1, int(args[0]))
        out = sys.stdout
    show_times_at_exit()
    for sim_factory in simulation_factories:
        sim = sim_factory()
        exercise_dynamics_quick(
            out=out, sim=sim, n_dynamics_steps=n_dynamics_steps)
        exercise_minimization_quick(out=out, sim=sim)
    print("OK")
def run(args, multiplier):
    """Run exercise_direct over a fixed element composition.

    multiplier scales the element counts (composition mirrors roughly 1% of
    1hmg.pdb chain A: C 1583, N 445, O 495, S 13).
    Fix: Python 2-only syntax — `xrange`, `except Exception, e`, and
    `print` statements (the Py3 sibling block uses range/as/print()).
    """
    show_times_at_exit()
    verbose = '--verbose' in args
    use_random_u_iso = '--use_random_u_iso' in args
    # count from 1hmg.pdb, chain A: C, 1583; N, 445; O, 495, S, 13
    elements = ['O'] * 19 + ['N'] * 18 + ['C'] * 62 + ['S'] * 1
    allelements = elements * multiplier
    if 0:  # disabled: exhaustive scan over all 230 space-group numbers
        for sn in range(1, 231):
            try:
                sgi = sgtbx.space_group_info(sn)
                print("Space group", sgi, "number", sn)
                exercise_direct(sgi, allelements,
                                use_random_u_iso=use_random_u_iso,
                                verbose=verbose)
            except Exception as e:
                print(e)
        return
def run(args):
    """Exercise the tardy test models, including fixed-vertex special cases.

    args: [] for a default run with suppressed output, or [n_dynamics_steps].
    """
    assert len(args) in [0, 1]
    if args:
        n_dynamics_steps = max(1, int(args[0]))
        out = sys.stdout
    else:
        n_dynamics_steps = 100
        out = null_out()
    show_times_at_exit()
    #
    exercise_accumulate_in_each_tree()
    exercise_near_singular_hinges()
    exercise_fixed_vertices_special_cases()

    def report(index, fixed=None):
        # One status line per model / fixed-vertex combination.
        if fixed is None:
            print("test model index:", index, file=out)
        else:
            print("test model index:", index,
                  "fixed_vertices:", fixed, file=out)

    for index in range(n_test_models):
        report(index)
        model = get_test_model_by_index(i=index)
        exercise_with_tardy_model(
            out=out, tardy_model=model, n_dynamics_steps=n_dynamics_steps)
        if index == 0:
            assert model.degrees_of_freedom == 3
            fixed = [0]
            report(index, fixed)
            model = get_test_model_by_index(
                i=index, fixed_vertex_lists=[fixed])
            # Fixing one vertex of the 3-dof model removes all freedom.
            assert model.degrees_of_freedom == 0
        elif index == 5:
            assert model.degrees_of_freedom == 11
            for fixed, expected_dof in \
                    test_case_5_fixed_vertices_expected_dof:
                report(index, fixed)
                model = get_test_model_by_index(
                    i=index, fixed_vertex_lists=[fixed])
                assert model.degrees_of_freedom == expected_dof
                exercise_with_tardy_model(
                    out=out, tardy_model=model,
                    n_dynamics_steps=n_dynamics_steps)
    #
    print("OK")
def run(args):
    """Exercise the tardy test models, including fixed-vertex special cases.

    args: [] for a default run with suppressed output, or [n_dynamics_steps].
    Fix: Python 2-only syntax — `xrange` and `print >> out` statements;
    the Py3 twin of this block in the same file confirms the conversion.
    """
    assert len(args) in [0, 1]
    if len(args) == 0:
        n_dynamics_steps = 100
        out = null_out()
    else:
        n_dynamics_steps = max(1, int(args[0]))
        out = sys.stdout
    show_times_at_exit()
    #
    exercise_accumulate_in_each_tree()
    exercise_near_singular_hinges()
    exercise_fixed_vertices_special_cases()
    #
    for i in range(n_test_models):
        print("test model index:", i, file=out)
        tardy_model = get_test_model_by_index(i=i)
        exercise_with_tardy_model(
            out=out, tardy_model=tardy_model,
            n_dynamics_steps=n_dynamics_steps)
        if i == 0:
            assert tardy_model.degrees_of_freedom == 3
            fixed_vertices = [0]
            print("test model index:", i,
                  "fixed_vertices:", fixed_vertices, file=out)
            tardy_model = get_test_model_by_index(
                i=i, fixed_vertex_lists=[fixed_vertices])
            # Fixing one vertex of the 3-dof model removes all freedom.
            assert tardy_model.degrees_of_freedom == 0
        elif i == 5:
            assert tardy_model.degrees_of_freedom == 11
            for fixed_vertices, expected_dof in \
                    test_case_5_fixed_vertices_expected_dof:
                print("test model index:", i,
                      "fixed_vertices:", fixed_vertices, file=out)
                tardy_model = get_test_model_by_index(
                    i=i, fixed_vertex_lists=[fixed_vertices])
                assert tardy_model.degrees_of_freedom == expected_dof
                exercise_with_tardy_model(
                    out=out, tardy_model=tardy_model,
                    n_dynamics_steps=n_dynamics_steps)
    #
    print("OK")
def run(args, multiplier):
    """Run exercise_direct over a fixed element composition.

    multiplier scales the element counts (composition mirrors roughly 1% of
    1hmg.pdb chain A: C 1583, N 445, O 495, S 13).
    Fix: Python 2-only syntax — `xrange`, `except Exception, e`, and
    `print` statements (the Py3 sibling block uses range/as/print()).
    """
    show_times_at_exit()
    verbose = '--verbose' in args
    use_random_u_iso = '--use_random_u_iso' in args
    # count from 1hmg.pdb, chain A: C, 1583; N, 445; O, 495, S, 13
    elements = ['O'] * 19 + ['N'] * 18 + ['C'] * 62 + ['S'] * 1
    allelements = elements * multiplier
    if 0:  # disabled: exhaustive scan over all 230 space-group numbers
        for sn in range(1, 231):
            try:
                sgi = sgtbx.space_group_info(sn)
                print("Space group", sgi, "number", sn)
                exercise_direct(sgi, allelements,
                                use_random_u_iso=use_random_u_iso,
                                verbose=verbose)
            except Exception as e:
                print(e)
        return
def run():
  # Drive a SCons build: locate a bundled SCons engine, import it, and
  # hand control to SCons.Script.main().  (Python 2 code: `print >>`.)
  debug_import = "--debug=import" in sys.argv[1:]
  def show_traceback():
    # Show the import traceback only when --debug=import was given.
    if (debug_import):
      import traceback
      print >> sys.stderr
      traceback.print_exc()
      print >> sys.stderr
  engine_path = find_scons_engine_path()
  if (engine_path is not None):
    sys.path.insert(0, engine_path)
    try:
      import SCons
    except ImportError:
      # Candidate engine path did not work; remove it from sys.path again.
      show_traceback()
      del sys.path[0]
  try:
    import SCons.Script
  except ImportError:
    # No usable SCons anywhere: explain where to install a distribution.
    show_traceback()
    msg = [
      "SCons is not available.",
      " A possible solution is to unpack a SCons distribution in",
      " one of these directories:"
    ]
    for path in libtbx.env.repository_paths:
      msg.append(" " + show_string(abs(path)))
    msg.extend([
      " SCons distributions are available at this location:",
      " http://www.scons.org/",
      " It may be necessary to rename the unpacked distribution, e.g.:",
      " mv scons-0.96.1 scons"
    ])
    raise Sorry("\n".join(msg))
  import SCons.Script.Main
  if (hasattr(SCons.Script.Main, "fetch_win32_parallel_msg")):
    # Suppress SCons' win32 parallel-build warning when this version has it.
    SCons.Script.Main.fetch_win32_parallel_msg = dummy_fetch_win32_parallel_msg
  show_times_at_exit()
  SCons.Script.main()
def run(args):
    """Exercise spherical rotation parameterizations and their simulation.

    args: [] for a quick silent run, or [n_trials, n_dynamics_steps,
    random_seed].
    Fix: Python 2 `print "OK"` (SyntaxError under Python 3; the Py3 twin
    of this block uses print()).
    """
    assert len(args) in [0, 3]
    if len(args) == 0:
        n_trials = 3
        n_dynamics_steps = 30
        random_seed = 0
        out = null_out()
    else:
        n_trials = max(1, int(args[0]))
        n_dynamics_steps = max(1, int(args[1]))
        random_seed = int(args[2])
        out = sys.stdout
    show_times_at_exit()
    mersenne_twister = flex.mersenne_twister(seed=0)
    exercise_euler_params_qE_as_euler_angles_xyz_qE(
        mersenne_twister=mersenne_twister)
    exercise_T_as_X(mersenne_twister=mersenne_twister)
    # plot_prefix is read at module level by the plotting helpers.
    global plot_prefix
    plot_prefix = random_seed
    exercise_simulation(
        out=out,
        n_trials=n_trials,
        n_dynamics_steps=n_dynamics_steps,
        random_seed=random_seed)
    print("OK")
def run(args):
    """Exercise spherical rotation parameterizations and their simulation.

    args: [] for a quick silent run, or [n_trials, n_dynamics_steps,
    random_seed].
    """
    assert len(args) in [0, 3]
    if args:
        n_trials = max(1, int(args[0]))
        n_dynamics_steps = max(1, int(args[1]))
        random_seed = int(args[2])
        out = sys.stdout
    else:
        n_trials, n_dynamics_steps, random_seed = 3, 30, 0
        out = null_out()
    show_times_at_exit()
    rng = flex.mersenne_twister(seed=0)
    exercise_euler_params_qE_as_euler_angles_xyz_qE(mersenne_twister=rng)
    exercise_T_as_X(mersenne_twister=rng)
    # plot_prefix is a module-level name consumed elsewhere.
    global plot_prefix
    plot_prefix = random_seed
    exercise_simulation(
        out=out,
        n_trials=n_trials,
        n_dynamics_steps=n_dynamics_steps,
        random_seed=random_seed)
    print("OK")
def run(args, multiplier):
    """Run exercise_direct on a fixed element composition, scaled by
    `multiplier`.

    Two historical scans (all space-group numbers; a symbol selection) are
    kept but disabled; only the P1 case executes.
    """
    show_times_at_exit()
    verbose = '--verbose' in args
    use_random_u_iso = '--use_random_u_iso' in args
    # count from 1hmg.pdb, chain A: C, 1583; N, 445; O, 495, S, 13
    composition = ['O'] * 19 + ['N'] * 18 + ['C'] * 62 + ['S'] * 1
    allelements = composition * multiplier

    def check(space_group_info):
        exercise_direct(space_group_info, allelements,
                        use_random_u_iso=use_random_u_iso,
                        verbose=verbose)

    if 0:  # disabled: exhaustive scan over all 230 space-group numbers
        for number in range(1, 231):
            try:
                info = sgtbx.space_group_info(number)
                print("Space group", info, "number", number)
                check(info)
            except Exception as e:
                print(e)
        return
    if 0:  # disabled: a representative selection of symbols
        for symbol in ["P1", "P3", "P41", "P212121", "I41", "F432"]:
            info = sgtbx.space_group_info(symbol)
            print("Space group", info)
            check(info)
    if 1:
        info = sgtbx.space_group_info("P1")
        print("Space group", info)
        check(info)
def exercise():
    """Merge 9 CBF images in groups of 3 and check the output count.

    Fixes: Python 2 `print` statements; the original computed `cwd` but
    never restored it, leaving the process chdir'd into the temp directory
    (the sibling test in this file does restore it); unused `result` local.
    """
    data_dir = os.path.join(dials_regression, "centroid_test_data")
    cwd = os.path.abspath(os.curdir)
    tmp_dir = os.path.abspath(open_tmp_directory())
    print(tmp_dir)
    os.chdir(tmp_dir)
    g = glob.glob(os.path.join(data_dir, "*.cbf"))
    assert len(g) == 9
    cmd = "dials.merge_cbf %s merge_n_images=3" % (" ".join(g))
    print(cmd)
    easy_run.fully_buffered(cmd).raise_if_errors()
    g = glob.glob(os.path.join(tmp_dir, "sum_*.cbf"))
    assert len(g) == 3
    os.chdir(cwd)  # restore the working directory for subsequent tests

def run(args):
    """Entry point; skips silently when dials_regression is unavailable."""
    if not have_dials_regression:
        print("Skipping tst_merge_cbf.py: dials_regression not available")
        return
    exercise()

if __name__ == '__main__':
    import sys
    from libtbx.utils import show_times_at_exit
    show_times_at_exit()
    run(sys.argv[1:])
assert mtz_object.space_group().type().lookup_symbol() == "P 41 21 2" assert approx_equal(mtz_object.n_reflections(), 7446, eps=2e3) os.chdir(cwd) def run(args): if not have_xia2_regression: print "Skipping tst_scan_varying_integration_bug.py: xia2_regression not available" return if not have_test_data: print "Skipping tst_scan_varying_integration_bug.py: xia2_regression " + \ "test data not available. Please run " + \ "xia2_regression.fetch_test_data first" return exercises = (exercise_1,) if len(args): args = [int(arg) for arg in args] for arg in args: assert arg > 0 exercises = [exercises[arg-1] for arg in args] for exercise in exercises: exercise() if __name__ == '__main__': import sys from libtbx.utils import show_times_at_exit show_times_at_exit() run(sys.argv[1:])
def run(args):
  # Comprehensive tardy parameter sweep: run tst_tardy_pdb over a table of
  # parameter combinations, each repeated with several random seeds.
  # (Python 2 code: `print` statements, `xrange`, integer division.)
  local_master_phil = get_master_phil()
  argument_interpreter = local_master_phil.command_line_argument_interpreter()
  phil_objects = []
  for arg in args:
    phil_objects.append(argument_interpreter.process(arg=arg))
  local_params = local_master_phil.fetch(sources=phil_objects).extract()
  # chunk allows splitting the trial range across parallel jobs.
  chunk = chunk_manager(
    n=local_params.chunk[0],
    i=local_params.chunk[1]).easy_all()
  local_master_phil.format(local_params).show()
  print
  #
  assert local_params.pdb_file is not None
  assert op.isfile(local_params.pdb_file)
  #
  tst_tardy_pdb_master_phil = tst_tardy_pdb.get_master_phil()
  tst_tardy_pdb_params = tst_tardy_pdb_master_phil.extract()
  tst_tardy_pdb_params.tardy_displacements = Auto
  tst_tardy_pdb_params.tardy_displacements_auto.parameterization \
    = local_params.random_displacements_parameterization
  # Select the parameter trial table matching the requested algorithm.
  if (local_params.algorithm == "minimization"):
    parameter_trial_table = common_parameter_trial_table
  elif (local_params.algorithm == "annealing"):
    parameter_trial_table = annealing_parameter_trial_table
  else:
    raise AssertionError
  cp_n_trials = number_of_trials(table=parameter_trial_table)
  print "Number of parameter trials:", cp_n_trials
  print "parameter_trial_table:"
  pprint.pprint(parameter_trial_table)
  print
  #
  show_times_at_exit()
  #
  params_shown_once_already = False
  tst_tardy_pdb_log_shown_once_already = False
  for cp_i_trial in xrange(cp_n_trials):
    # Skip trials assigned to other chunks (parallel execution).
    if (chunk.skip_iteration(i=cp_i_trial)): continue
    # NOTE(review): Python 2 integer division — the percentage prints as a
    # truncated integer ratio; confirm before porting to Python 3.
    print "cp_i_trial: %d / %d = %.2f %%" % (
      cp_i_trial, cp_n_trials, 100 * (cp_i_trial + 1) / cp_n_trials)
    if (local_params.verbose):
      print
    sys.stdout.flush()
    set_parameters(
      params=tst_tardy_pdb_params,
      trial_table=parameter_trial_table,
      cp_i_trial=cp_i_trial)
    if (local_params.algorithm == "minimization"):
      if (local_params.orca_experiments):
        tst_tardy_pdb_params.keep_all_restraints = True
        # orca experiments are disabled in emulate_cartesian mode.
        if (tst_tardy_pdb_params.emulate_cartesian):
          tst_tardy_pdb_params.orca_experiments = False
        else:
          tst_tardy_pdb_params.orca_experiments = True
      tst_tardy_pdb_params.number_of_cooling_steps = 0
      tst_tardy_pdb_params.minimization_max_iterations = None
    elif (local_params.algorithm == "annealing"):
      tst_tardy_pdb_params.number_of_time_steps = 1
      tst_tardy_pdb_params.time_step_pico_seconds = 0.001
      tst_tardy_pdb_params.minimization_max_iterations = 0
    else:
      raise AssertionError
    for random_seed in xrange(local_params.number_of_random_trials):
      tst_tardy_pdb_params.random_seed = random_seed
      tst_tardy_pdb_params.dihedral_function_type \
        = local_params.dihedral_function_type
      # Show the full parameter set once (every time when verbose).
      if (local_params.verbose or not params_shown_once_already):
        params_shown_once_already = True
        tst_tardy_pdb_master_phil.format(tst_tardy_pdb_params).show()
        print
        sys.stdout.flush()
      if (local_params.hot):
        # Verbose runs stream directly to stdout; otherwise capture the
        # log so it can be shown once, or on failure.
        if (local_params.verbose):
          tst_tardy_pdb_log = sys.stdout
        else:
          tst_tardy_pdb_log = StringIO()
        coll = collector()
        try:
          tst_tardy_pdb.run_test(
            params=tst_tardy_pdb_params,
            pdb_files=[local_params.pdb_file],
            other_files=[],
            callback=coll,
            log=tst_tardy_pdb_log)
        except KeyboardInterrupt: raise
        except Exception:
          # Dump the offending parameter set and captured log, then either
          # abort or record the exception and keep going.
          print
          print "tst_tardy_pdb_params leading to exception:"
          print
          tst_tardy_pdb_master_phil.format(tst_tardy_pdb_params).show()
          print
          if (not local_params.verbose):
            sys.stdout.write(tst_tardy_pdb_log.getvalue())
          sys.stdout.flush()
          if (not local_params.keep_going):
            raise
          report_exception(
            context_info="cp_i_trial=%d, random_seed=%d" % (
              cp_i_trial, random_seed))
        else:
          if (    not local_params.verbose
              and not tst_tardy_pdb_log_shown_once_already):
            tst_tardy_pdb_log_shown_once_already = True
            sys.stdout.write(tst_tardy_pdb_log.getvalue())
          # Machine-readable result line, one per (trial, seed).
          print "RESULT_cp_i_trial_random_seed_rmsd:", \
            cp_i_trial, random_seed, list(coll.rmsd)
          sys.stdout.flush()
          # NOTE(review): 'first_pass' appears vestigial — never read here.
          first_pass = False
  # NOTE(review): placement of this trailing blank line at function level
  # was reconstructed from the collapsed source — confirm against VCS.
  if (local_params.hot):
    print
def run(args):
  # Comprehensive tardy parameter sweep: run tst_tardy_pdb over a table of
  # parameter combinations, each repeated with several random seeds.
  # (Python 2 code: `print` statements, `xrange`, integer division.)
  local_master_phil = get_master_phil()
  argument_interpreter = local_master_phil.command_line_argument_interpreter()
  phil_objects = []
  for arg in args:
    phil_objects.append(argument_interpreter.process(arg=arg))
  local_params = local_master_phil.fetch(sources=phil_objects).extract()
  # chunk allows splitting the trial range across parallel jobs.
  chunk = chunk_manager(
    n=local_params.chunk[0],
    i=local_params.chunk[1]).easy_all()
  local_master_phil.format(local_params).show()
  print
  #
  assert local_params.pdb_file is not None
  assert op.isfile(local_params.pdb_file)
  #
  tst_tardy_pdb_master_phil = tst_tardy_pdb.get_master_phil()
  tst_tardy_pdb_params = tst_tardy_pdb_master_phil.extract()
  tst_tardy_pdb_params.tardy_displacements = Auto
  tst_tardy_pdb_params.tardy_displacements_auto.parameterization \
    = local_params.random_displacements_parameterization
  # Select the parameter trial table matching the requested algorithm.
  if (local_params.algorithm == "minimization"):
    parameter_trial_table = common_parameter_trial_table
  elif (local_params.algorithm == "annealing"):
    parameter_trial_table = annealing_parameter_trial_table
  else:
    raise AssertionError
  cp_n_trials = number_of_trials(table=parameter_trial_table)
  print "Number of parameter trials:", cp_n_trials
  print "parameter_trial_table:"
  pprint.pprint(parameter_trial_table)
  print
  #
  show_times_at_exit()
  #
  params_shown_once_already = False
  tst_tardy_pdb_log_shown_once_already = False
  for cp_i_trial in xrange(cp_n_trials):
    # Skip trials assigned to other chunks (parallel execution).
    if (chunk.skip_iteration(i=cp_i_trial)): continue
    # NOTE(review): Python 2 integer division — percentage is truncated.
    print "cp_i_trial: %d / %d = %.2f %%" % (
      cp_i_trial, cp_n_trials, 100 * (cp_i_trial+1) / cp_n_trials)
    if (local_params.verbose):
      print
    sys.stdout.flush()
    set_parameters(
      params=tst_tardy_pdb_params,
      trial_table=parameter_trial_table,
      cp_i_trial=cp_i_trial)
    if (local_params.algorithm == "minimization"):
      if (local_params.orca_experiments):
        tst_tardy_pdb_params.keep_all_restraints = True
        # orca experiments are disabled in emulate_cartesian mode.
        if (tst_tardy_pdb_params.emulate_cartesian):
          tst_tardy_pdb_params.orca_experiments = False
        else:
          tst_tardy_pdb_params.orca_experiments = True
      tst_tardy_pdb_params.number_of_cooling_steps = 0
      tst_tardy_pdb_params.minimization_max_iterations = None
    elif (local_params.algorithm == "annealing"):
      tst_tardy_pdb_params.number_of_time_steps = 1
      tst_tardy_pdb_params.time_step_pico_seconds = 0.001
      tst_tardy_pdb_params.minimization_max_iterations = 0
    else:
      raise AssertionError
    for random_seed in xrange(local_params.number_of_random_trials):
      tst_tardy_pdb_params.random_seed = random_seed
      tst_tardy_pdb_params.dihedral_function_type \
        = local_params.dihedral_function_type
      # Show the full parameter set once (every time when verbose).
      if (local_params.verbose or not params_shown_once_already):
        params_shown_once_already = True
        tst_tardy_pdb_master_phil.format(tst_tardy_pdb_params).show()
        print
        sys.stdout.flush()
      if (local_params.hot):
        # Verbose runs stream to stdout; otherwise capture the log so it
        # can be shown once, or on failure.
        if (local_params.verbose):
          tst_tardy_pdb_log = sys.stdout
        else:
          tst_tardy_pdb_log = StringIO()
        coll = collector()
        try:
          tst_tardy_pdb.run_test(
            params=tst_tardy_pdb_params,
            pdb_files=[local_params.pdb_file],
            other_files=[],
            callback=coll,
            log=tst_tardy_pdb_log)
        except KeyboardInterrupt: raise
        except Exception:
          # Dump the offending parameter set and captured log, then either
          # abort or record the exception and keep going.
          print
          print "tst_tardy_pdb_params leading to exception:"
          print
          tst_tardy_pdb_master_phil.format(tst_tardy_pdb_params).show()
          print
          if (not local_params.verbose):
            sys.stdout.write(tst_tardy_pdb_log.getvalue())
          sys.stdout.flush()
          if (not local_params.keep_going):
            raise
          report_exception(
            context_info="cp_i_trial=%d, random_seed=%d" % (
              cp_i_trial, random_seed))
        else:
          if (    not local_params.verbose
              and not tst_tardy_pdb_log_shown_once_already):
            tst_tardy_pdb_log_shown_once_already = True
            sys.stdout.write(tst_tardy_pdb_log.getvalue())
          # Machine-readable result line, one per (trial, seed).
          print "RESULT_cp_i_trial_random_seed_rmsd:", \
            cp_i_trial, random_seed, list(coll.rmsd)
          sys.stdout.flush()
          # NOTE(review): 'first_pass' appears vestigial — never read here.
          first_pass = False
  # NOTE(review): placement of this trailing blank line at function level
  # was reconstructed from the collapsed source — confirm against VCS.
  if (local_params.hot):
    print
def run(args):
    """Compare spherical rotation parameterizations by LBFGS call counts.

    args: [] for a quick silent run, or [n_sites, n_trials].
    Fixes: Python 2-only syntax (`xrange`, `print >> out`); the refinery
    result shadowed the rotation matrix `r` — renamed to `ref`.
    """
    assert len(args) in [0, 2], "n_sites, n_trials"
    if len(args) == 0:
        n_sites, n_trials = 3, 2
        out = null_out()
    else:
        n_sites, n_trials = [int(arg) for arg in args]
        out = sys.stdout
    #
    show_times_at_exit()

    class type_info(object):
        """Pairs a parameterization type with a gradient mode."""
        def __init__(O, type, use_analytical_gradients):
            O.type = type
            O.use_analytical_gradients = use_analytical_gradients
        def __str__(O):
            return "%s(use_analytical_gradients=%s)" % (
                O.type.__name__, str(O.use_analytical_gradients))

    spherical_types = [
        type_info(euler_params, False),
        type_info(euler_params, True),
        type_info(euler_angles_xyz, False),
        type_info(euler_angles_xyz, True),
        type_info(euler_angles_zxz, False),
        type_info(euler_angles_zxz, True),
        type_info(euler_angles_yxyz, False),
        type_info(euler_angles_xyzy, False),
        type_info(inf_euler_params, False),
        type_info(inf_axis_angle, False)]
    nfun_accu = {}
    n_failed = {}
    for ti in spherical_types:
        nfun_accu[str(ti)] = flex.size_t()
        n_failed[str(ti)] = 0
    mersenne_twister = flex.mersenne_twister(seed=0)
    for i_trial in range(n_trials):
        # Random sites in [-1, 1)^3; wells are a rigid rotation of the
        # sites about their center of mass.
        sites = [matrix.col(s) for s in flex.vec3_double(
            mersenne_twister.random_double(size=n_sites * 3) * 2 - 1)]
        c = center_of_mass_from_sites(sites)
        r = matrix.sqr(mersenne_twister.random_double_r3_rotation_matrix())
        wells = [r * (s - c) + c for s in sites]
        for ti in spherical_types:
            ref = refinery(
                spherical_type_info=ti, sites=sites, wells=wells, out=out)
            nfun_accu[str(ti)].append(ref.nfun)
            if ref.failed:
                n_failed[str(ti)] += 1
    nfun_sums = []
    annotations = []
    for ti in spherical_types:
        print(ti, file=out)
        nfuns = nfun_accu[str(ti)]
        stats = nfuns.as_double().min_max_mean()
        stats.show(out=out, prefix=" ")
        nfun_sums.append((str(ti), flex.sum(nfuns)))
        if n_failed[str(ti)] == 0:
            annotations.append(None)
        else:
            annotations.append("failed: %d" % n_failed[str(ti)])
    print(file=out)
    show_sorted_by_counts(
        label_count_pairs=nfun_sums, reverse=False,
        out=out, annotations=annotations)
    print(file=out)
    print("OK")
def run(args):
    """Compare spherical rotation parameterizations by LBFGS call counts.

    args: [] for a quick silent run, or [n_sites, n_trials].
    Fixes: Python 2-only syntax (`xrange`, `print >> out`); the refinery
    result shadowed the rotation matrix `r` — renamed to `ref`.
    """
    assert len(args) in [0, 2], "n_sites, n_trials"
    if len(args) == 0:
        n_sites, n_trials = 3, 2
        out = null_out()
    else:
        n_sites, n_trials = [int(arg) for arg in args]
        out = sys.stdout
    #
    show_times_at_exit()

    class type_info(object):
        """Pairs a parameterization type with a gradient mode."""
        def __init__(O, type, use_analytical_gradients):
            O.type = type
            O.use_analytical_gradients = use_analytical_gradients
        def __str__(O):
            return "%s(use_analytical_gradients=%s)" % (
                O.type.__name__, str(O.use_analytical_gradients))

    spherical_types = [
        type_info(euler_params, False),
        type_info(euler_params, True),
        type_info(euler_angles_xyz, False),
        type_info(euler_angles_xyz, True),
        type_info(euler_angles_zxz, False),
        type_info(euler_angles_zxz, True),
        type_info(euler_angles_yxyz, False),
        type_info(euler_angles_xyzy, False),
        type_info(inf_euler_params, False),
        type_info(inf_axis_angle, False)]
    nfun_accu = {}
    n_failed = {}
    for ti in spherical_types:
        nfun_accu[str(ti)] = flex.size_t()
        n_failed[str(ti)] = 0
    mersenne_twister = flex.mersenne_twister(seed=0)
    for i_trial in range(n_trials):
        # Random sites in [-1, 1)^3; wells are a rigid rotation of the
        # sites about their center of mass.
        sites = [matrix.col(s) for s in flex.vec3_double(
            mersenne_twister.random_double(size=n_sites * 3) * 2 - 1)]
        c = center_of_mass_from_sites(sites)
        r = matrix.sqr(mersenne_twister.random_double_r3_rotation_matrix())
        wells = [r * (s - c) + c for s in sites]
        for ti in spherical_types:
            ref = refinery(
                spherical_type_info=ti, sites=sites, wells=wells, out=out)
            nfun_accu[str(ti)].append(ref.nfun)
            if ref.failed:
                n_failed[str(ti)] += 1
    nfun_sums = []
    annotations = []
    for ti in spherical_types:
        print(ti, file=out)
        nfuns = nfun_accu[str(ti)]
        stats = nfuns.as_double().min_max_mean()
        stats.show(out=out, prefix=" ")
        nfun_sums.append((str(ti), flex.sum(nfuns)))
        if n_failed[str(ti)] == 0:
            annotations.append(None)
        else:
            annotations.append("failed: %d" % n_failed[str(ti)])
    print(file=out)
    show_sorted_by_counts(
        label_count_pairs=nfun_sums, reverse=False,
        out=out, annotations=annotations)
    print(file=out)
    print("OK")