def __init__(self, datadir, n_frame, transmittance, apply_noise,
             plot=False, esd_plot=False, half_data_flag=0):
  # read the ground truth values back in
  import cPickle as pickle
  ordered_intensities = pickle.load(
    open(os.path.join(datadir, "intensities.pickle"), "rb"))
  frames = pickle.load(open(os.path.join(datadir, "frames.pickle"), "rb"))
  sim = pickle.load(
    open(os.path.join(datadir, "simulated%05d_0.pickle" % n_frame), "rb"))
  print "accepted obs %d" % (len(sim["observed_intensity"]))

  FSIM = prepare_simulation_with_noise(
    sim, transmittance=transmittance, apply_noise=apply_noise,
    ordered_intensities=ordered_intensities, half_data_flag=half_data_flag)
  I, I_visited, G, G_visited = I_and_G_base_estimate(FSIM)
  model_I = ordered_intensities.data()[0:len(I)]
  model_G = frames["scale_factors"][0:len(G)]
  model_B = frames["B_factors"][0:len(G)]

  T = Timer("%d frames, %f transmittance, %s noise" % (
    n_frame, transmittance, {False: "NO", True: "YES"}[apply_noise]))
  mapper = mapper_factory(xscale6e)
  minimizer = mapper(I, G, I_visited, G_visited, FSIM)
  del T

  minimizer.show_summary()
  Fit = minimizer.e_unpack()
  show_correlation(Fit["G"], model_G, G_visited, "Correlation of G:")
  show_correlation(Fit["B"], model_B, G_visited, "Correlation of B:")
  show_correlation(Fit["I"], model_I, I_visited, "Correlation of I:")
  Fit_stddev = minimizer.e_unpack_stddev()

  if plot:
    plot_it(Fit["G"], model_G, mode="G")
    plot_it(Fit["B"], model_B, mode="B")
    plot_it(Fit["I"], model_I, mode="I")
  print
  if esd_plot:
    minimizer.esd_plot()

  from cctbx.examples.merging.show_results import show_overall_observations
  table1, self.n_bins, self.d_min = show_overall_observations(
    Fit["I"], Fit_stddev["I"], I_visited,
    ordered_intensities, FSIM, title="Statistics for all reflections")

  self.FSIM = FSIM
  self.ordered_intensities = ordered_intensities
  self.Fit_I = Fit["I"]
  self.Fit_I_stddev = Fit_stddev["I"]
  self.I_visited = I_visited
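# --- Illustration (not part of the original module) --------------------------
# A minimal sketch of the kind of comparison show_correlation performs above:
# correlate the fitted values against the ground-truth model, restricted to
# the entries the minimizer actually visited.  flex.linear_correlation is a
# real scitbx call; the helper name "correlation_of_visited" is an assumption
# introduced purely for illustration.
def correlation_of_visited(fitted, model, visited):
  from scitbx.array_family import flex
  sel = (visited == 1)  # keep only parameters that were actually refined
  lc = flex.linear_correlation(fitted.select(sel), model.select(sel))
  return lc.coefficient()
# e.g. correlation_of_visited(Fit["G"], model_G, G_visited) would give the
# Pearson coefficient reported on the "Correlation of G:" line above.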
def __init__(self, datadir, work_params, plot=False, esd_plot=False,
             half_data_flag=0):
  casetag = work_params.output.prefix
  # read the ground truth values back in
  import cPickle as pickle
  # it is assumed (for now) that the reference millers contain a complete asymmetric unit
  # of indices, within the (d_max,d_min) region of interest and possibly outside the region.
  reference_millers = pickle.load(
    open(os.path.join(datadir, casetag + "_miller.pickle"), "rb"))
  experiment_manager = read_experiments(work_params)
  obs = pickle.load(
    open(os.path.join(datadir, casetag + "_observation.pickle"), "rb"))
  print "Read in %d observations" % (len(obs["observed_intensity"]))
  reference_millers.show_summary(prefix="Miller index file ")
  print len(obs["frame_lookup"]), len(obs["observed_intensity"]), \
    flex.max(obs['miller_lookup']), flex.max(obs['frame_lookup'])
  max_frameno = flex.max(obs["frame_lookup"])

  from iotbx import mtz
  mtz_object = mtz.object(file_name=work_params.scaling.mtz_file)
  #for array in mtz_object.as_miller_arrays():
  #  this_label = array.info().label_string()
  #  print this_label, array.observation_type()
  I_sim = mtz_object.as_miller_arrays()[0].as_intensity_array()
  I_sim.show_summary()
  MODEL_REINDEX_OP = work_params.model_reindex_op
  I_sim = I_sim.change_basis(MODEL_REINDEX_OP).map_to_asu()

  # match up isomorphous (the simulated fake F's) with experimental unique set
  matches = miller.match_multi_indices(
    miller_indices_unique=reference_millers.indices(),
    miller_indices=I_sim.indices())
  print "original unique", len(reference_millers.indices())
  print "isomorphous set", len(I_sim.indices())
  print "pairs", len(matches.pairs())
  iso_data = flex.double(len(reference_millers.indices()))
  for pair in matches.pairs():
    iso_data[pair[0]] = I_sim.data()[pair[1]]
  reference_data = miller.array(miller_set=reference_millers, data=iso_data)
  reference_data.set_observation_type_xray_intensity()

  FOBS = prepare_observations_for_scaling(
    work_params, obs=obs,
    reference_intensities=reference_data,
    files=experiment_manager.get_files(),
    half_data_flag=half_data_flag)
  I, I_visited, G, G_visited = I_and_G_base_estimate(FOBS, params=work_params)
  print "I length", len(I), "G length", len(G), "(Reference set; entire asymmetric unit)"
  assert len(reference_data.data()) == len(I)
  # presumably these assertions fail when half data are taken for CC1/2 or d_min is cut
  model_I = reference_data.data()[0:len(I)]

  T = Timer("%d frames" % (len(G),))
  mapper = mapper_factory(xscale6e)
  minimizer = mapper(I, G, I_visited, G_visited, FOBS, params=work_params,
                     experiments=experiment_manager.get_experiments())
  del T

  minimizer.show_summary()
  Fit = minimizer.e_unpack()
  Gstats = flex.mean_and_variance(Fit["G"].select(G_visited == 1))
  print "G mean and standard deviation:", \
    Gstats.mean(), Gstats.unweighted_sample_standard_deviation()
  if "Bfactor" in work_params.levmar.parameter_flags:
    Bstats = flex.mean_and_variance(Fit["B"].select(G_visited == 1))
    print "B mean and standard deviation:", \
      Bstats.mean(), Bstats.unweighted_sample_standard_deviation()
  show_correlation(Fit["I"], model_I, I_visited, "Correlation of I:")
  Fit_stddev = minimizer.e_unpack_stddev()
  # XXX FIXME known bug: the length of Fit["G"] could be smaller than the length of experiment_manager.get_files()
  # Not sure if this has any operational drawbacks.  It's a result of half-dataset selection.

  if plot:
    plot_it(Fit["I"], model_I, mode="I")
  if "Rxy" in work_params.levmar.parameter_flags:
    show_histogram(Fit["Ax"], "Histogram of x rotation (degrees)")
    show_histogram(Fit["Ay"], "Histogram of y rotation (degrees)")
  print
  if esd_plot:
    minimizer.esd_plot()

  from cctbx.examples.merging.show_results import show_overall_observations
  table1, self.n_bins, self.d_min = show_overall_observations(
    Fit["I"], Fit_stddev["I"], I_visited,
    reference_data, FOBS, title="Statistics for all reflections",
    work_params=work_params)

  self.FSIM = FOBS
  self.ordered_intensities = reference_data
  self.reference_millers = reference_millers
  self.Fit_I = Fit["I"]
  self.Fit_I_stddev = Fit_stddev["I"]
  self.I_visited = I_visited
  self.Fit = Fit
  self.experiments = experiment_manager