def get_frames_from_mysql(self, params):
  """Fetch every row of the <runtag>_frame table and repackage the
  columns as a dict of flex arrays / plain lists keyed by field name.

  Columns 8..16 hold the nine elements of the crystal orientation
  matrix; column 24 is the image file name.
  """
  timer = Timer("frames")
  connection = manager(params).connection()
  cursor = connection.cursor()
  cursor.execute("SELECT * FROM %s_frame;" % params.mysql.runtag)
  rows = cursor.fetchall()
  from cctbx.crystal_orientation import crystal_orientation
  orientations = []
  for row in rows:
    # Nine-element reciprocal-space matrix; False selects the
    # reciprocal-axis constructor convention.
    orientations.append(crystal_orientation(tuple(row[8:17]), False))
  def ints(idx):
    # Pull column idx from every row as a flex.int array.
    return flex.int([row[idx] for row in rows])
  def doubles(idx):
    # Pull column idx from every row as a flex.double array.
    return flex.double([row[idx] for row in rows])
  return dict(
    frame_id=ints(0),
    wavelength=doubles(1),
    beam_x=doubles(2),
    beam_y=doubles(3),
    distance=doubles(4),
    orientation=orientations,
    rotation100_rad=doubles(17),
    rotation010_rad=doubles(18),
    rotation001_rad=doubles(19),
    half_mosaicity_deg=doubles(20),
    wave_HE_ang=doubles(21),
    wave_LE_ang=doubles(22),
    domain_size_ang=doubles(23),
    unique_file_name=[row[24] for row in rows],
  )
def get_frames_from_mysql(self, params):
    """Read the <runtag>_frame table and return its columns keyed by
    field name (flex arrays for numeric data, plain lists otherwise)."""
    T = Timer("frames")
    db = manager(params).connection()
    cursor = db.cursor()
    cursor.execute("SELECT * FROM %s_frame;" % params.mysql.runtag)
    records = cursor.fetchall()
    from cctbx.crystal_orientation import crystal_orientation
    # Columns 8..16 are the nine orientation-matrix elements.
    result = dict(
        frame_id=flex.int([rec[0] for rec in records]),
        orientation=[
            crystal_orientation(tuple(rec[8:17]), False) for rec in records
        ],
        unique_file_name=[rec[24] for rec in records],
    )
    # Every remaining column is a double-precision scalar.
    double_fields = (
        ("wavelength", 1), ("beam_x", 2), ("beam_y", 3), ("distance", 4),
        ("rotation100_rad", 17), ("rotation010_rad", 18),
        ("rotation001_rad", 19), ("half_mosaicity_deg", 20),
        ("wave_HE_ang", 21), ("wave_LE_ang", 22), ("domain_size_ang", 23),
    )
    for key, idx in double_fields:
        result[key] = flex.double([rec[idx] for rec in records])
    return result
def scale_all (self, file_names) : t1 = time.time() if self.params.backend == 'MySQL': from xfel.cxi.merging_database import manager elif self.params.backend == 'SQLite': from xfel.cxi.merging_database_sqlite3 import manager elif self.params.backend == 'FS': from xfel.cxi.merging_database_fs import manager elif self.params.backend == 'Flex': from xfel.cxi.merging_database_flex import manager import multiprocessing print "Allocating intensities" intensity_proxy = multiprocessing.Array('d',self.params.memory.shared_array_allocation,lock=True) print "Allocating sigmas" sigma_proxy = multiprocessing.Array('d',self.params.memory.shared_array_allocation,lock=True) print "Allocating frame_id" frame_proxy = multiprocessing.Array('l',self.params.memory.shared_array_allocation,lock=True) print "Allocating miller_id" miller_proxy = multiprocessing.Array('l',self.params.memory.shared_array_allocation,lock=True) H_proxy = multiprocessing.Array('i',self.params.memory.shared_array_allocation,lock=True) K_proxy = multiprocessing.Array('i',self.params.memory.shared_array_allocation,lock=True) L_proxy = multiprocessing.Array('i',self.params.memory.shared_array_allocation,lock=True) xtal_proxy = multiprocessing.Array('c',self.params.memory.shared_array_allocation,lock=True) print "Finished allocating" rows_observation = multiprocessing.Array('l',[0],lock=True) data_dict = dict(intensity_proxy=intensity_proxy, sigma_proxy=sigma_proxy, frame_proxy=frame_proxy, miller_proxy=miller_proxy, H_proxy=H_proxy, K_proxy=K_proxy, L_proxy=L_proxy, rows=rows_observation, xtal_proxy=xtal_proxy ) db_mgr = manager(self.params,data_dict) db_mgr.initialize_db(self.miller_set) # Unless the number of requested processes is greater than one, # try parallel multiprocessing on a parallel host. Block until # all database commands have been processed. 
nproc = self.params.nproc if (nproc is None) or (nproc is Auto): nproc = libtbx.introspection.number_of_processors() if nproc > 1: try : import multiprocessing self._scale_all_parallel(file_names, db_mgr) except ImportError, e : print >> self.log, \ "multiprocessing module not available (requires Python >= 2.6)\n" \ "will scale frames serially" self._scale_all_serial(file_names, db_mgr)
def read_all_mysql(self): print "reading observations from %s database" % (self.params.backend) if self.params.backend == 'MySQL': from xfel.cxi.merging_database import manager elif self.params.backend == 'SQLite': from xfel.cxi.merging_database_sqlite3 import manager else: from xfel.cxi.merging_database_fs import manager CART = manager(self.params) self.millers_mysql = CART.read_indices() self.millers = self.millers_mysql self.observations_mysql = CART.read_observations() parser = column_parser() parser.set_int("hkl_id", self.observations_mysql["hkl_id"]) parser.set_double("i", self.observations_mysql["i"]) parser.set_double("sigi", self.observations_mysql["sigi"]) parser.set_int("frame_id", self.observations_mysql["frame_id"]) parser.set_int("H", self.observations_mysql["original_h"]) parser.set_int("K", self.observations_mysql["original_k"]) parser.set_int("L", self.observations_mysql["original_l"]) self._observations_mysql = parser self.observations = dict( hkl_id=parser.get_int("hkl_id"), i=parser.get_double("i"), sigi=parser.get_double("sigi"), frame_id=parser.get_int("frame_id"), H=parser.get_int("H"), K=parser.get_int("K"), L=parser.get_int("L"), ) self.frames_mysql = CART.read_frames() parser = column_parser() parser.set_int("frame_id", self.frames_mysql["frame_id"]) parser.set_double("wavelength", self.frames_mysql["wavelength"]) parser.set_double("cc", self.frames_mysql["cc"]) try: parser.set_double("slope", self.frames_mysql["slope"]) parser.set_double("offset", self.frames_mysql["offset"]) if self.params.scaling.report_ML: parser.set_double("domain_size_ang", self.frames_mysql["domain_size_ang"]) parser.set_double("half_mosaicity_deg", self.frames_mysql["half_mosaicity_deg"]) except KeyError: pass self._frames_mysql = parser CART.join()
def get_obs_from_mysql(self,params): T = Timer("database") CART = manager(params) db = CART.connection() cursor = db.cursor() cursor.execute("SELECT DISTINCT frame_id FROM %s_spotfinder;"%params.mysql.runtag) AAA = cursor.fetchall() print "From the CV log file text output there are %d distinct frames with spotfinder spots"%len(AAA) if params.max_frames==0: cursor.execute("SELECT * FROM %s_spotfinder;"%params.mysql.runtag) else: cursor.execute("SELECT * FROM %s_spotfinder WHERE frame_id<%d;"%( params.mysql.runtag, params.max_frames)) return cursor.fetchall()
def read_all_mysql(self): print "reading observations from %s database"%(self.params.backend) if self.params.backend == 'MySQL': from xfel.cxi.merging_database import manager elif self.params.backend == 'SQLite': from xfel.cxi.merging_database_sqlite3 import manager else: from xfel.cxi.merging_database_fs import manager CART = manager(self.params) self.millers_mysql = CART.read_indices() self.millers = self.millers_mysql self.observations_mysql = CART.read_observations() parser = column_parser() parser.set_int("hkl_id",self.observations_mysql["hkl_id"]) parser.set_double("i",self.observations_mysql["i"]) parser.set_double("sigi",self.observations_mysql["sigi"]) parser.set_int("frame_id",self.observations_mysql["frame_id"]) parser.set_int("H",self.observations_mysql["original_h"]) parser.set_int("K",self.observations_mysql["original_k"]) parser.set_int("L",self.observations_mysql["original_l"]) self._observations_mysql = parser self.observations = dict(hkl_id=parser.get_int("hkl_id"), i=parser.get_double("i"), sigi=parser.get_double("sigi"), frame_id=parser.get_int("frame_id"), H=parser.get_int("H"), K=parser.get_int("K"), L=parser.get_int("L"), ) self.frames_mysql = CART.read_frames() parser = column_parser() parser.set_int("frame_id",self.frames_mysql["frame_id"]) parser.set_double("wavelength",self.frames_mysql["wavelength"]) parser.set_double("cc",self.frames_mysql["cc"]) try: parser.set_double("slope",self.frames_mysql["slope"]) parser.set_double("offset",self.frames_mysql["offset"]) if self.params.scaling.report_ML: parser.set_double("domain_size_ang",self.frames_mysql["domain_size_ang"]) parser.set_double("half_mosaicity_deg",self.frames_mysql["half_mosaicity_deg"]) except KeyError: pass self._frames_mysql = parser CART.join()
def get_obs_from_mysql(self, params): T = Timer("database") CART = manager(params) db = CART.connection() cursor = db.cursor() cursor.execute("SELECT DISTINCT frame_id FROM %s_spotfinder;" % params.mysql.runtag) AAA = cursor.fetchall() print "From the CV log file text output there are %d distinct frames with spotfinder spots" % len( AAA) if params.max_frames == 0: cursor.execute("SELECT * FROM %s_spotfinder;" % params.mysql.runtag) else: cursor.execute("SELECT * FROM %s_spotfinder WHERE frame_id<%d;" % (params.mysql.runtag, params.max_frames)) return cursor.fetchall()
def scale_all(self, file_names): t1 = time.time() if self.params.backend == 'MySQL': from xfel.cxi.merging_database import manager elif self.params.backend == 'SQLite': from xfel.cxi.merging_database_sqlite3 import manager elif self.params.backend == 'FS': from xfel.cxi.merging_database_fs import manager elif self.params.backend == 'Flex': from xfel.cxi.merging_database_flex import manager import multiprocessing print "Allocating intensities" intensity_proxy = multiprocessing.Array( 'd', self.params.memory.shared_array_allocation, lock=True) print "Allocating sigmas" sigma_proxy = multiprocessing.Array( 'd', self.params.memory.shared_array_allocation, lock=True) print "Allocating frame_id" frame_proxy = multiprocessing.Array( 'l', self.params.memory.shared_array_allocation, lock=True) print "Allocating miller_id" miller_proxy = multiprocessing.Array( 'l', self.params.memory.shared_array_allocation, lock=True) H_proxy = multiprocessing.Array( 'i', self.params.memory.shared_array_allocation, lock=True) K_proxy = multiprocessing.Array( 'i', self.params.memory.shared_array_allocation, lock=True) L_proxy = multiprocessing.Array( 'i', self.params.memory.shared_array_allocation, lock=True) xtal_proxy = multiprocessing.Array( 'c', self.params.memory.shared_array_allocation, lock=True) print "Finished allocating" rows_observation = multiprocessing.Array('l', [0], lock=True) data_dict = dict(intensity_proxy=intensity_proxy, sigma_proxy=sigma_proxy, frame_proxy=frame_proxy, miller_proxy=miller_proxy, H_proxy=H_proxy, K_proxy=K_proxy, L_proxy=L_proxy, rows=rows_observation, xtal_proxy=xtal_proxy) db_mgr = manager(self.params, data_dict) db_mgr.initialize_db(self.miller_set) # Unless the number of requested processes is greater than one, # try parallel multiprocessing on a parallel host. Block until # all database commands have been processed. 
nproc = self.params.nproc if (nproc is None) or (nproc is Auto): nproc = libtbx.introspection.number_of_processors() if nproc > 1: try: import multiprocessing self._scale_all_parallel(file_names, db_mgr) except ImportError, e: print >> self.log, \ "multiprocessing module not available (requires Python >= 2.6)\n" \ "will scale frames serially" self._scale_all_serial(file_names, db_mgr)
def consistency_controls(DATA,params,annotate=False):#DATA is an instance of correction_vectors()
  """Cross-check the spotfinder/labelit results held in DATA against the
  integrated observations stored in the <runtag>_observation table.

  For each frame with matching spots, prints the beam-center agreement
  (mm), the spot counts, the rmsd between integrated and calculated
  spot positions, and the mean correction-vector length; optionally
  plots the spots (params.show_plots), labelled with HKL if annotate.
  """
  PIXEL_SZ = 0.11 # mm/pixel
  CART = manager(params)
  db = CART.connection()
  cursor = db.cursor()
  for iframe in xrange(len(DATA.FRAMES["frame_id"])):
    frame = DATA.FRAMES["frame_id"][iframe]
    # Boolean selection of all DATA observations belonging to this frame.
    selection = (DATA.frame_id == frame)
    match_count = selection.count(True)
    if match_count>0:
      print frame, DATA.frame_id.select(selection)[0], # frame number
      frame_beam_x = DATA.FRAMES["beam_x"][iframe]
      obs_beam_x = DATA.refined_cntr_x.select(selection)[0] * PIXEL_SZ
      print "%7.3f"%(frame_beam_x - obs_beam_x), # agreement of beam_x in mm
      frame_beam_y = DATA.FRAMES["beam_y"][iframe]
      obs_beam_y = DATA.refined_cntr_y.select(selection)[0] * PIXEL_SZ
      print "%7.3f"%(frame_beam_y - obs_beam_y), # agreement of beam_y in mm
      #...The labelit-refined direct beam position agrees with CV_listing logfile output
      file_name = DATA.FRAMES["unique_file_name"][iframe]
      # frame_id_0_base is zero-based, hence the "=%d-1" comparison.
      cursor.execute("SELECT COUNT(*) FROM %s_observation WHERE frame_id_0_base=%d-1;"%(params.mysql.runtag,frame))
      integrated_observations = cursor.fetchall()[0][0]
      print "%4d <? %4d"%(match_count,integrated_observations),file_name,
      cursor.execute( """SELECT t1.detector_x, t1.detector_y, t1.original_h, t1.original_k, t1.original_l FROM %s_observation AS t1 WHERE t1.frame_id_0_base=%d-1 """%( params.mysql.runtag,frame))
      fetched = cursor.fetchall()
      detector_x = [a[0] for a in fetched]
      detector_y = [a[1] for a in fetched]
      spotfx = DATA.spotfx.select(selection)
      spotfy = DATA.spotfy.select(selection)
      spotcx = DATA.spotcx.select(selection)
      spotcy = DATA.spotcy.select(selection)
      hkl = DATA.HKL.select(selection)
      integrated_hkl = [(int(a[2]),int(a[3]),int(a[4])) for a in fetched]
      # Now compute the displacement between integrated and calculated spot position.
      # presumably tells us about the empirical nudge factor.
      sq_displace = flex.double()
      sq_cv = flex.double()
      for icalc,calc_hkl in enumerate(hkl):
        try:
          jinteg = integrated_hkl.index(calc_hkl)
          sq_displace.append( (spotcx[icalc]-detector_x[jinteg])**2 + (spotcy[icalc]-detector_y[jinteg])**2 )
        except ValueError:
          # Calculated reflection was not integrated; skip it for the rmsd.
          pass
        # Correction-vector length: calculated (c) vs found (f) position.
        sq_cv.append( (spotcx[icalc]-spotfx[icalc])**2 + (spotcy[icalc]-spotfy[icalc])**2 )
      if len(sq_displace) > 2:
        print "rmsd=%7.3f"%math.sqrt(flex.mean(sq_displace)),
      else:
        print "rmsd None",
      rmsd_cv = math.sqrt(flex.mean(sq_cv))
      print "cv%7.3f"%rmsd_cv
      if params.show_plots is True:
        import os
        # Tame floating-point exceptions raised from within matplotlib.
        os.environ["BOOST_ADAPTBX_FPE_DEFAULT"]="1"
        from matplotlib import pyplot as plt
        plt.figure(figsize=(9,9))
        # green: calculated; red: found (spotfinder); blue: integrated.
        plt.plot(spotcx,spotcy, markerfacecolor="g",marker=".",markeredgewidth=0,linestyle="None")
        plt.plot(spotfx, spotfy, markerfacecolor="r",marker=".",markeredgewidth=0,linestyle="None")
        plt.plot(detector_x,detector_y, markerfacecolor="b",marker=".",markeredgewidth=0,linestyle="None")
        if annotate:
          for idx in xrange(len(spotfx)):
            plt.annotate("%s"%str(hkl[idx]), xy=(spotfx[idx],spotfy[idx]), xytext=None, xycoords="data", textcoords="data", arrowprops=None, color="red",size=8)
            plt.annotate("%s"%str(hkl[idx]), xy=(spotcx[idx],spotcy[idx]), xytext=None, xycoords="data", textcoords="data", arrowprops=None, color="green",size=8)
          for idx in xrange(len(fetched)):
            plt.annotate("%s"%str(integrated_hkl[idx]), xy=(detector_x[idx],detector_y[idx]), xytext=None, xycoords="data", textcoords="data", arrowprops=None, color="blue",size=8)
        plt.axes().set_aspect("equal")
        plt.show()
def consistency_controls(
        DATA, params,
        annotate=False):  #DATA is an instance of correction_vectors()
    """Cross-check spotfinder/labelit results in DATA against the
    integrated observations in the <runtag>_observation table.

    Per frame with matches: prints beam-center agreement (mm), spot
    counts, rmsd of integrated vs calculated positions, and mean
    correction-vector length; plots spots when params.show_plots,
    with HKL labels when annotate is True.
    """
    PIXEL_SZ = 0.11  # mm/pixel
    CART = manager(params)
    db = CART.connection()
    cursor = db.cursor()
    for iframe in xrange(len(DATA.FRAMES["frame_id"])):
        frame = DATA.FRAMES["frame_id"][iframe]
        # Boolean selection of DATA observations belonging to this frame.
        selection = (DATA.frame_id == frame)
        match_count = selection.count(True)
        if match_count > 0:
            print frame, DATA.frame_id.select(selection)[0],  # frame number
            frame_beam_x = DATA.FRAMES["beam_x"][iframe]
            obs_beam_x = DATA.refined_cntr_x.select(selection)[0] * PIXEL_SZ
            print "%7.3f" % (frame_beam_x - obs_beam_x),  # agreement of beam_x in mm
            frame_beam_y = DATA.FRAMES["beam_y"][iframe]
            obs_beam_y = DATA.refined_cntr_y.select(selection)[0] * PIXEL_SZ
            print "%7.3f" % (frame_beam_y - obs_beam_y),  # agreement of beam_y in mm
            #...The labelit-refined direct beam position agrees with CV_listing logfile output
            file_name = DATA.FRAMES["unique_file_name"][iframe]
            # frame_id_0_base is zero-based, hence the "=%d-1" comparison.
            cursor.execute(
                "SELECT COUNT(*) FROM %s_observation WHERE frame_id_0_base=%d-1;"
                % (params.mysql.runtag, frame))
            integrated_observations = cursor.fetchall()[0][0]
            print "%4d <? %4d" % (match_count, integrated_observations), file_name,
            cursor.execute(
                """SELECT t1.detector_x, t1.detector_y, t1.original_h, t1.original_k, t1.original_l FROM %s_observation AS t1 WHERE t1.frame_id_0_base=%d-1 """ % (params.mysql.runtag, frame))
            fetched = cursor.fetchall()
            detector_x = [a[0] for a in fetched]
            detector_y = [a[1] for a in fetched]
            spotfx = DATA.spotfx.select(selection)
            spotfy = DATA.spotfy.select(selection)
            spotcx = DATA.spotcx.select(selection)
            spotcy = DATA.spotcy.select(selection)
            hkl = DATA.HKL.select(selection)
            integrated_hkl = [(int(a[2]), int(a[3]), int(a[4])) for a in fetched]
            # Now compute the displacement between integrated and calculated spot position.
            # presumably tells us about the empirical nudge factor.
            sq_displace = flex.double()
            sq_cv = flex.double()
            for icalc, calc_hkl in enumerate(hkl):
                try:
                    jinteg = integrated_hkl.index(calc_hkl)
                    sq_displace.append((spotcx[icalc] - detector_x[jinteg])**2 +
                                       (spotcy[icalc] - detector_y[jinteg])**2)
                except ValueError:
                    # Calculated reflection was not integrated; omit from rmsd.
                    pass
                # Correction-vector length: calculated (c) vs found (f) spot.
                sq_cv.append((spotcx[icalc] - spotfx[icalc])**2 +
                             (spotcy[icalc] - spotfy[icalc])**2)
            if len(sq_displace) > 2:
                print "rmsd=%7.3f" % math.sqrt(flex.mean(sq_displace)),
            else:
                print "rmsd None",
            rmsd_cv = math.sqrt(flex.mean(sq_cv))
            print "cv%7.3f" % rmsd_cv
            if params.show_plots is True:
                import os
                # Tame floating-point exceptions raised under matplotlib.
                os.environ["BOOST_ADAPTBX_FPE_DEFAULT"] = "1"
                from matplotlib import pyplot as plt
                plt.figure(figsize=(9, 9))
                # green: calculated; red: found (spotfinder); blue: integrated.
                plt.plot(spotcx, spotcy, markerfacecolor="g", marker=".",
                         markeredgewidth=0, linestyle="None")
                plt.plot(spotfx, spotfy, markerfacecolor="r", marker=".",
                         markeredgewidth=0, linestyle="None")
                plt.plot(detector_x, detector_y, markerfacecolor="b", marker=".",
                         markeredgewidth=0, linestyle="None")
                if annotate:
                    for idx in xrange(len(spotfx)):
                        plt.annotate("%s" % str(hkl[idx]), xy=(spotfx[idx], spotfy[idx]),
                                     xytext=None, xycoords="data", textcoords="data",
                                     arrowprops=None, color="red", size=8)
                        plt.annotate("%s" % str(hkl[idx]), xy=(spotcx[idx], spotcy[idx]),
                                     xytext=None, xycoords="data", textcoords="data",
                                     arrowprops=None, color="green", size=8)
                    for idx in xrange(len(fetched)):
                        plt.annotate("%s" % str(integrated_hkl[idx]),
                                     xy=(detector_x[idx], detector_y[idx]),
                                     xytext=None, xycoords="data", textcoords="data",
                                     arrowprops=None, color="blue", size=8)
                plt.axes().set_aspect("equal")
                plt.show()