def importInstrument(self):
    """Import the TEM and CCD camera instrument records used by the source
    session into the destination database.

    Returns:
        (camera InstrumentData, tem InstrumentData, high_tension) tuple, or
        (None, None, 200000) for old uploaded sessions with no tem record.
    Side effects:
        Sets self.is_upload and leaves the 'leginondata' configuration
        pointing at the destination database on the normal path.
    """
    print "Importing instrument...."
    self.is_upload = False
    # guess instrument from the last image
    sinedon.setConfig('leginondata', db=self.source_dbname)
    q = leginondata.AcquisitionImageData(session=self.getSourceSession())
    last_image = self.research(q, True)
    # we know there are images for the session.
    tem = last_image['scope']['tem']
    camera = last_image['camera']['ccdcamera']
    if tem is None:
        # old uploaded session such as 12jun29b
        self.is_upload = True
        # 200000 is presumably a fallback high-tension value in volts —
        # NOTE(review): confirm callers expect this default.
        return None, None, 200000
    high_tension = last_image['scope']['high tension']
    q = leginondata.InstrumentData()
    source_temdata = q.direct_query(tem.dbid)
    source_cameradata = q.direct_query(camera.dbid)
    if 'appion' in source_temdata['name'].lower():
        # a tem named "appion*" marks an uploaded (non-microscope) session
        self.is_upload = True
    # switch configuration to the destination before inserting
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    tem.insert(archive=True)
    camera.insert(archive=True)
    return source_cameradata, source_temdata, high_tension
def run(self):
    """Copy the administrator user record and remember its database id."""
    # all queries must go against the source database
    sinedon.setConfig('leginondata', db=self.source_dbname)
    admin_query = leginondata.UserData(username='******')
    admin = admin_query.query()[0]
    self.publish([admin])
    self.adminid = admin.dbid
def importMosaicTiles(self): source_session = self.getSourceSession() print "Importing mosaic tiles...." sinedon.setConfig('leginondata', db=self.source_dbname) q = leginondata.MosaicTileData(session=source_session) results = self.research(q) self.publish(results)
def makeTables(sinedonname,modulename,dbname=None,xmlfile=None,check_exist=False):
    """Create SQL tables (or an XML table description) for every sinedon
    Data class found in `modulename`.

    sinedonname -- sinedon configuration section to connect through
    modulename -- dotted module path containing the Data subclasses
    dbname -- optional alternate database name
    xmlfile -- when given, write XML definitions instead of creating tables
    check_exist -- when True, only create tables that do not already exist
    """
    ### use alternate db name if desired
    if dbname is not None:
        print "setting alternate database name"
        sinedon.setConfig(sinedonname, db=dbname)
    ### connect to DB
    dbconf = sinedon.getConfig(sinedonname)
    dbd = sqldict.SQLDict(**dbconf)
    ### import desire module
    module = __import__(modulename)
    # strip the package prefix to get the leaf module object
    modbase = re.sub("^.*\.", "", modulename)
    tableData = getattr(module, modbase) ## hope this works
    ### get module members
    funcs = inspect.getmembers(tableData, inspect.isclass)
    print "Found %d classes in module"%(len(funcs))
    #print funcs
    ### parse members
    count = 0
    if xmlfile is not None:
        xmlf = open(xmlfile, 'w')
        xmlf.write("<defaulttables>\n <definition>\n")
    for func in funcs:
        ### Check if member is valid len 2 tuple
        if len(func) != 2:
            continue
        ### Check if member is a sinedon Data class
        if not issubclass(func[1], sinedon.data.Data) or func[0] == "Data":
            continue
        ### Create table
        tablename = func[0]
        tableclass = func[1]()
        table = (dbname, tablename)
        definition, formatedData = sqldict.dataSQLColumns(tableclass, False)
        create_flag=False
        if check_exist:
            try:
                dbd.diffSQLTable(tablename,definition)
            except (MySQLdb.ProgrammingError, MySQLdb.OperationalError), e:
                errno = e.args[0]
                ## some version of mysqlpython parses the exception differently
                if not isinstance(errno, int):
                    errno = errno.args[0]
                ## 1146: table does not exist
                if errno in (1146,):
                    print tablename,' does not yet exist, and will be created'
                    create_flag=True
        else:
            create_flag=True
        if create_flag:
            if xmlfile is None:
                dbd.createSQLTable(table, definition)
            else:
                definitionToXml(xmlf, tablename, definition)
        count += 1
def importByImages(self):
    """Copy the source session's acquisition images (and the target list
    each image belongs to) into the destination database, oldest first.

    Images whose preset label is in exclude_preset_list are skipped.
    Side effects: appends every source image dbid to self.imageids.
    """
    source_session = self.getSourceSession()
    sinedon.setConfig('leginondata', db=self.source_dbname)
    q = leginondata.AcquisitionImageData(session=source_session)
    images = q.query()
    # query() returns newest first; reverse so inserts happen oldest first
    images.reverse()
    print 'number of images in the session = %d' % len(images)
    targetlist = {}
    for image in images:
        q = leginondata.ImageTargetListData(image=image)
        targetlist[image.dbid] = self.research(q, True)
        self.imageids.append(image.dbid)
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    skipped = 0
    for i, image in enumerate(images):
        # progress indicator: a dot per image, newline every 20
        if not (i + 1) % 20:
            print ""
        else:
            print ".",
        if image['label'] in exclude_preset_list:
            skipped += 1
            continue
        imageid = image.dbid
        image.insert(archive=True)
        if targetlist[imageid]:
            targetlist[imageid].insert(archive=True)
    print '\nimported %d images' % (len(images) - skipped)
def setDBfromProjectId(projectid, die=True):
    """Point the 'appiondata' sinedon configuration at the processing
    database of the given project.

    Returns True on success, False when the project has no appion database.
    """
    dbname = getAppionDBFromProjectId(projectid, die=die)
    if dbname is None:
        return False
    sinedon.setConfig('appiondata', db=dbname)
    apDisplay.printColor("Connected to database: '"+dbname+"'", "green")
    return True
def findBrightImageFromNorm(self,normdata):
    '''
    Find BrightImageData based on imported NormImageData. This is needed
    for older data since BrightImageData was not linked to
    AcquisitionImages previously.

    Returns the most recent bright image taken before the norm image,
    or None when no matching bright image exists.
    '''
    if normdata['bright']:
        return normdata['bright']
    # NOTE(review): this block queries through 'projectdata' while sibling
    # importers use 'leginondata' for the same image classes — confirm
    # this is intentional for this archiver.
    sinedon.setConfig('projectdata', db=self.source_dbname)
    timestamp = normdata.timestamp
    # match the camera configuration of the norm image
    normcam = normdata['camera']
    qcam = projectdata.CameraEMData(dimension=normcam['dimension'],
        offset=normcam['offset'], binning=normcam['binning'],
        ccdcamera=normcam['ccdcamera'])
    qcam['exposure type'] = 'normal'
    qcam['energy filtered'] = normcam['energy filtered']
    # match the scope configuration of the norm image
    normscope = normdata['scope']
    qscope = projectdata.ScopeEMData(tem=normscope['tem'])
    qscope['high tension'] = normscope['high tension']
    q = projectdata.BrightImageData(camera=qcam, scope=qscope,
        channel=normdata['channel'])
    brightlist = q.query()
    # Results are newest-first, so the first record older than the norm
    # is the most recent bright image taken before it.
    # Bug fix: the old loop raised NameError when the query came back
    # empty, and fell through to an arbitrary *newer* bright image when
    # none predated the norm.  Callers already guard against None.
    for brightdata in brightlist:
        if brightdata.timestamp < timestamp:
            return brightdata
    return None
def checkConflicts(self):
    """Validate parameters and set up database, stack and DD-stack state.

    Side effects: may repoint the 'appiondata' sinedon configuration, and
    populates self.dd, self.stackdata, self.stackparts, self.sessiondata
    and self.ddstackpath.
    """
    ### setup correct database after we have read the project id
    if 'projectid' in self.params and self.params['projectid'] is not None:
        apDisplay.printMsg("Using split database")
        # use a project database
        newdbname = apProject.getAppionDBFromProjectId(
            self.params['projectid'])
        sinedon.setConfig('appiondata', db=newdbname)
        apDisplay.printColor("Connected to database: '" + newdbname + "'",
            "green")
    # DD processes
    self.dd = apDDprocess.DDStackProcessing()
    print self.dd
    # get stack data
    self.stackdata = appiondata.ApStackData.direct_query(
        self.params['stackid'])
    self.stackparts = apStack.getStackParticlesFromId(
        self.params['stackid'], msg=True)
    self.sessiondata = apStack.getSessionDataFromStackId(
        self.params['stackid'])
    # query image: use the first particle's source micrograph
    qimage = self.stackparts[0]['particle']['image']
    # DD info
    self.dd.setImageData(qimage)
    self.dd.setDDStackRun(self.params['ddstackid'])
    self.ddstackpath = self.dd.getDDStackRun()['path']['path']
def importInstrument(self):
    """Import the TEM and CCD camera instrument records used by the source
    session into the destination database.

    Returns:
        (camera InstrumentData, tem InstrumentData, high_tension) tuple, or
        (None, None, 200000) for old uploaded sessions with no tem record.
    Side effects:
        Sets self.is_upload and leaves the 'leginondata' configuration
        pointing at the destination database on the normal path.
    """
    print "Importing instrument...."
    self.is_upload = False
    # guess instrument from the last image
    sinedon.setConfig('leginondata', db=self.source_dbname)
    q = leginondata.AcquisitionImageData(session=self.getSourceSession())
    last_image = self.research(q,True)
    # we know there are images for the session.
    tem = last_image['scope']['tem']
    camera = last_image['camera']['ccdcamera']
    if tem is None:
        # old uploaded session such as 12jun29b
        self.is_upload = True
        # 200000 is presumably a fallback high-tension value in volts —
        # NOTE(review): confirm callers expect this default.
        return None, None, 200000
    high_tension = last_image['scope']['high tension']
    q = leginondata.InstrumentData()
    source_temdata = q.direct_query(tem.dbid)
    source_cameradata = q.direct_query(camera.dbid)
    if 'appion' in source_temdata['name'].lower():
        # a tem named "appion*" marks an uploaded (non-microscope) session
        self.is_upload = True
    # switch configuration to the destination before inserting
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    tem.insert(archive=True)
    camera.insert(archive=True)
    return source_cameradata, source_temdata, high_tension
def checkConflicts(self):
    """Validate parameters and set up database, stack and DD-stack state.

    Side effects: may repoint the 'appiondata' sinedon configuration, and
    populates self.dd, self.stackdata, self.stackparts, self.sessiondata
    and self.ddstackpath.
    """
    ### setup correct database after we have read the project id
    if 'projectid' in self.params and self.params['projectid'] is not None:
        apDisplay.printMsg("Using split database")
        # use a project database
        newdbname = apProject.getAppionDBFromProjectId(self.params['projectid'])
        sinedon.setConfig('appiondata', db=newdbname)
        apDisplay.printColor("Connected to database: '"+newdbname+"'", "green")
    # DD processes
    self.dd = apDDprocess.DDStackProcessing()
    print self.dd
    # get stack data
    self.stackdata = appiondata.ApStackData.direct_query(self.params['stackid'])
    self.stackparts = apStack.getStackParticlesFromId(self.params['stackid'], msg=True)
    self.sessiondata = apStack.getSessionDataFromStackId(self.params['stackid'])
    # query image: use the first particle's source micrograph
    qimage = self.stackparts[0]['particle']['image']
    # DD info
    self.dd.setImageData(qimage)
    self.dd.setDDStackRun(self.params['ddstackid'])
    self.ddstackpath = self.dd.getDDStackRun()['path']['path']
def importByImages(self):
    """Copy the source session's acquisition images (and the target list
    each image belongs to) into the destination database, oldest first.

    Images whose preset label is in exclude_preset_list are skipped.
    Side effects: appends every source image dbid to self.imageids.
    """
    source_session = self.getSourceSession()
    sinedon.setConfig('leginondata', db=self.source_dbname)
    q = leginondata.AcquisitionImageData(session=source_session)
    images = q.query()
    # query() returns newest first; reverse so inserts happen oldest first
    images.reverse()
    print 'number of images in the session = %d' % len(images)
    targetlist = {}
    for image in images:
        q = leginondata.ImageTargetListData(image=image)
        targetlist[image.dbid] = self.research(q,True)
        self.imageids.append(image.dbid)
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    skipped = 0
    for i,image in enumerate(images):
        # progress indicator: a dot per image, newline every 20
        if not (i+1) % 20:
            print ""
        else:
            print ".",
        if image['label'] in exclude_preset_list:
            skipped += 1
            continue
        imageid = image.dbid
        image.insert(archive=True)
        if targetlist[imageid]:
            targetlist[imageid].insert(archive=True)
    print '\nimported %d images' % (len(images) - skipped)
def run(self): userids = self.getEssentialUsers() for id in userids: sinedon.setConfig('leginondata', db=self.source_dbname) print 'querying user %d' % id userdata = leginondata.UserData().direct_query(id) self.publish([userdata,])
def importProjectValueDependentData(self,dataclassname,value,search_alias): sinedon.setConfig('projectdata', db=self.source_dbname) print "Importing %s...." % (dataclassname) q = getattr(projectdata,dataclassname)() q[search_alias] = value results = self.research(q) self.publish(results) return results
def setDestinationSession(self, sessionname):
    """Look up sessionname in the destination database and cache the
    record on self.destination_session (left as None when not found)."""
    self.destination_session = None
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    found = leginondata.SessionData(name=sessionname).query()
    if found:
        self.destination_session = found[0]
    self.reset()
def run(self): userids = self.getEssentialUsers() for id in userids: sinedon.setConfig('leginondata', db=self.source_dbname) print 'querying user %d' % id userdata = leginondata.UserData().direct_query(id) self.publish([ userdata, ])
def importQueue(self): source_session = self.getSourceSession() print "Importing queuing...." sinedon.setConfig('leginondata', db=self.source_dbname) q = leginondata.QueueData(session=source_session) r = q.query() r.reverse() sinedon.setConfig('leginondata', db=self.destination_dbname) for queue in r: queue.insert(archive=True)
def publish(self,results):
    """Insert every queried record into the destination database, then
    reset the configuration back to the source database."""
    if not results:
        return
    # sinedon configuration must point at the destination before inserting
    sinedon.setConfig('projectdata', db=self.destination_dbname)
    for record in results:
        record.insert(archive=True)
    self.reset()
def importProject(self): print "Importing project...." projectdata = self.getSourceProject() sinedon.setConfig('projectdata', db=self.destination_dbname) projectdata.insert(force=False,archive=True) projectdata = self.getDestinationProject() if not projectdata: self.escape("Session Not Inserted Successfully") return
def importDeQueue(self): source_session = self.getSourceSession() # ImageTargetLists that have no targets on will also be imported in this function print "Importing dequeuing...." sinedon.setConfig('leginondata', db=self.source_dbname) q = leginondata.DequeuedImageTargetListData(session=source_session) r = q.query() r.reverse() sinedon.setConfig('leginondata', db=self.destination_dbname) for queue in r: queue.insert(archive=True)
def checkConflicts(self):
    """Validate run parameters and load stack (or aligned-stack) metadata.

    Populates self.stack ('data', 'apix', 'boxsize', 'file'), self.clipsize
    and self.mpirun; may repoint the 'appiondata' configuration.
    """
    ### setup correct database after we have read the project id
    if 'projectid' in self.params and self.params['projectid'] is not None:
        apDisplay.printMsg("Using split database")
        # use a project database
        newdbname = apProject.getAppionDBFromProjectId(self.params['projectid'])
        sinedon.setConfig('appiondata', db=newdbname)
        apDisplay.printColor("Connected to database: '"+newdbname+"'", "green")
    ### check stack-id conflicts before querying with possibly-None ids.
    # Bug fix: these two checks used to run after the queries below, so a
    # missing/conflicting id crashed inside the query instead of giving
    # the intended error message.
    if self.params['stackid'] is None and self.params['alignstackid'] is None:
        apDisplay.printError("stack id OR alignstack id was not defined")
    if self.params['stackid'] is not None and self.params['alignstackid'] is not None:
        apDisplay.printError("either specify stack id OR alignstack id, not both")
    ### get stack data
    self.stack = {}
    if self.params['stackid'] is not None:
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
            self.stack['data']['name'])
    else:
        self.stack['data'] = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
        # Bug fix: these values were read from self.stack itself
        # (self.stack['pixelsize'] etc.), which raised KeyError; they live
        # on the queried alignstack record.
        # NOTE(review): confirm the ApAlignStackData field names
        # ('pixelsize', 'boxsize', 'imagicfile') against the schema.
        self.stack['apix'] = self.stack['data']['pixelsize']
        self.stack['boxsize'] = self.stack['data']['boxsize']
        self.stack['file'] = self.stack['data']['imagicfile']
    if self.params['generations'] is None:
        apDisplay.printError("number of generations was not provided")
    maxparticles = 500000
    if self.params['numpart'] > maxparticles:
        apDisplay.printError("too many particles requested, max: "
            + str(maxparticles) + " requested: " + str(self.params['numpart']))
    if self.params['numpart'] > apFile.numImagesInStack(self.stack['file']):
        apDisplay.printError("trying to use more particles "+str(self.params['numpart'])
            +" than available "+str(apFile.numImagesInStack(self.stack['file'])))
    if self.params['numpart'] is None:
        self.params['numpart'] = apFile.numImagesInStack(self.stack['file'])
    # Bug fix: use the boxsize already determined above; calling
    # apStack.getStackBoxsize(None) failed in alignstack mode.
    boxsize = self.stack['boxsize']
    if self.params['ou'] is None:
        self.params['ou'] = (boxsize / 2.0) - 2
    # clip size rounded down to an even number after binning
    self.clipsize = int(math.floor(boxsize/float(self.params['bin']*2)))*2
    if self.params['clipsize'] is not None:
        if self.params['clipsize'] > self.clipsize:
            apDisplay.printError("requested clipsize is too big %d > %d"
                %(self.params['clipsize'],self.clipsize))
        self.clipsize = self.params['clipsize']
    self.mpirun = self.checkMPI()
    if self.mpirun is None:
        apDisplay.printError("There is no MPI installed")
def importImageDDinfo(self):
    '''
    Import DDInfoData based on imported image list. This must be done
    after images are imported.
    '''
    print "Importing image ddinfo...."
    #source_session = self.getSourceSession()
    for imageid in self.imageids:
        # publish() resets the configuration, so re-select the source
        # database on every iteration
        sinedon.setConfig('leginondata', db=self.source_dbname)
        image = leginondata.AcquisitionImageData().direct_query(imageid)
        q = leginondata.DDinfoValueData(camera=image['camera'])
        results = self.research(q)
        self.publish(results)
def importFocusResults(self): ''' Import Focuser Results ''' source_session = self.getSourceSession() print "Importing focus results...." sinedon.setConfig('leginondata', db=self.source_dbname) qfocus = leginondata.FocuserResultData(session=source_session) focii = qfocus.query() focii.reverse() sinedon.setConfig('leginondata', db=self.destination_dbname) for focus in focii: focus.insert(archive=True)
def research(self,q,most_recent=False): ''' Query results from source database. Sorted by entry time. Oldest fist ''' # configuration must be set before any query sinedon.setConfig('projectdata', db=self.source_dbname) if most_recent: r = q.query(results=1) if r: return r[0] else: r = q.query() r.reverse() return r
def importSession(self, comment=''):
    """Copy the source session record into the destination database.

    comment -- when non-empty, replaces the session comment before insert
    Calls self.escape() when the read-back of the inserted session fails.
    """
    print "Importing session...."
    session = self.getSourceSession()
    # kept for reference; not used below
    source_sessionid = session.dbid
    # change session description if needed
    if comment:
        self.replaceItem(session,'comment',comment)
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    session.insert(force=False,archive=True)
    q = leginondata.SessionData()
    # NOTE(review): session.dbid here reads the id after the archive
    # insert, not source_sessionid — confirm this is the intended lookup.
    sessiondata = q.direct_query(session.dbid)
    if not sessiondata:
        self.escape("Session Not Inserted Successfully")
        return
    self.setDestinationSession(sessiondata)
def importSession(self, comment=''):
    """Copy the source session record into the destination database.

    comment -- when non-empty, replaces the session comment before insert
    Calls self.escape() when the read-back of the inserted session fails.
    """
    print "Importing session...."
    session = self.getSourceSession()
    # kept for reference; not used below
    source_sessionid = session.dbid
    # change session description if needed
    if comment:
        self.replaceItem(session, 'comment', comment)
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    session.insert(force=False, archive=True)
    q = leginondata.SessionData()
    # NOTE(review): session.dbid here reads the id after the archive
    # insert, not source_sessionid — confirm this is the intended lookup.
    sessiondata = q.direct_query(session.dbid)
    if not sessiondata:
        self.escape("Session Not Inserted Successfully")
        return
    self.setDestinationSession(sessiondata)
def importBrightImages(self):
    '''
    Import BrightImageData based on imported NormImageData. This is
    needed for older data since BrightImageData was not linked to
    AcquisitionImages previously.
    '''
    print "Importing old BrightImages...."
    destination_session = self.getDestinationSession()
    # norms were already imported, so look them up in the destination
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    q = leginondata.NormImageData(session=destination_session)
    r = q.query()
    allbrights = []
    for normdata in r:
        # this helper switches the configuration to the source database
        allbrights.append(self.findBrightImageFromNorm(normdata))
    # switch back to the destination before inserting
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    for bright in allbrights:
        if bright:
            bright.insert(archive=True)
def importDrifts(self):
    """Copy DriftData of the source session plus the
    DriftMonitorResultData records that reference each drift (monitor
    results carry no session of their own)."""
    source_session = self.getSourceSession()
    print "Importing drift...."
    sinedon.setConfig('leginondata', db=self.source_dbname)
    q = leginondata.DriftData(session=source_session)
    drifts = q.query()
    # insert oldest first
    drifts.reverse()
    allmonitors = []
    # driftmonitor result has no session
    for source_drift in drifts:
        q = leginondata.DriftMonitorResultData(final=source_drift)
        monitors = q.query()
        monitors.reverse()
        allmonitors.extend(monitors)
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    for drift in drifts:
        drift.insert(archive=True)
    for monitor in allmonitors:
        monitor.insert(archive=True)
def researchCalibration(self, classname, **kwargs):
    '''
    Find calibration that may be used by the source session. This could
    be those in the session or the last one in a previous session.

    classname -- calibration data class name passed to self.makequery
    Returns matching records ordered oldest first.
    '''
    source_session = self.getSourceSession()
    sinedon.setConfig('leginondata', db=self.source_dbname)
    # search in the session
    q = self.makequery(classname,kwargs)
    q['session'] = source_session
    r1 = q.query()
    # search for one before session started
    q = self.makequery(classname,kwargs)
    t = source_session.timestamp
    sessiontime = self.makeTimeStringFromTimeStamp(t)
    # timelimit is a tab-separated "from<TAB>to" timestamp range
    r2 = q.query(results=1,timelimit='19000000000000\t%s' % (sessiontime,))
    if r2:
        r1.append(r2[0])
    r1.reverse()
    return r1
def researchCalibration(self, classname, **kwargs):
    '''
    Find calibration that may be used by the source session. This could
    be those in the session or the last one in a previous session.

    classname -- calibration data class name passed to self.makequery
    Returns matching records ordered oldest first.
    '''
    source_session = self.getSourceSession()
    sinedon.setConfig('leginondata', db=self.source_dbname)
    # search in the session
    q = self.makequery(classname, kwargs)
    q['session'] = source_session
    r1 = q.query()
    # search for one before session started
    q = self.makequery(classname, kwargs)
    t = source_session.timestamp
    sessiontime = self.makeTimeStringFromTimeStamp(t)
    # timelimit is a tab-separated "from<TAB>to" timestamp range
    r2 = q.query(results=1, timelimit='19000000000000\t%s' % (sessiontime, ))
    if r2:
        r1.append(r2[0])
    r1.reverse()
    return r1
def importGainReferences(self):
    """Copy the gain references (norms, their alternative-channel norms,
    and the dark image of each norm) used by the source session's
    acquisition images into the destination database."""
    print "Importing GainReferences...."
    # research() selects the source database configuration before querying
    q = leginondata.AcquisitionImageData(session=self.getSourceSession())
    images = self.research(q, False)
    c_client = correctorclient.CorrectorClient()
    norm_ids = []
    for image in images:
        if image['norm'] and image['norm'].dbid not in norm_ids:
            norm_ids.append(image['norm'].dbid)
            # also archive alternative channel
            altnorm = c_client.getAlternativeChannelNorm(image['norm'])
            if altnorm and altnorm.dbid not in norm_ids:
                norm_ids.append(altnorm.dbid)
    # ascending sort
    norm_ids.sort()
    norms = []
    for dbid in norm_ids:
        norms.append(leginondata.NormImageData.direct_query(dbid))
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    for norm in norms:
        if norm:
            norm.insert(archive=True)
            if norm['dark']:
                norm['dark'].insert(archive=True)
def importGainReferences(self):
    """Copy the gain references (norms, their alternative-channel norms,
    and the dark image of each norm) used by the source session's
    acquisition images into the destination database."""
    print "Importing GainReferences...."
    # research() selects the source database configuration before querying
    q = leginondata.AcquisitionImageData(session=self.getSourceSession())
    images = self.research(q,False)
    c_client = correctorclient.CorrectorClient()
    norm_ids = []
    for image in images:
        if image['norm'] and image['norm'].dbid not in norm_ids:
            norm_ids.append(image['norm'].dbid)
            # also archive alternative channel
            altnorm = c_client.getAlternativeChannelNorm(image['norm'])
            if altnorm and altnorm.dbid not in norm_ids:
                norm_ids.append(altnorm.dbid)
    # ascending sort
    norm_ids.sort()
    norms = []
    for dbid in norm_ids:
        norms.append(leginondata.NormImageData.direct_query(dbid))
    sinedon.setConfig('leginondata', db=self.destination_dbname)
    for norm in norms:
        if norm:
            norm.insert(archive=True)
            if norm['dark']:
                norm['dark'].insert(archive=True)
def __initDB (self, jobObject, job): retValue = None try: #Determine the appion project database name using the project id. projDBConfig = sinedon.getConfig('projectdata') dbConnection = MySQLdb.connect(**projDBConfig) cursor = dbConnection.cursor() query = "SELECT appiondb from processingdb WHERE `REF|projects|project`=%d" % (jobObject.getProjectId()) queryResult=cursor.execute(query) if queryResult: projDB = cursor.fetchone()[0] projDBConfig = sinedon.setConfig('appiondata', db=projDB) retValue = projDB cursor.close() dbConnection.close() except MySQLdb.DatabaseError, e: sys.stderr.write("Warning: Failure determining project database: %s \n" % (e))
def __initDB (self, jobObject, job):
    """Resolve the appion processing database for the job's project and
    point the 'appiondata' sinedon configuration at it.

    Warns on stderr (and leaves the configuration unchanged) when the
    lookup fails.
    """
    retValue = None
    try:
        #Determine the appion project database name using the project id.
        projDBConfig = sinedon.getConfig('projectdata')
        dbConnection = MySQLdb.connect(**projDBConfig)
        # autocommit so the SELECT sees the latest committed rows
        dbConnection.autocommit(True)
        cursor = dbConnection.cursor()
        query = "SELECT appiondb from processingdb WHERE `REF|projects|project`=%d" % (jobObject.getProjectId())
        queryResult=cursor.execute(query)
        if queryResult:
            projDB = cursor.fetchone()[0]
            projDBConfig = sinedon.setConfig('appiondata', db=projDB)
            retValue = projDB
        cursor.close()
        dbConnection.close()
    except MySQLdb.DatabaseError, e:
        sys.stderr.write("Warning: Failure determining project database: %s \n" % (e))
    # NOTE(review): retValue is computed but not returned in the visible
    # span — confirm whether a `return retValue` follows outside this view.
# Update an ApAppionJobData status row, optionally switching to the
# project's appion processing database first.
status = sys.argv[2]
# Bug fix: projectid was only assigned when a third argument was given,
# so the `projectid is not None` test below raised NameError for short
# command lines.
projectid = None
if len(sys.argv) > 3:
    projectid = sys.argv[3]
# set new db: look up the project's appion processing database and point
# the 'appiondata' configuration at it
if projectid is not None:
    pjc = sinedon.getConfig('projectdata')
    q = "SELECT appiondb FROM processingdb WHERE `REF|projects|project`='%s'" % (projectid,)
    dbc = MySQLdb.Connect(**pjc)
    dbc.autocommit(True)
    cursor = dbc.cursor()
    result = cursor.execute(q)
    if result:
        newdbname, = cursor.fetchone()
        sinedon.setConfig('appiondata', db=newdbname)
    cursor.close()
    dbc.close()
# connect to database
c = sinedon.getConfig('appiondata')
dbc = MySQLdb.Connect(**c)
dbc.autocommit(True)
cursor = dbc.cursor()
# execute update (jobid is bound earlier in this script, outside this span)
q = "UPDATE ApAppionJobData SET `status` = '%s' WHERE `DEF_id` = '%s'" %(status,jobid)
cursor.execute(q)
# close
def checkConflicts(self):
    """Validate parameters and gather stack, frame and dose information
    needed for DD frame processing.

    Side effects: may repoint the 'appiondata' configuration and fills in
    several derived entries of self.params plus self.dd/self.stackdata/
    self.stackparts/self.sessiondata/self.ddstackpath.
    """
    ### setup correct database after we have read the project id
    if 'projectid' in self.params and self.params['projectid'] is not None:
        apDisplay.printMsg("Using split database")
        # use a project database
        newdbname = apProject.getAppionDBFromProjectId(
            self.params['projectid'])
        sinedon.setConfig('appiondata', db=newdbname)
        apDisplay.printColor("Connected to database: '" + newdbname + "'",
                             "green")
    if self.params['stackid'] is None:
        apDisplay.printError("stackid was not defined")
    if self.params['expweight'] is False:
        apDisplay.printWarning(
            "Exposure weighting is turned off, make sure this is what you want"
        )
    if self.params['localavg'] is False:
        apDisplay.printWarning(
            "Trajectory local averaging is turned off, make sure this is what you want"
        )
    # DD processes
    self.dd = apDDprocess.DDStackProcessing()
    print self.dd
    # get stack data
    self.stackdata = appiondata.ApStackData.direct_query(
        self.params['stackid'])
    self.stackparts = apStack.getStackParticlesFromId(
        self.params['stackid'], msg=True)
    self.sessiondata = apStack.getSessionDataFromStackId(
        self.params['stackid'])
    # query image: use the first particle's source micrograph
    qimage = self.stackparts[0]['particle']['image']
    # pixel size info
    self.params['apix'] = apStack.getMicrographPixelSizeFromStackId(
        self.params['stackid'])
    self.params['box'] = self.stackdata['boxsize']
    # convert radius to pixels — assumes input is in the same length unit
    # as apix; NOTE(review): confirm units.
    self.params['particleradius'] = self.params[
        'particleradius'] / self.params['apix']
    if self.params['particleradius'] > self.params['box'] / 2.0:
        apDisplay.printWarning(
            "specified particle radius greater than box radius, \
setting particle radius to 0.8 * boxsize")
    # micrograph & frame info
    frames = qimage['use frames']
    nframes = len(frames)
    if self.params['framelastali'] is None:
        self.params['framelastali'] = frames[-1]
    if self.params['framelastave'] is None:
        self.params['framelastave'] = frames[-1]
    # microscope kV (high tension is stored in volts)
    self.params['kv'] = qimage['scope']['high tension'] / 1000.0
    # query exposure per frame, if not set here
    if self.params['total_dose'] is not None:
        dose = self.params['total_dose']
    else:
        try:
            dose = apDatabase.getDoseFromImageData(qimage)
        except:
            apDisplay.printError(
                "dose not specified and not in database, please specify explicitly"
            )
    if self.params['expperframe'] is None and self.params[
            'expweight'] is True:
        if dose is not None:
            self.params['expperframe'] = dose / nframes
        else:
            apDisplay.printError(
                "exposure per frame needs to be specified, cannot find in database"
            )
    # dimensions
    self.params['framex'] = int(
        apDatabase.getDimensionsFromImageData(qimage)['x'])
    self.params['framey'] = int(
        apDatabase.getDimensionsFromImageData(qimage)['y'])
    # DD info
    self.dd.setImageData(qimage)
    self.dd.setDDStackRun(self.params['ddstackid'])
    self.ddstackpath = self.dd.getDDStackRun()['path']['path']
def makeTables(sinedonname, modulename, dbname=None, xmlfile=None, check_exist=False):
    """Create SQL tables (or an XML table description) for every sinedon
    Data class found in `modulename`.

    sinedonname -- sinedon configuration section to connect through
    modulename -- dotted module path containing the Data subclasses
    dbname -- optional alternate database name
    xmlfile -- when given, write XML definitions instead of creating tables
    check_exist -- when True, only create tables that do not already exist
    """
    ### use alternate db name if desired
    if dbname is not None:
        print "setting alternate database name"
        sinedon.setConfig(sinedonname, db=dbname)
    ### connect to DB
    dbconf = sinedon.getConfig(sinedonname)
    dbd = sqldict.SQLDict(**dbconf)
    ### import desire module
    module = __import__(modulename)
    # strip the package prefix to get the leaf module object
    modbase = re.sub("^.*\.", "", modulename)
    tableData = getattr(module, modbase) ## hope this works
    ### get module members
    funcs = inspect.getmembers(tableData, inspect.isclass)
    print "Found %d classes in module" % (len(funcs))
    #print funcs
    ### parse members
    count = 0
    if xmlfile is not None:
        xmlf = open(xmlfile, 'w')
        xmlf.write("<defaulttables>\n <definition>\n")
    for func in funcs:
        ### Check if member is valid len 2 tuple
        if len(func) != 2:
            continue
        ### Check if member is a sinedon Data class
        if not issubclass(func[1], sinedon.data.Data) or func[0] == "Data":
            continue
        ### Create table
        tablename = func[0]
        tableclass = func[1]()
        table = (dbname, tablename)
        definition, formatedData = sqldict.dataSQLColumns(tableclass, False)
        create_flag = False
        if check_exist:
            try:
                dbd.diffSQLTable(tablename, definition)
            except (MySQLdb.ProgrammingError, MySQLdb.OperationalError), e:
                errno = e.args[0]
                ## some version of mysqlpython parses the exception differently
                if not isinstance(errno, int):
                    errno = errno.args[0]
                ## 1146: table does not exist
                if errno in (1146, ):
                    print tablename, ' does not yet exist, and will be created'
                    create_flag = True
        else:
            create_flag = True
        if create_flag:
            if xmlfile is None:
                dbd.createSQLTable(table, definition)
            else:
                definitionToXml(xmlf, tablename, definition)
        count += 1
if len(sys.argv) > 3: projectid = sys.argv[3] # set new db if projectid is not None: pjc = sinedon.getConfig('projectdata') q = "SELECT appiondb FROM processingdb WHERE `REF|projects|project`='%s'" % ( projectid, ) dbc = MySQLdb.Connect(**pjc) dbc.autocommit(True) cursor = dbc.cursor() result = cursor.execute(q) if result: newdbname, = cursor.fetchone() sinedon.setConfig('appiondata', db=newdbname) cursor.close() dbc.close() # connect to database c = sinedon.getConfig('appiondata') dbc = MySQLdb.Connect(**c) dbc.autocommit(True) cursor = dbc.cursor() # execute update q = "UPDATE ApAppionJobData SET `status` = '%s' WHERE `DEF_id` = '%s'" % ( status, jobid) cursor.execute(q)
except: print "no data found for %s" % ref gaindict["meanlst"] = meanlst gaindict["stdlst"] = stdlst gaindict["timelst"] = timelst gaindict["templst"] = templst return gaindict if __name__ == "__main__": args = parseArguments() totalimages = args.n sinedon.setConfig("leginondata") darkq = leginondata.DarkImageData() camq = leginondata.CameraEMData() camq["binning"] = {"y": long(args.b), "x": long(args.b)} camq["dimension"] = {"y": long(args.y), "x": long(args.x)} darkq["camera"] = camq print "querying dark images" darkdata = darkq.query(results=totalimages) darkdata.reverse() darkd = getGainInfo(darkdata, totalimages) brightq = leginondata.BrightImageData() brightq["camera"] = camq print "querying bright images" brightdata = brightq.query(results=totalimages)
def reset(self):
    """Point the 'projectdata' configuration back at the source database
    so later queries are not accidentally run against the destination."""
    sinedon.setConfig('projectdata', db=self.source_dbname)
def checkConflicts(self):
    """Validate parameters and gather stack, frame and dose information
    needed for DD frame processing.

    Side effects: may repoint the 'appiondata' configuration and fills in
    several derived entries of self.params plus self.dd/self.stackdata/
    self.stackparts/self.sessiondata/self.ddstackpath.
    """
    ### setup correct database after we have read the project id
    if 'projectid' in self.params and self.params['projectid'] is not None:
        apDisplay.printMsg("Using split database")
        # use a project database
        newdbname = apProject.getAppionDBFromProjectId(self.params['projectid'])
        sinedon.setConfig('appiondata', db=newdbname)
        apDisplay.printColor("Connected to database: '"+newdbname+"'", "green")
    if self.params['stackid'] is None:
        apDisplay.printError("stackid was not defined")
    if self.params['expweight'] is False:
        apDisplay.printWarning("Exposure weighting is turned off, make sure this is what you want")
    if self.params['localavg'] is False:
        apDisplay.printWarning("Trajectory local averaging is turned off, make sure this is what you want")
    # DD processes
    self.dd = apDDprocess.DDStackProcessing()
    print self.dd
    # get stack data
    self.stackdata = appiondata.ApStackData.direct_query(self.params['stackid'])
    self.stackparts = apStack.getStackParticlesFromId(self.params['stackid'], msg=True)
    self.sessiondata = apStack.getSessionDataFromStackId(self.params['stackid'])
    # query image: use the first particle's source micrograph
    qimage = self.stackparts[0]['particle']['image']
    # pixel size info
    self.params['apix'] = apStack.getMicrographPixelSizeFromStackId(self.params['stackid'])
    self.params['box'] = self.stackdata['boxsize']
    # convert radius to pixels — assumes input is in the same length unit
    # as apix; NOTE(review): confirm units.
    self.params['particleradius'] = self.params['particleradius'] / self.params['apix']
    if self.params['particleradius'] > self.params['box'] / 2.0:
        apDisplay.printWarning("specified particle radius greater than box radius, \
setting particle radius to 0.8 * boxsize")
    # micrograph & frame info
    frames = qimage['use frames']
    nframes = len(frames)
    if self.params['framelastali'] is None:
        self.params['framelastali'] = frames[-1]
    if self.params['framelastave'] is None:
        self.params['framelastave'] = frames[-1]
    # microscope kV (high tension is stored in volts)
    self.params['kv'] = qimage['scope']['high tension']/1000.0
    # query exposure per frame, if not set here
    if self.params['total_dose'] is not None:
        dose = self.params['total_dose']
    else:
        try:
            dose = apDatabase.getDoseFromImageData(qimage)
        except:
            apDisplay.printError("dose not specified and not in database, please specify explicitly")
    if self.params['expperframe'] is None and self.params['expweight'] is True:
        if dose is not None:
            self.params['expperframe'] = dose / nframes
        else:
            apDisplay.printError("exposure per frame needs to be specified, cannot find in database")
    # dimensions
    self.params['framex'] = int(apDatabase.getDimensionsFromImageData(qimage)['x'])
    self.params['framey'] = int(apDatabase.getDimensionsFromImageData(qimage)['y'])
    # DD info
    self.dd.setImageData(qimage)
    self.dd.setDDStackRun(self.params['ddstackid'])
    self.ddstackpath = self.dd.getDDStackRun()['path']['path']
def setDestinationProject(self, projectid):
    """Fetch the project record from the destination database and cache it
    on self.destination_project, then reset back to the source config."""
    self.destination_project = None
    sinedon.setConfig('projectdata', db=self.destination_dbname)
    self.destination_project = projectdata.projects().direct_query(projectid)
    self.reset()
except: print "no data found for %s" % ref gaindict['meanlst'] = meanlst gaindict['stdlst'] = stdlst gaindict['timelst'] = timelst gaindict['templst'] = templst return gaindict if __name__ == '__main__': args = parseArguments() totalimages = args.n sinedon.setConfig('leginondata') darkq = leginondata.DarkImageData() camq = leginondata.CameraEMData() camq['binning'] = {'y': long(args.b), 'x': long(args.b)} camq['dimension'] = {'y': long(args.y), 'x': long(args.x)} darkq['camera'] = camq print "querying dark images" darkdata = darkq.query(results=totalimages) darkdata.reverse() darkd = getGainInfo(darkdata, totalimages) brightq = leginondata.BrightImageData() brightq['camera'] = camq print "querying bright images" brightdata = brightq.query(results=totalimages)
def setSourceProject(self, projectid):
    """Cache the source-database project record for projectid on
    self.source_project."""
    sinedon.setConfig('projectdata', db=self.source_dbname)
    project_query = projectdata.projects()
    self.source_project = project_query.direct_query(projectid)