def createDB():
    """Create sqlite file with DCS currents"""
    from PyCool import cool
    from CoolConvUtilities import AtlCoolLib, AtlCoolTool
    import os

    # Start from a clean slate: drop any leftover file from a previous run.
    if os.path.isfile("magfield.db"):
        os.remove("magfield.db")
    db = cool.DatabaseSvcFactory.databaseService().createDatabase(
        'sqlite://;schema=magfield.db;dbname=CONDBR2')

    # Payload layout: one float value plus an invalid-quality flag.
    spec = cool.RecordSpecification()
    spec.extend("value", cool.StorageType.Float)
    spec.extend("quality_invalid", cool.StorageType.Bool)
    dcs_folder = AtlCoolLib.ensureFolder(
        db, folder, spec,
        AtlCoolLib.athenaDesc(True, 'CondAttrListCollection'))

    # Each entry supplies (since, value-for-channel-1, value-for-channel-3);
    # every record is open-ended up to ValidityKeyMax.
    for entry in currents:
        for channel, current in ((1, entry[1]), (3, entry[2])):
            rec = cool.Record(spec)
            rec['value'] = current
            rec['quality_invalid'] = False
            dcs_folder.storeObject(entry[0], cool.ValidityKeyMax, rec, channel)

    # Dump the folder content for inspection.
    tool = AtlCoolTool.AtlCoolTool(db)
    print(tool.more(folder))
def build_folder(cls, db, folder_name, multiversion, record):
    """
    Create `folder_name` on database instance `db` with record
    specification `record`, first creating the parent folderset if it
    does not exist yet. Returns the new folder and an empty payload
    record for it.
    """
    from PyCool import cool

    # Make sure the parent folderset exists before creating the folder.
    parent_path = dirname(folder_name)
    try:
        db.getFolderSet(parent_path)
    except Exception as error:
        expected = "Folder set %s not found" % parent_path
        if expected not in error.args[0]:
            raise  # a different failure - propagate it
        log.debug("Folderset doesn't exist - creating it.")
        db.createFolderSet(parent_path, "", True)

    # `record` is either a ready RecordSpecification or a list of
    # (name, storage-type) tuples from which one is built.
    if isinstance(record, cool.RecordSpecification):
        spec = record
    else:
        spec = cool.RecordSpecification()
        for field in record:
            spec.extend(*field)

    versioning = (cool.FolderVersioning.MULTI_VERSION if multiversion
                  else cool.FolderVersioning.SINGLE_VERSION)
    new_folder = db.createFolder(folder_name,
                                 cool.FolderSpecification(versioning, spec))
    return new_folder, cool.Record(spec)
def execute(self):
    """Store one xint payload into /DMTest/TestAttrList and dump the folder."""
    folder = '/DMTest/TestAttrList'
    # Folder payload is a single 32-bit integer; create folder if needed.
    spec = cool.RecordSpecification()
    spec.extend("xint", cool.StorageType.Int32)
    print(">== Store object in folder", folder)
    cfolder = AtlCoolLib.ensureFolder(
        self.db, folder, spec,
        AtlCoolLib.athenaDesc(self.runLumi, 'AthenaAttributeList'),
        cool.FolderVersioning.MULTI_VERSION)
    if cfolder is None:
        sys.exit(1)

    # Build the payload to store.
    payload = cool.Record(spec)
    payload['xint'] = self.xint
    print('>== Store object with IOV [', self.since, ',', self.until,
          '] and tag', self.tag, 'xint', self.xint)
    try:
        # "HEAD" means an untagged store; anything else goes under the tag.
        if self.tag == "HEAD":
            cfolder.storeObject(self.since, self.until, payload, 0)
        else:
            cfolder.storeObject(self.since, self.until, payload, 0, self.tag)
        print(">== Storing COOL object succeeded. Current content:")
    except Exception:
        import traceback
        traceback.print_exc()
        print('>== Storing COOL object FAILED')
        sys.exit(1)

    # Dump the full folder content.
    print(AtlCoolTool.AtlCoolTool(self.db).more(folder))
def execute(self):
    """Ensure the NBuilt folder exists and store one record per channel.

    Exits with status 1 when no channels are configured, the folder
    cannot be created, or a store fails. Returns early (without writing)
    when no data was supplied.
    """
    # Fix: this block used Python-2-only syntax (`print` statements,
    # `except Exception, e`) which is a SyntaxError under Python 3 and
    # inconsistent with the py3 blocks elsewhere in this file.
    chanlist = self.getchans()
    if len(chanlist) == 0:
        sys.exit(1)
    # now do update - setup folder specification and create if needed
    spec = cool.RecordSpecification()
    spec.extend("NBuilt", cool.StorageType.Int32)
    print(">== Store object in folder", self.foldername)
    cfolder = AtlCoolLib.ensureFolder(
        self.db, self.foldername, spec,
        AtlCoolLib.athenaDesc(True, 'CondAttrListCollection') + '<named/>',
        cool.FolderVersioning.SINGLE_VERSION)
    if cfolder is None:
        sys.exit(1)
    # if we do not have data to add - finish here
    if not self.isdata:
        return
    # now write data - the same payload is stored on every channel
    payload = cool.Record(spec)
    payload['NBuilt'] = self.nbuilt
    for channel in chanlist:
        print('>== Store object with IOV [', self.since, ',', self.until,
              '] channel', channel, 'NBuilt=', self.nbuilt)
        try:
            cfolder.storeObject(self.since, self.until, payload, channel)
        except Exception as e:
            print(e)
            print('Exception thrown when storing for channel', channel)
            print('>== Storing COOL object FAILED')
            sys.exit(1)
def fillPileUpNoiseLumi(db, tag, dataIn,
                        folderName="/CALO/Ofl/Noise/PileUpNoiseLumi"):
    """Store `dataIn` as LBAvInstLumi (with Valid=0) in channels 0 and 1
    of `folderName` for the full validity range, under `tag`.

    The folder is created when it does not exist yet. Returns 0.
    """
    # Fixes: removed the dead `data = 0.` assignment that was immediately
    # overwritten, converted the Python-2 print statement, and corrected
    # the "does not exit" typo in the message.
    if db.existsFolder(folderName):
        folder = db.getFolder(folderName)
    else:
        # create new folder
        print("Folder", folderName, "does not exist yet. Creating it now.")
        folder = createFolder(db, folderName)
    since = cool.ValidityKeyMin
    until = cool.ValidityKeyMax
    payload = cool.Record(folder.payloadSpecification())
    payload["LBAvInstLumi"] = dataIn
    payload["Valid"] = 0
    # The same payload goes to both channels for the full validity range.
    folder.storeObject(since, until, payload, cool.ChannelId(0), tag)
    folder.storeObject(since, until, payload, cool.ChannelId(1), tag)
    return 0
def fillFolder(folder, data=None, iovMin=cool.ValidityKeyMin,
               iovMax=cool.ValidityKeyMax):
    """Count per-beam bunches in `data` and store one record (channel 0)
    holding the counts plus a fixed-size per-BCID mask blob.

    `data` is a sequence of per-BCID mask words: bit 0 flags a bunch in
    beam 1, bit 1 a bunch in beam 2; both bits set counts as a luminous
    (colliding) bunch.
    """
    # Fix: `data=[]` was a mutable default argument - replaced with the
    # None-sentinel idiom (behaviour unchanged for all callers).
    if data is None:
        data = []
    nB1 = 0
    nB2 = 0
    nColl = 0
    for bcid in data:
        if (bcid & 0x1):
            nB1 += 1
        if (bcid & 0x2):
            nB2 += 1
        if (bcid & 0x3 == 0x3):
            nColl += 1
    payload = cool.Record(folder.payloadSpecification())
    payload['Beam1Bunches'] = nB1
    payload['Beam2Bunches'] = nB2
    payload['LuminousBunches'] = nColl
    # Fixed-size mask blob of 3564 entries, one byte per BCID.
    btype = getattr(ROOT, "coral::Blob")
    bcmask = btype()
    bcmask.resize(3564)
    for i, d in enumerate(data):
        bcmask[i] = d
    payload['BCIDmasks'] = bcmask
    print("Storing CaloCondBlob object")
    folder.storeObject(iovMin, iovMax, payload, cool.ChannelId(0))
    return
def CaloCondBlobWriter(spec, valuePairs, defaultValue):
    """Build a cool.Record with a CaloCondBlob16M blob filled from
    `valuePairs`, a sequence of (hashid, value) pairs.

    Channels not present in `valuePairs` keep `defaultValue`. The channel
    count is inferred from the input size: more entries than connected
    LAr cells implies the input also covers Tile.
    """
    # Fix: converted Python-2 print statements (SyntaxError under py3)
    # to print() calls, consistent with the py3 blocks in this file.
    vec = getattr(ROOT, 'vector<float>')
    vecvec = getattr(ROOT, 'vector<vector<float> >')
    # Build the definition vector (1 value per gain)
    gainDefVec = vec()
    gainDefVec.push_back(defaultValue)
    defVec = vecvec()
    defVec.push_back(gainDefVec)
    nLArChannels = 182468   # Connected LAr cells
    nAllChannels = 187652   # Connected LAr + Tile cells
    if len(valuePairs) > nLArChannels:
        # input data apparently includes also Tile
        nChannels = nAllChannels
    else:
        nChannels = nLArChannels
    print("Build CaloCondBlob object")
    data = cool.Record(spec)
    blob = data['CaloCondBlob16M']
    fltClass = getattr(ROOT, 'CaloCondBlobFlt')
    flt = fltClass.getInstance(blob)
    flt.init(defVec, nChannels, 1)
    print("Filling CaloCondBlob object")
    dvec = vec()
    dvec.push_back(defaultValue)
    foundChans = set()
    for (hashid, value) in valuePairs:
        if hashid >= nChannels:
            print("ERROR: Invalid hash id", hashid)
            continue
        if hashid in foundChans:
            print("WARNING: Channel with hash", hashid, "encountered twice!")
        else:
            foundChans.add(hashid)
        dvec[0] = value
        flt.setData(hashid, 0, dvec)
    if len(foundChans) < nChannels:
        print("WARNING No values found for", nChannels - len(foundChans),
              "channels. Left at default value", defaultValue)
    return data
def writeBeamSpotEntry(folderHandle, tag='nominal', runMin=0,
                       runMax=(1 << 31) - 1, lbMin=0, lbMax=(1 << 32) - 2,
                       status=0, posX=0., posY=0., posZ=0., sigmaX=30.,
                       sigmaY=30., sigmaZ=500., tiltX=0., tiltY=0.,
                       sigmaXY=0., posXErr=0., posYErr=0., posZErr=0.,
                       sigmaXErr=0., sigmaYErr=0., sigmaZErr=0.,
                       tiltXErr=0., tiltYErr=0., sigmaXYErr=0.):
    """Write a beam spot entry for a given IOV into a beam spot folder
    whose 'folderHandle' is passed. The IOV is specified in terms of run
    and LB range; note that lbMax is inclusive. Defaults are zero
    position/tilt and large non-constraining widths of 30mm (500mm)
    transverse (longitudinal)."""
    # Pack (run, LB) into COOL validity keys; +1 makes lbMax inclusive.
    since = (runMin << 32) + lbMin
    until = (runMax << 32) + lbMax + 1
    payload = cool.Record(folderHandle[2])
    payload['status'] = int(status)
    # All remaining payload fields are plain floats.
    for field, value in (('posX', posX), ('posY', posY), ('posZ', posZ),
                         ('sigmaX', sigmaX), ('sigmaY', sigmaY),
                         ('sigmaZ', sigmaZ), ('tiltX', tiltX),
                         ('tiltY', tiltY), ('sigmaXY', sigmaXY),
                         ('posXErr', posXErr), ('posYErr', posYErr),
                         ('posZErr', posZErr), ('sigmaXErr', sigmaXErr),
                         ('sigmaYErr', sigmaYErr), ('sigmaZErr', sigmaZErr),
                         ('tiltXErr', tiltXErr), ('tiltYErr', tiltYErr),
                         ('sigmaXYErr', sigmaXYErr)):
        payload[field] = float(value)
    # 'HEAD' stores untagged; anything else is stored under the given tag.
    if tag == 'HEAD':
        folderHandle[1].storeObject(since, until, payload, 0)
    else:
        folderHandle[1].storeObject(since, until, payload, 0, tag)
def fetch_for_writing(cls, orig_folder_name, multiversion=True, record=None,
                      create=False, db_override=None):
    """
    Retrieve a folder for writing, creating it if it doesn't exist.

    `orig_folder_name` specifies the folder to be queried.
    `multiversion` selects the COOL versioning mode.
    `record` is either a cool.RecordSpecification, a list of
    ("<field name>", cool.StorageType.<field type>) tuples, or None for a
    single default Code/Int32 field.
    `create` controls whether a missing database is created.
    `db_override` overrides automatic detection of the database string.

    Returns (db, folder, payload).
    """
    from PyCool import cool

    if record is None:
        record = [("Code", cool.StorageType.Int32)]

    database, folder_name = cls.resolve_folder_string(orig_folder_name)
    if db_override:
        database = db_override

    # Open the database; optionally create it when allowed and missing.
    try:
        db = cls.get_instance(database, False)
    except Exception as error:
        missing = "The database does not exist" in error.args[0]
        if not (create and missing):
            raise
        from PyCool import cool
        svc = cool.DatabaseSvcFactory.databaseService()
        resolved_database, _ = cls.resolve_db_string(database)
        log.info("Database doesn't exist - creating it.")
        db = svc.createDatabase(resolved_database)

    # Fetch the folder; optionally create it when allowed and missing.
    try:
        folder = db.getFolder(folder_name)
        payload = cool.Record(folder.payloadSpecification())
    except Exception as error:
        missing = "Folder %s not found" % folder_name in error.args[0]
        if not (create and missing):
            raise
        log.debug("Folder doesn't exist - creating it.")
        folder, payload = cls.build_folder(db, folder_name, multiversion,
                                           record)
    return db, folder, payload
def writeDataToFolder(folder, data):
    """Store each entry's 'offset' values into `folder`, one channel per
    value, under the fixed tag "Test-01".

    Each entry of `data` is a dict with "since"/"until" (run, lumiblock)
    pairs and an "offset" sequence.
    """
    # Fix: converted the Python-2 print statement (SyntaxError under py3).
    folderTag = "Test-01"
    for d in data:
        # Pack (run, lumiblock) pairs into COOL validity keys.
        since = (d["since"][0] << 32) + d["since"][1]
        until = (d["until"][0] << 32) + d["until"][1]
        print("since: ", since, " until:", until)
        for ch, v in enumerate(d["offset"]):
            record = cool.Record(folder.payloadSpecification())
            record["Offset"] = v
            folder.storeObject(since, until, record, ch, folderTag)
def write_geometry(self, new_file_name):
    """Write the receiver-layer geometry into the L1CALO sqlite file
    `new_file_name` under /TRIGGER/L1Calo/V1/Results/RxLayers.

    Each channel stores the layer count plus four layer names and four
    cell counts, all packed as single unsigned bytes.
    """
    print(" Writing geometry to file ", new_file_name)
    dbSvc = cool.DatabaseSvcFactory.databaseService()
    connectString = 'sqlite://;schema=' + new_file_name + ';dbname=L1CALO'
    print('Writing into database file', new_file_name)
    db = dbSvc.openDatabase(connectString, False)

    # All nine payload fields are single unsigned bytes.
    spec = cool.RecordSpecification()
    for field in ('NLayers', 'Name1', 'Name2', 'Name3', 'Name4',
                  'NCells1', 'NCells2', 'NCells3', 'NCells4'):
        spec.extend(field, cool.StorageType.UChar)

    # IOV: from "now" (in COOL nanosecond units) to the end of time.
    since = int(time.time()) * self.UNIX2COOL
    until = cool.ValidityKeyMax
    folder_description = "<timeStamp>time</timeStamp><addrHeader><address_header service_type=\"71\" clid=\"1238547719\"/></addrHeader><typeName>CondAttrListCollection</typeName>"
    f = db.createFolder("/TRIGGER/L1Calo/V1/Results/RxLayers", spec,
                        folder_description)

    # Keys are hex channel-id strings; each value is packed byte-by-byte.
    for chan in self.NLayers.keys():
        data = cool.Record(spec)
        data['NLayers'] = struct.pack('B', self.NLayers[chan])
        for j in range(4):
            data['Name%d' % (j + 1)] = struct.pack('B',
                                                   self.LayerName[chan][j])
        for j in range(4):
            data['NCells%d' % (j + 1)] = struct.pack('B',
                                                     self.NCells[chan][j])
        f.storeObject(since, until, data, int(chan, 16))
    db.closeDatabase()
    return
def writeBlobToFolder(recordName, folder, data):
    """Store each entry's blob under payload field `recordName`, using the
    1-based entry index as the COOL channel number (untagged store)."""
    folderTag = "Test-01"  # note: currently not passed to storeObject
    # A single record object is reused across all stores.
    record = cool.Record(folder.payloadSpecification())
    for count, d in enumerate(data, start=1):
        # Pack (run, lumiblock) pairs into COOL validity keys.
        since = (d["since"][0] << 32) + d["since"][1]
        until = (d["until"][0] << 32) + d["until"][1]
        record[recordName] = d["blob"]
        folder.storeObject(since, until, record, count)
def zeroBlob(self, systemId):
    """ Resets blob size to zero for the given system id.

    Looks up (or lazily creates and caches) the payload record for the
    channel and shrinks its CaloCondBlob16M blob to zero length.
    Returns None after logging on any failure.
    """
    try:
        chanNum = cool.ChannelId(systemId)
        # Fix: the cache was read with the raw `systemId` but written with
        # the ChannelId key, so lookups could never hit an existing entry.
        # Use the ChannelId key consistently (matching getCells).
        data = self.__chanDictRecord.get(chanNum)
        if not data:
            spec = self.__folder.payloadSpecification()
            data = cool.Record(spec)
            self.__chanDictRecord[chanNum] = data
        blob = data['CaloCondBlob16M']
        blob.resize(0)
    except Exception as e:
        self.log().critical(e)
        return None
def execute(self):
    """Store one beamspot record into /Indet/Beampos.

    Exits with status 1 when the folder cannot be created or the store
    fails.
    """
    # Fix: this block used Python-2-only syntax (`print` statements,
    # `except Exception, e`) which is a SyntaxError under Python 3 and
    # inconsistent with the py3 blocks elsewhere in this file.
    # do update - setup folder specification and create if needed
    spec = cool.RecordSpecification()
    spec.extend("status", cool.StorageType.Int32)
    for field in ("posX", "posY", "posZ", "sigmaX", "sigmaY", "sigmaZ",
                  "tiltX", "tiltY", "sigmaXY"):
        spec.extend(field, cool.StorageType.Float)
    folder = '/Indet/Beampos'
    print(">== Store object in folder", folder)
    cfolder = AtlCoolLib.ensureFolder(
        self.db, folder, spec,
        AtlCoolLib.athenaDesc(self.runLumi, 'AthenaAttributeList'),
        cool.FolderVersioning.MULTI_VERSION)
    if cfolder is None:
        sys.exit(1)
    # now write data
    payload = cool.Record(spec)
    payload['status'] = self.status
    payload['posX'] = self.posx
    payload['posY'] = self.posy
    payload['posZ'] = self.posz
    payload['sigmaX'] = self.sigmax
    payload['sigmaY'] = self.sigmay
    payload['sigmaZ'] = self.sigmaz
    payload['tiltX'] = self.tiltx
    payload['tiltY'] = self.tilty
    payload['sigmaXY'] = self.sigmaxy
    print('>== Store object with IOV [', self.since, ',', self.until,
          '] and tag', self.tag, 'status', self.status)
    print('>== Beamspot position (mm):', self.posx, self.posy, self.posz)
    print('>== Beamspot sigma (mm):', self.sigmax, self.sigmay, self.sigmaz)
    print('>== Beamspot tilt (rad):', self.tiltx, self.tilty)
    try:
        if self.tag == "HEAD":
            cfolder.storeObject(self.since, self.until, payload, 0)
        else:
            cfolder.storeObject(self.since, self.until, payload, 0, self.tag)
        print(">== Storing COOL object succeeded")
    except Exception as e:
        print(e)
        print('>== Storing COOL object FAILED')
        sys.exit(1)
def __init__(self):
    """Initialise an empty record list plus the COOL folder/record
    specifications for the AFP test folder."""
    self.records = []
    # IOV boundaries as (run, lumiblock) components.
    self.iovStartRun = 0
    self.iovStartLumiBlock = 0
    self.iovEndRun = 0
    self.iovEndLumiBlock = 0
    self.tag = "AFPTest-00-00-00"
    self.folderName = "/FWD/AFP/TEST"
    # Single payload column 'data' holding a (large) string.
    self.spec = cool.RecordSpecification()
    self.spec.extend("data", cool.StorageType.String16M)
    # Folder metadata understood by Athena: AthenaAttributeList payload
    # addressed by run-lumi IOVs.
    self.desc = ('<timeStamp>run-lumi</timeStamp><addrHeader><address_header '
                 'service_type="71" clid="40774348" /></addrHeader>'
                 '<typeName>AthenaAttributeList</typeName>')
    self.data = cool.Record(self.spec)
    self.folderSpec = cool.FolderSpecification(
        cool.FolderVersioning.MULTI_VERSION, self.spec)
def createSqlite(sqliteName, folderName, foldertag,
                 iovMin=cool.ValidityKeyMin, iovMax=cool.ValidityKeyMax,
                 inputFileName=None, defaultvalue=1.0):
    """Open (or create) the CONDBR2 sqlite file `sqliteName` and store one
    CaloCondBlob16M record - built from `inputFileName` when given,
    otherwise empty - into `folderName` under `foldertag`."""
    dbSvc = cool.DatabaseSvcFactory.databaseService()
    connect = "sqlite://;schema=" + sqliteName + ";dbname=CONDBR2"
    # Reuse the file if it already exists, otherwise create it.
    if os.access(sqliteName, os.R_OK):
        print("UPDATING existing sqlite file", sqliteName)
        db = dbSvc.openDatabase(connect, False)
    else:
        print("Creating new sqlite file", sqliteName)
        db = dbSvc.createDatabase(connect)

    # Single blob payload column.
    spec = cool.RecordSpecification()
    spec.extend('CaloCondBlob16M', cool.StorageType.Blob16M)
    desc = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>'

    if db.existsFolder(folderName):
        folder = db.getFolder(folderName)
    else:
        print("Creating COOL folder/tag %s/%s" % (folderName, foldertag))
        folderSpec = cool.FolderSpecification(
            cool.FolderVersioning.MULTI_VERSION, spec)
        folder = db.createFolder(folderName, folderSpec, desc, True)

    # Empty blob when no input file was supplied.
    if inputFileName is None or len(inputFileName) == 0:
        print("No input given. Create empty blob")
        data = cool.Record(spec)
    else:
        data = CaloCondBlobWriterFromFile(spec, inputFileName, defaultvalue)

    print("Storing CaloCondBlob object")
    folder.storeObject(iovMin, iovMax, data, cool.ChannelId(0), foldertag,
                       True)
    db.closeDatabase()
def writeBlobToFolder(folder, data):
    """Store the 'Noise' blob of at most the first 20 entries of `data`
    into channel 0 of `folder` (untagged)."""
    folderTag = "Test-01"  # note: currently not passed to storeObject
    # A single record object is reused across all stores.
    record = cool.Record(folder.payloadSpecification())
    for stored, d in enumerate(data, start=1):
        # Pack (run, lumiblock) pairs into COOL validity keys.
        since = (d["since"][0] << 32) + d["since"][1]
        until = (d["until"][0] << 32) + d["until"][1]
        record["Noise"] = d["blob"]
        folder.storeObject(since, until, record, 0)
        if stored == 20:
            break
def _load_folder(self, folder, create_function):
    """ Internal function used to load a COOL folder.

    Resolves the folder against this object's connection string, keeps
    the database connection for later cleanup, and returns the folder
    together with an empty payload record for it.
    """
    db_folder_string = "%s::%s" % (self.connection_string, folder)
    # Only pass the creation hook when this instance may create folders.
    creator = create_function if self._create else None
    db, folder = Databases.get_folder(db_folder_string,
                                      read_only=self._read_only,
                                      create_function=creator,
                                      also_db=True)
    self._connections.append(db)
    return folder, cool.Record(folder.payloadSpecification())
def addChannelPayload(self, channel, since, until=None):
    """Store the XML payload of a DozerChannel for the [since, until) IOV.

    Raises DozerError on bad arguments or a closed db. Returns True on a
    successful store, False when the channel is unknown or the store
    failed (the error is logged, not raised).
    """
    # Fixes: bare `except:` (which also swallowed SystemExit and
    # KeyboardInterrupt) narrowed to `except Exception`, and the
    # `isinstance(...) == False` anti-idiom replaced by `not isinstance`.
    since, until = self.__sinceUntil(since, until)
    if None in (since, until):
        raise DozerError(
            "wrong arguments for 'since' or 'until' passed to DozerDB.addChannelPayload"
        )
    if not isinstance(channel, DozerChannel):
        raise DozerError(
            "wrong type for argument 'channel' in DozerDB.addChannelPayload, should be DozerChannel instance"
        )
    if not self.isOpened():
        raise DozerError("cannot add payload, db is nor opened!")
    channelName = str(channel.name())
    try:
        if self.__folder.existsChannel(channelName):
            channelId = self.__folder.channelId(channelName)
            data = cool.Record(self.__recordSpec())
            data["xml"] = str(channel.xml().toxml())
            self.debug(
                "storing channel name='%s' since=%s until=%s data=%s" %
                (channelName, since.AsISO(), until.AsISO(), data["xml"]))
            self.__folder.storeObject(since.AsCool(), until.AsCool(), data,
                                      channelId)
            self.info("payload for channel name='%s' has been added" %
                      channelName)
            self.__folder.flushStorageBuffer()
            return True
        else:
            self.error("DozerChannel name='%s' not found in db folder %s" %
                       (channelName, self.__folder.fullPath()))
    except Exception:
        self.epanic(
            "unknow error when processing DozerDB.addChannelPayload")
    return False
def getCells(self, systemId): """ Returns a CaloCondBlob object of given system Id. """ #try: chanNum = cool.ChannelId(systemId) flt = self.__chanDictCells.get(chanNum, None) #=== ... if not, get it from DB if not flt: #=== create new blob spec = self.__folder.payloadSpecification() data = cool.Record(spec) self.__chanDictRecord[chanNum] = data for key in data: blob = data[key] flt = g.CaloCondBlobFlt.getInstance(blob) self.__chanDictCells[chanNum] = flt return flt
def WriteSqlite(name, input_dict):
    """Recreate the L1CALO sqlite file `name` and fill
    /TRIGGER/Receivers/Factors/CalibGains with one (factor, status)
    record per channel from `input_dict` (keys are hex channel ids)."""
    UNIX2COOL = 1000000000
    dbSvc = cool.DatabaseSvcFactory.databaseService()
    connectString = 'sqlite://;schema=' + name + ';dbname=L1CALO'
    # Always start from a fresh file.
    print('\nrecreating database file:', name)
    dbSvc.dropDatabase(connectString)
    db = dbSvc.createDatabase(connectString)

    # Payload: one float gain factor plus an unsigned status word.
    spec = cool.RecordSpecification()
    spec.extend("factor", cool.StorageType.Float)
    spec.extend("status", cool.StorageType.UInt32)
    folderSpec = cool.FolderSpecification(
        cool.FolderVersioning.SINGLE_VERSION, spec)

    # IOV from "now" (COOL nanosecond units) to the end of time.
    since = int(time.time()) * UNIX2COOL
    until = cool.ValidityKeyMax

    # Parent foldersets must exist before the folder can be created.
    for parent in ('/TRIGGER', '/TRIGGER/Receivers',
                   '/TRIGGER/Receivers/Factors'):
        db.createFolderSet(parent)
    folder_description = '<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="1238547719"/></addrHeader><typeName>CondAttrListCollection</typeName>'
    f = db.createFolder("/TRIGGER/Receivers/Factors/CalibGains", folderSpec,
                        folder_description)

    print(" Now creating sqlite file for ", len(input_dict.keys()),
          " channels")
    for chan_hex in input_dict.keys():
        rec = cool.Record(spec)
        rec['factor'] = input_dict[chan_hex][0]
        rec['status'] = input_dict[chan_hex][1]
        f.storeObject(since, until, rec, int(chan_hex, 16))
    db.closeDatabase()
def execute(self):
    """Insert a reference (GUID) to self.file into the COOL folder.

    Extracts the file GUID with coolHist_extractFileIdentifier.sh,
    ensures the folder exists, resolves a channel name to its id when
    given, and stores the GUID payload. Exits with status 1 on failure.
    """
    # Fixes: Python-2-only syntax (print statements, `except Exception, e`)
    # converted to py3, and the GUID temp file is now closed via `with`
    # instead of leaking the handle.
    print('>== Inserting reference to file:', self.file, ' - find GUID')
    os.system('coolHist_extractFileIdentifier.sh %s' % self.file)
    with open('coolhist_guid.tmp', 'r') as guidfile:
        guid = guidfile.readline()[:-1]
    if guid == 'none':
        print('>== File has no GUID - aborting')
        sys.exit(1)
    # setup folder specification and create if needed
    spec = cool.RecordSpecification()
    spec.extend("fileGUID", cool.StorageType.String4k)
    cfolder = AtlCoolLib.ensureFolder(
        self.db, self.folder, spec,
        AtlCoolLib.athenaDesc(self.runLumi, 'CondAttrListCollection') +
        '<named/>', cool.FolderVersioning.MULTI_VERSION)
    print()
    if cfolder is None:
        sys.exit(1)
    # check if channel number needs to be looked up
    if self.channelstr != "":
        try:
            self.channel = cfolder.channelId(self.channelstr)
            print('>== Channel name', self.channelstr, 'is channelId',
                  self.channel)
        except Exception:
            print('Non-existant or invalid channel name:', self.channelstr)
            sys.exit(1)
    print('>== Write data on COOL connection:', self.conn)
    print('>== To folder:', self.folder, 'channel:', self.channel)
    print('>== COOL tag:', self.tag)
    # now write data
    payload = cool.Record(spec)
    payload['fileGUID'] = guid
    print('>== Store object with IOV [', self.since, ',', self.until,
          '] channel', self.channel, 'and tag', self.tag)
    try:
        cfolder.storeObject(self.since, self.until, payload, self.channel,
                            self.tag)
        return
    except Exception as e:
        print(e)
        print('>== Storing COOL object FAILED')
        sys.exit(1)
def addToDatabase(filename, guid, channelName, runStart, runEnd=None):
    """Store a (filename, guid) payload for `channelName` with an IOV
    from `runStart` to `runEnd` (each a (run, lumiblock) pair; None
    means open-ended). Exits with status -1 on any error.
    """
    # Fixes: Python-2-only `print >> sys.stderr` converted to py3
    # print(file=...), `== None` replaced with `is None`, and the bare
    # `except:` around openDatabase narrowed to `except Exception`.
    dbSvc = cool.DatabaseSvcFactory.databaseService()
    try:
        channel_id = DBInfo.getChannelDict()[channelName]
    except KeyError:
        print("Channel", channelName, "does not exist.", file=sys.stderr)
        sys.exit(-1)
    if runEnd is None:
        timeEnd = cool.ValidityKeyMax
    else:
        timeEnd = (runEnd[0] << 32) + runEnd[1]
    # The 32 low bits of a validity key are reserved for LumiBlocks.
    timeStart = (runStart[0] << 32) + runStart[1]
    if timeStart >= timeEnd:
        print("Starting timestamp must be LESS than ending timestamp.",
              file=sys.stderr)
        sys.exit(-1)
    try:
        db = dbSvc.openDatabase(connectString, False)
    except Exception:
        print("Error opening database.", file=sys.stderr)
        sys.exit(-1)
    if db.existsFolder(folderName):
        folder = db.getFolder(folderName)
        spec = folder.payloadSpecification()
    else:
        print("Error: Folder", folderName, "does not exist in database.",
              file=sys.stderr)
        sys.exit(-1)
    data = cool.Record(spec)
    data["filename"] = filename
    data["guid"] = guid
    folder.storeObject(timeStart, timeEnd, data, channel_id)
####<timeStamp>time</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>
# create the folder - single version
# last argument is createParents - if true, automatically creates parent folders if needed
# note this will not work if the database already exists - delete mycool.db first
#folder=db.createFolder(fdname,spec,desc,cool.FolderVersioning.MULTI_VERSION,True)
folder = db.createFolder(fdname, spec, desc, True)
folder.createChannel(0, "muonalignmenterrors",
                     "muon alignment spectrometer errors")
# ----------------------------------------------------------------------- #
# now fill in data - create a record and fill it
# (fixes: Python-2 print statements converted to py3; input file now
# closed via `with` instead of being left to the GC)
# ----------------------------------------------------------------------- #
data = cool.Record(recspec)
clob = "#This is an empty clob \n"
# FIRST COLUMN: for documentation/version
data['version'] = 'V0: Test'
# SECOND COLUMN: contains the actual input, read from the ASCII file
with open('input.txt', 'r') as infile:
    inputsys = infile.read()
data['syserrors'] = inputsys  #'# Empty Errors'
print("Will store this object: ")
print(data['syserrors'])
chann = 0
# store object with IOV valid from 0-max, channel 0
def BadChan_SaveBadChannelCorrectionsToDatabase(self, dbstring, dbSvc, dbFolderName, selectedTag):
    """
    Save the accumulated bad-channel corrections to a new COOL database.

    For each cool channel holding modified/new/removed entries, a vector
    of (HWIdentifier, LArBadChannel) pairs is built and stored into
    `dbFolderName` of a database created at `dbstring`, under
    `selectedTag`. The database and its folder are created lazily on the
    first channel that actually has corrections.

    NOTE(review): this block still uses Python-2-only syntax (print
    statements, `except Exception, e`) unlike the py3 parts of the file.
    """
    listKeys = self.dict_vectBadChanEntry.keys()
    listKeys.sort()
    # Create LArBadChannel object for defined BadChanEntry vector and coolChan
    self.class_LArBadChannelState = PyCintex.makeClass(
        'LArBadChannelState')
    inst_larBadChannelState = self.class_LArBadChannelState()
    # Loop over cool channels
    bStoreNewCoolChannels = False
    bNewDBCreated = False
    for coolChan in listKeys:
        vect_BadChanEntry = PyCintex.gbl.std.vector(
            'std::pair<HWIdentifier,LArBadChannel>')()
        listHWidKeys = [
            x for x in self.dict_vectBadChanEntry[coolChan].keys()
        ]
        listHWidKeys.sort()
        iNbCorrection = 0
        for key in listHWidKeys:
            sHWid = key
            # Pick the bad-channel word according to the entry status.
            if self.dict_vectBadChanEntry_Status[coolChan][
                    sHWid] == STATUS_INIT:
                badChan_word = self.dict_vectBadChanEntry_Init[coolChan][
                    sHWid][1]
            elif self.dict_vectBadChanEntry_Status[coolChan][
                    sHWid] == STATUS_MODIFIED or self.dict_vectBadChanEntry_Status[
                        coolChan][sHWid] == STATUS_NEW:
                iNbCorrection += 1
                badChan_word = self.dict_vectBadChanEntry[coolChan][sHWid][
                    1]
            elif self.dict_vectBadChanEntry_Status[coolChan][
                    sHWid] == STATUS_REMOVED:
                # Removed channels count as corrections but are not stored.
                iNbCorrection += 1
                continue
            obj_HWid = self.class_HWIdentifier()
            obj_HWid.set(sHWid)
            larBadChannel = self.class_LArBadChannel(badChan_word)
            pair_BadChanEntry = PyCintex.gbl.pair(
                'HWIdentifier,LArBadChannel')(obj_HWid, larBadChannel)
            vect_BadChanEntry.push_back(pair_BadChanEntry)
        # if correction were made => store BadChanEntry vector
        if iNbCorrection > 0:
            for sEntry in vect_BadChanEntry:
                inst_larBadChannelState.add(sEntry, coolChan)
            bStoreNewCoolChannels = True
        else:
            # Nothing to store for this channel.
            continue
        # Create object based on new LArBadChannelState (via LArBadChannelDBTools python interface)
        attrListSpec = PyCintex.gbl.coral.AttributeListSpecification()
        athenaAttrList = PyCintex.gbl.AthenaAttributeList()
        attrListSpec = self.nspace_LArBadChannelDBTools.createCoolSpec()
        athenaAttrList = self.nspace_LArBadChannelDBTools.createPayload(
            inst_larBadChannelState.coolChannel(coolChan), attrListSpec)
        # if save DB has not been created => do it
        # (lazy one-time creation, on the first channel with corrections;
        # coolSpec/myfolder/beginRun/endRun defined here are reused by
        # every later iteration)
        if bNewDBCreated == False:
            import os
            try:
                dbSave = dbSvc.createDatabase(dbstring)
            except Exception, e:
                print 'Problem opening database', e
                sys.exit(-1)
            print "Opened database", dbstring
            desc = '<timeStamp>run-event</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>'
            # Create cool spec from AttributeListSpec
            coolSpec = cool.RecordSpecification()
            for iElemt in range(0, attrListSpec.size()):
                attrSpec = attrListSpec[iElemt]
                typeName = attrSpec.typeName()
                if typeName == "unsigned int":
                    coolSpec.extend(attrSpec.name(), cool.StorageType.UInt32)
                elif typeName == "blob":
                    coolSpec.extend(attrSpec.name(), cool.StorageType.Blob64k)
                else:
                    print "Undefined cool.StorageType " + typeName
            # myfolder=dbSave.createFolder(dbFolderName, coolSpec, desc, cool.FolderVersioning.SINGLE_VERSION,True)
            myfolder = dbSave.createFolder(
                dbFolderName, coolSpec, desc,
                cool.FolderVersioning.MULTI_VERSION, True)
            import string
            # Fixed IOV [90, 9999999): run number in the upper 32 bits.
            IOVBeginEnd = ["90", "9999999"]
            beginRun = string.atoi(IOVBeginEnd[0]) << 32
            endRun = string.atoi(IOVBeginEnd[1]) << 32
            bNewDBCreated = True
        # Create cool payload from AthenaAttributeList payload
        coolPayload = cool.Record(coolSpec)
        for iElemt in range(0, attrListSpec.size()):
            attrSpec = attrListSpec[iElemt]
            coolPayload[attrSpec.name()] = athenaAttrList[attrSpec.name()]
        # Store cool object to folder
        myfolder.storeObject(beginRun, endRun, coolPayload, coolChan,
                             selectedTag)
def genDb(self, dbFileName, dbName, params, folderPath, params64=None):
    """Create a fresh sqlite COOL database `dbFileName`/`dbName` and store
    all parameters as one record in `folderPath`.

    `params` values are stored as String4k fields, `params64` values as
    String64k. Requires beginRun/endRun to be set; raises
    ParameterDbFillerError otherwise. Exits with status -1 when the
    database cannot be created.
    """
    # Fixes: `params64={}` was a mutable default argument (replaced by the
    # None-sentinel idiom); `six.iteritems` replaced by plain dict
    # iteration, which behaves identically and drops the six dependency.
    if params64 is None:
        params64 = {}
    # Do checks
    if self.beginRun is None:
        raise ParameterDbFillerError(
            'Must set begin run number before generating db')
    if self.endRun is None:
        raise ParameterDbFillerError(
            'Must set end run number before generating db')
    if len(params) == 0:
        raise ParameterDbFillerError('No parameters for db ' + dbName)
    # remove existing db, if any
    try:
        os.remove(dbFileName)
        print("ParameterDbFiller.genDb: Removed db", dbFileName)
    except Exception:
        pass
    # get database service and open database
    dbSvc = cool.DatabaseSvcFactory.databaseService()
    # build dbstring - database accessed via physical name
    dbstring = "sqlite://;schema=" + dbFileName + ";dbname=" + dbName
    try:
        db = dbSvc.createDatabase(dbstring)
    except Exception as e:
        print('ParameterDbFiller.genDb: Problem creating database', e)
        sys.exit(-1)
    print("ParameterDbFiller.genDb: Created database", dbstring)
    # setup a folder payload specification: short strings for params,
    # 64k strings for params64
    spec = cool.RecordSpecification()
    for key in params:
        spec.extend(key, cool.StorageType.String4k)
    for key in params64:
        spec.extend(key, cool.StorageType.String64k)
    # folder meta-data - note for Athena this has a special meaning
    desc = '<timeStamp>run-event</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>'
    # create the folder - single version, creating parents as needed
    folderSpec = cool.FolderSpecification(
        cool.FolderVersioning.SINGLE_VERSION, spec)
    myfolder = db.createFolder(folderPath, folderSpec, desc, True)
    # now fill in parameters
    data = cool.Record(spec)
    for k, v in params.items():
        data[k] = v
    for k, v in params64.items():
        data[k] = v
    print("ParameterDbFiller.genDb: Recording parameters", data)
    # store object with the configured run IOV, channel 0
    myfolder.storeObject(self.beginRun, self.endRun, data, 0)
    print("ParameterDbFiller.genDb: Stored object")
    # finalize
    db.closeDatabase()
    # Try to dump out db as test
    self.dumpDb(dbstring)
# check if folder exists
# (fix: Python-2 print statements converted to py3 print() calls)
if not db.existsFolder(foldername):
    print("Attempt to create", foldername)
    desc = '<timeStamp>run-event</timeStamp><addrHeader><address_header service_type=\"71\" clid=\"1238547719\" /></addrHeader><typeName>CondAttrListCollection</typeName>'
    db.createFolder(foldername, spec, desc,
                    cool.FolderVersioning.MULTI_VERSION, True)
    print('Folder', foldername, 'created OK')
# now write data - one channel per dataset entry
folder = db.getFolder(foldername)
chan = 0
print("Writing data to", folder, "with tag", tag)
for idata in dataset:
    name = idata[0]
    nvals = len(idata) - 1
    # payload has room for at most maxpar parameter values
    if nvals > maxpar:
        print("Maximum of", maxpar,
              "parameters allowed - additional ones ignored!")
        nvals = maxpar
    print("Write data for name", name, "number of values", nvals,
          "at channel", chan)
    payload = cool.Record(spec)
    payload['Name'] = name
    payload['Len'] = nvals
    for i in range(0, nvals):
        payload['par' + str(1 + i)] = idata[1 + i]
    folder.storeObject(cool.ValidityKeyMin, cool.ValidityKeyMax, payload,
                       chan, tag)
    chan += 1
print("All done")
db.closeDatabase()
class ParameterDbFiller(object):
    """
    A simple class used to fill SQLite db file with parameters for
    simulation and/or digitization.

    Usage: set the IOV with setBeginRun/setEndRun, add parameters with the
    add*Param methods, then call genSimDb and/or genDigitDb.
    """

    def __init__(self):
        object.__init__(self)
        # IOV boundaries as COOL validity keys (run number << 32);
        # must be set via setBeginRun/setEndRun before generating a db.
        self.beginRun = None
        self.endRun = None
        # name -> value maps written as folder payload attributes
        self.simParams = {}
        self.digitParams = {}
        # parameters that need the String64k (long string) column type
        self.digitParams64 = {}
        return

    #
    def setBeginRun(self, beginRun):
        # shift run number into the upper 32 bits of the COOL validity key
        self.beginRun = beginRun << 32
        return

    def setEndRun(self, endRun):
        # shift run number into the upper 32 bits of the COOL validity key
        self.endRun = endRun << 32
        return

    def addSimParam(self, name, value):
        # register a simulation parameter (stored as String4k)
        self.simParams[name] = value
        return

    def addDigitParam(self, name, value):
        # register a digitization parameter (stored as String4k)
        self.digitParams[name] = value
        return

    def addDigitParam64(self, name, value):
        # register a long digitization parameter (stored as String64k)
        self.digitParams64[name] = value
        return

    def genSimDb(self, dbFileName = None):
        """Generate the simulation parameters db (default file SimParams.db)."""
        # Allow to define specific file name, otherwise use
        customDb = False
        if dbFileName == None:
            # Set to default value
            dbFileName = "SimParams.db"
            # NOTE(review): customDb is set but never used, and is set True on
            # the *default* path - the flag looks inverted as well as dead.
            customDb = True
        # Generate db: args - file name, dbname, params, folder path
        self.genDb(dbFileName, 'SIMPARAM', self.simParams, '/Simulation/Parameters')

    def genDigitDb(self, dbFileName = None):
        """Generate the digitization parameters db (default file DigitParams.db)."""
        # Allow to define specific file name, otherwise use
        customDb = False
        if dbFileName == None:
            # Set to default value
            dbFileName = "DigitParams.db"
            # NOTE(review): customDb is set but never used (see genSimDb).
            customDb = True
        # Generate db: args - file name, dbname, params, folder path
        self.genDb(dbFileName, 'DIGPARAM', self.digitParams, '/Digitization/Parameters', self.digitParams64)

    def genDb(self, dbFileName, dbName, params, folderPath, params64 = {}):
        """Create SQLite file dbFileName with db dbName and store params
        (String4k) plus params64 (String64k) in a single-version folder at
        folderPath, IOV [beginRun, endRun], channel 0."""
        # NOTE(review): mutable default argument ({}); harmless here because
        # params64 is only read, but a None sentinel would be safer.
        # Do checks
        if self.beginRun == None:
            raise ParameterDbFillerError, 'Must set begin run number before generating db'
        if self.endRun == None:
            raise ParameterDbFillerError, 'Must set end run number before generating db'
        if len(params) == 0:
            raise ParameterDbFillerError, 'No parameters for db ' + dbName
        # remove existing db, if any
        # NOTE(review): bare except silently ignores *all* errors, not just
        # a missing file.
        try:
            os.remove(dbFileName)
            print "ParameterDbFiller.genDb: Removed db", dbFileName
        except:
            pass
        # get database service and open database
        dbSvc=cool.DatabaseSvcFactory.databaseService()
        # build dbstring - database accessed via physical name
        dbstring="sqlite://;schema=" + dbFileName + ";dbname=" + dbName
        try:
            db=dbSvc.createDatabase(dbstring)
        except Exception,e:
            print 'ParameterDbFiller.genDb: Problem creating database',e
            sys.exit(-1)
        print "ParameterDbFiller.genDb: Created database",dbstring
        # setup a folder payload specification: one String4k column per
        # short parameter, one String64k column per long parameter
        spec=cool.RecordSpecification()
        for key in params:
            spec.extend(key, cool.StorageType.String4k)
            pass
        # add in spec for long strings
        for key in params64:
            spec.extend(key, cool.StorageType.String64k)
        # folder meta-data - note for Athena this has a special meaning
        desc = '<timeStamp>run-event</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>'
        # create the folder - single version
        # last argument is createParents - if true, automatically creates parent
        # folders if needed
        # note this will not work if the database already exists - delete mycool.db first
        # myfolder=db.createFolder(folderPath, spec, desc, cool.FolderVersioning.SINGLE_VERSION,True)
        folderSpec = cool.FolderSpecification(cool.FolderVersioning.SINGLE_VERSION, spec)
        myfolder = db.createFolder(folderPath, folderSpec, desc, True)
        # now fill in parameters
        data = cool.Record(spec)
        for k, v in params.iteritems():
            data[k] = v
        for k, v in params64.iteritems():
            data[k] = v
        print "ParameterDbFiller.genDb: Recording parameters", data
        # store one object with IOV [beginRun, endRun] in channel 0
        myfolder.storeObject(self.beginRun, self.endRun, data, 0)
        print "ParameterDbFiller.genDb: Stored object"
        # finalize
        db.closeDatabase()
        # Try to dump out db as test
        self.dumpDb(dbstring)
# folder now exists, prepare for bulk update nobj = 0 nbad = 0 cfolder.setupStorageBuffer() if (self.tag == ''): print "Storing data to HEAD" else: print "Storing data to tag %s" % self.tag for (chan, slist) in statuslists.items(): size1 = slist.size() size2 = slist.compress() print "List for channel %i size %i / %i after compression" % ( chan, size1, size2) for sobj in slist.list(): data = cool.Record(spec) data['Code'] = sobj.code data['deadFrac'] = sobj.deadfrac data['Thrust'] = sobj.thrust if self.numbers: data['NConfig'] = sobj.nconfig data['NWorking'] = sobj.nworking if spec.exists('Comment'): data['Comment'] = sobj.comment since = sobj.start until = sobj.stop # truncate IOV if needed if (self.truncate and (sobj.start < self.since or sobj.stop > self.until)): since = max(sobj.start, self.since) until = min(sobj.stop, self.until)
class LArHVMapDbFiller(object):
    """
    A simple class used to fill SQLite db file with parameters for
    simulation and/or digitization.

    Fills the LAr HV-line-to-electrode map (read from a text file) into
    folder /LAR/IdentifierOfl/HVLineToElectrodeMap of an SQLite COOL db.
    """

    def __init__(self):
        object.__init__(self)
        # IOV boundaries as COOL validity keys ((run << 32) + lumi)
        self.beginTime = None
        self.endTime = None
        # input text file holding the map, set via setFileName
        self.FileName = None
        # COOL folder tag to store under, set via setFolderTag
        self.FolderTag = None
        # payload map: single key 'LArHVMap' -> whole file content
        self.Params = {}
        return

    #
    def setBegin(self, run, lumi):
        print " setBegin run,lumi ", run
        # pack run and lumi block into a 64-bit COOL validity key
        self.beginTime = (long(run) << 32) + long(lumi)
        print " time ", self.beginTime
        return

    def setEnd(self, run, lumi):
        # NOTE(review): "runmlumi" typo below is in a runtime string -
        # left untouched here.
        print "setEnd runmlumi ", run
        self.endTime = (long(run) << 32) + long(lumi)
        print " time ", self.endTime
        return

    def setFileName(self, fileName):
        self.FileName = fileName
        return

    def setFolderTag(self, folderTag):
        self.FolderTag = folderTag
        return

    def readFile(self):
        """Read the whole input file into self.Params['LArHVMap']."""
        value = ''
        # NOTE(review): py2 file() builtin, and the handle is never closed.
        f = file(self.FileName, 'r')
        for lines in f:
            value += lines
        self.Params['LArHVMap'] = value
        return

    def genDb(self):
        """Create HVmap.db (COMP200) and store the map with the folder tag."""
        dbFileName = 'HVmap.db'
        dbName = 'COMP200'
        folderPath = '/LAR/IdentifierOfl/HVLineToElectrodeMap'
        # Do checks
        if self.beginTime == None:
            raise LArHVMapDbFillerError, 'Must set begin run number before generating db'
        if self.endTime == None:
            # open-ended IOV when no end time was given
            self.endTime = cool.ValidityKeyMax
        if self.FileName == None:
            raise LArHVMapDbFillerError, 'Must give an input file for LArHVToElectrode.data'
        if self.FolderTag == None:
            raise LArHVMapDbFillerError, 'Must give a folder tag'
        self.readFile()
        if len(self.Params) == 0:
            raise LArHVMapDbFillerError, 'No parameters for db ' + dbName
        # remove existing db, if any
        # NOTE(review): bare except swallows all errors, not just missing file
        try:
            os.remove(dbFileName)
            print "LArHVMapDbFiller.genDb: Removed db", dbFileName
        except:
            pass
        # get database service and open database
        dbSvc = cool.DatabaseSvcFactory.databaseService()
        # build dbstring - database accessed via physical name
        dbstring = "sqlite://;schema=" + dbFileName + ";dbname=" + dbName
        try:
            db = dbSvc.createDatabase(dbstring)
        except Exception, e:
            print 'LArHVMapDbFiller.genDb: Problem creating database', e
            sys.exit(-1)
        print "LArHVMapDbFiller.genDb: Created database", dbstring
        # setup a folder payload specification: one String16M column per key
        spec = cool.RecordSpecification()
        for key in self.Params:
            spec.extend(key, cool.StorageType.String16M)
        # folder meta-data - note for Athena this has a special meaning
        desc = '<timeStamp>run-lumi</timeStamp><addrHeader><address_header service_type="71" clid="40774348" /></addrHeader><typeName>AthenaAttributeList</typeName>'
        # create the folder - single version
        # last argument is createParents - if true, automatically creates parent
        # folders if needed
        # note this will not work if the database already exists - delete mycool.db first
        # NOTE(review): passes the RecordSpecification directly rather than a
        # FolderSpecification - presumably the older COOL createFolder
        # overload; confirm against the COOL version in use.
        myfolder = db.createFolder(folderPath, spec, desc,
                                   cool.FolderVersioning.MULTI_VERSION, True)
        # now fill in simlation parameters
        data = cool.Record(spec)
        for k, v in self.Params.iteritems():
            data[k] = v
        print "LArHVMapDbFiller.genDb: Recording parameters", data
        # store one object with IOV [beginTime, endTime] in channel 0,
        # under the configured folder tag
        myfolder.storeObject(self.beginTime, self.endTime, data, 0,
                             self.FolderTag)
        print "LArHVMapDbFiller.genDb: Stored object"
        # finalize
        db.closeDatabase()
        # Try to dump out db as test
        # (dumpDb is presumably defined later in this class - not shown here)
        self.dumpDb(dbstring)