def __init__(self, componentName, pollInterval=None, heartbeatTimeout=7200, logger=None, dbi=None):
    """
    ___init___

    Set up the database connection and logging attributes for a single
    worker.  Every worker owns its own instance of this class, so the
    polling interval and heartbeat timeout can differ per worker.  The
    base class creates a transaction object (left closed) if none exists.
    """
    self.componentName = componentName
    self.pollInterval = pollInterval
    # A falsy timeout (None or 0) falls back to the two-hour default
    self.heartbeatTimeout = heartbeatTimeout or 7200
    self.compId = os.getpid()

    WMConnectionBase.__init__(self, daoPackage="WMCore.Agent.Database",
                              logger=logger, dbi=dbi)

    # Wire up the heartbeat DAOs from WMCore.Agent.Database
    for attrName, daoClass in (("insertComp", "InsertComponent"),
                               ("existWorker", "ExistWorker"),
                               ("insertWorker", "InsertWorker"),
                               ("updateWorker", "UpdateWorker"),
                               ("updateErrorWorker", "UpdateWorkerError"),
                               ("getHeartbeat", "GetHeartbeatInfo"),
                               ("getAllHeartbeat", "GetAllHeartbeatInfo")):
        setattr(self, attrName, self.daofactory(classname=daoClass))
def __init__(self, options):
    """
    __init__

    Store the command-line options, run setup, and build the DBSBuffer
    DAOs used to inject files.
    """
    self.configFilePath = options.configFilePath
    self.inputDataFilePath = options.inputDataFilePath
    self.filesPerBlock = options.filesPerBlock
    self.timeToClose = options.timeToClose
    self.setup()

    WMConnectionBase.__init__(self, "WMCore.WMBS")

    self.dbsFilesToCreate = []
    # Bounded caches of recently seen datasets, locations and workflows
    for cacheName in ("datasetAlgoID", "datasetAlgoPaths", "dbsLocations",
                      "workflowIDs", "workflowPaths"):
        setattr(self, cacheName, collections.deque(maxlen=1000))

    currentThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=currentThread.logger,
                                    dbinterface=currentThread.dbi)
    # DBSBuffer DAOs for file creation and bookkeeping
    for attrName, daoClass in (("dbsCreateFiles", "DBSBufferFiles.Add"),
                               ("dbsSetLocation", "DBSBufferFiles.SetLocationByLFN"),
                               ("dbsInsertLocation", "DBSBufferFiles.AddLocation"),
                               ("dbsSetChecksum", "DBSBufferFiles.AddChecksumByLFN"),
                               ("dbsSetRunLumi", "DBSBufferFiles.AddRunLumi"),
                               ("dbsInsertWorkflow", "InsertWorkflow")):
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))
def __init__(self, config, couchDbName=None):
    """
    __init__

    Resolve the couch database name and connect the jobs, fwjrs and job
    summary databases.  On any connection failure the handles are left
    as None and the error is logged.

    :param config: WMAgent configuration with a JobStateMachine section
    :param couchDbName: optional override for the couch database name
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    # Identity comparison is the idiomatic None test
    if couchDbName is None:
        self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
    else:
        self.dbname = couchDbName

    try:
        self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
        self.jobsdatabase = self.couchdb.connectDatabase("%s/jobs" % self.dbname, size=250)
        self.fwjrdatabase = self.couchdb.connectDatabase("%s/fwjrs" % self.dbname, size=250)
        self.jsumdatabase = self.couchdb.connectDatabase(getattr(self.config.JobStateMachine,
                                                                 'jobSummaryDBName'),
                                                         size=250)
    # Fixed Python-2-only "except Exception, ex" syntax; lazy logging args
    except Exception as ex:
        logging.error("Error connecting to couch: %s", str(ex))
        self.jobsdatabase = None
        self.fwjrdatabase = None
        self.jsumdatabase = None
def __init__(self, config, couchDbName=None):
    """
    __init__

    Resolve the couch database name, connect the couch databases, build
    the dashboard reporter and the job DAOs.

    :param config: WMAgent configuration with a JobStateMachine section
    :param couchDbName: optional override for the couch database name
    :raises: re-raises any exception from DashboardReporter construction
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    if couchDbName is None:
        self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
    else:
        self.dbname = couchDbName

    self.jobsdatabase = None
    self.fwjrdatabase = None
    self.jsumdatabase = None
    self.statsumdatabase = None

    self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
    self._connectDatabases()

    try:
        self.dashboardReporter = DashboardReporter(config)
    except Exception as ex:
        # Fixed log message that contained a stray line-continuation
        # artifact ("the \ - dashboard"); use lazy %-args for logging.
        logging.error("Error setting up the dashboard reporter: %s", str(ex))
        raise

    self.getCouchDAO = self.daofactory("Jobs.GetCouchID")
    self.setCouchDAO = self.daofactory("Jobs.SetCouchID")
    self.incrementRetryDAO = self.daofactory("Jobs.IncrementRetry")
    self.workflowTaskDAO = self.daofactory("Jobs.GetWorkflowTask")
    self.jobTypeDAO = self.daofactory("Jobs.GetType")
    self.updateLocationDAO = self.daofactory("Jobs.UpdateLocation")

    self.maxUploadedInputFiles = getattr(self.config.JobStateMachine,
                                         'maxFWJRInputFiles', 1000)
    return
def __init__(self, config, couchDbName=None, couchurl=None):
    """
    __init__

    Resolve the couch URL and database name for the harvesting scheduler,
    create the database if it does not exist and connect to it.  On
    failure the database handle is left as None and the error is logged.

    :param config: WMAgent configuration
    :param couchDbName: optional override for the database name
    :param couchurl: optional override for the couch URL
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    self.designDoc = "HarvestingDatasets"

    if couchDbName is None:
        self.dbname = getattr(self.config.HarvestingScheduler, "couchDBName",
                              "dqm_default")
    else:
        self.dbname = couchDbName

    # URL resolution order: explicit argument, scheduler config, JSM config
    if couchurl is not None:
        self.couchurl = couchurl
    elif getattr(self.config.HarvestingScheduler, "couchurl", None) is not None:
        self.couchurl = self.config.HarvestingScheduler.couchurl
    else:
        self.couchurl = self.config.JobStateMachine.couchurl

    try:
        self.couchdb = CouchServer(self.couchurl)
        if self.dbname not in self.couchdb.listDatabases():
            self.createDatabase()
        self.database = self.couchdb.connectDatabase(self.dbname, size=_LIMIT)
    # Fixed Python-2-only "except Exception, ex" syntax; lazy logging args
    except Exception as ex:
        logging.error("Error connecting to couch: %s", str(ex))
        self.database = None
def __init__(self, path, id=-1, processingVer=None, acquisitionEra=None, validStatus=None, globalTag=None, parent=None, prep_id=None):
    """
    Set up the DBS3Buffer database connection and store the dataset
    attributes on this dict-style object.
    """
    WMConnectionBase.__init__(self, daoPackage="WMComponent.DBS3Buffer")

    # Populate the dict-style attributes from the constructor arguments
    for key, value in (("path", path),
                       ("id", id),
                       ("processingVer", processingVer),
                       ("acquisitionEra", acquisitionEra),
                       ("validStatus", validStatus),
                       ("globalTag", globalTag),
                       ("parent", parent),
                       ("prep_id", prep_id)):
        self[key] = value
    self['subscriptions'] = []
def __init__(self, config=None):
    """
    Build the ResourceControl DAO factory plus a WMBS DAO factory that
    reuses this object's logger and database interface.
    """
    WMConnectionBase.__init__(self, daoPackage="WMCore.ResourceControl")
    self.wmbsDAOFactory = DAOFactory(package="WMCore.WMBS",
                                     logger=self.logger,
                                     dbinterface=self.dbi)
    self.config = config
def __init__(self, config=None):
    """
    _init_

    Create DAO factories for ResourceControl and WMBS and keep a
    reference to the configuration.
    """
    WMConnectionBase.__init__(self, daoPackage="WMCore.ResourceControl")
    wmbsPackage = "WMCore.WMBS"
    self.wmbsDAOFactory = DAOFactory(package=wmbsPackage,
                                     logger=self.logger,
                                     dbinterface=self.dbi)
    self.config = config
    return
def __init__(self, wmSpec, taskName, blockName=None, mask=None, cachepath='.'):
    """
    _init_

    Cache the spec and task information and build the WMBS and DBSBuffer
    DAOs needed for committing files.
    """
    self.block = blockName
    self.mask = mask
    self.wmSpec = wmSpec
    self.topLevelTask = wmSpec.getTask(taskName)
    self.cachepath = cachepath
    self.isDBS = True
    self.topLevelFileset = None
    self.topLevelSubscription = None
    self.topLevelTaskDBSBufferId = None
    self.mergeOutputMapping = {}

    # Base class provides self.daofactory for the WMCore.WMBS package
    WMConnectionBase.__init__(self, "WMCore.WMBS")
    currentThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=currentThread.logger,
                                    dbinterface=currentThread.dbi)

    # WMBS DAOs used for file commit
    for attrName, daoClass in (("setParentage", "Files.SetParentage"),
                               ("setFileLocation", "Files.SetLocationForWorkQueue"),
                               ("setFileAddChecksum", "Files.AddChecksumByLFN"),
                               ("addFileAction", "Files.Add"),
                               ("addToFileset", "Files.AddDupsToFileset"),
                               ("getLocations", "Locations.ListSites"),
                               ("getLocationInfo", "Locations.GetSiteInfo")):
        setattr(self, attrName, self.daofactory(classname=daoClass))

    # DBSBuffer DAOs
    for attrName, daoClass in (("dbsCreateFiles", "DBSBufferFiles.Add"),
                               ("dbsSetLocation", "DBSBufferFiles.SetLocationByLFN"),
                               ("dbsInsertLocation", "DBSBufferFiles.AddLocation"),
                               ("dbsSetChecksum", "DBSBufferFiles.AddChecksumByLFN"),
                               ("dbsInsertWorkflow", "InsertWorkflow")):
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))

    # File creation bookkeeping
    self.dbsFilesToCreate = set()
    self.wmbsFilesToCreate = set()
    self.insertedBogusDataset = -1
    return
def __init__(self, config, couchDbName=None):
    """
    __init__

    Resolve the couch database name, connect the couch databases, create
    the dashboard reporter, and wire up the job-related DAOs.
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    self.dbname = couchDbName if couchDbName is not None \
        else getattr(self.config.JobStateMachine, "couchDBName")

    self.jobsdatabase = None
    self.fwjrdatabase = None
    self.jsumdatabase = None
    self.statsumdatabase = None

    self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
    self._connectDatabases()

    try:
        self.dashboardReporter = DashboardReporter(config)
    except Exception as ex:
        logging.error("Error setting up the dashboard reporter: %s", str(ex))
        raise

    # Job state machine DAOs
    for attrName, daoClass in (("getCouchDAO", "Jobs.GetCouchID"),
                               ("setCouchDAO", "Jobs.SetCouchID"),
                               ("incrementRetryDAO", "Jobs.IncrementRetry"),
                               ("workflowTaskDAO", "Jobs.GetWorkflowTask"),
                               ("jobTypeDAO", "Jobs.GetType"),
                               ("updateLocationDAO", "Jobs.UpdateLocation"),
                               ("getWorkflowSpecDAO", "Workflow.GetSpecAndNameFromTask")):
        setattr(self, attrName, self.daofactory(daoClass))

    self.maxUploadedInputFiles = getattr(self.config.JobStateMachine,
                                         'maxFWJRInputFiles', 1000)
    self.workloadCache = {}
    return
def __init__(self):
    """
    ___init___

    Set up the database connection and logging attributes, and build a
    DAO factory for WMCore.WMBS.  A transaction object is created (and
    left closed) if none exists yet.
    """
    WMConnectionBase.__init__(self, daoPackage="WMCore.WMBS")
def __init__(self):
    """
    ___init___

    Initialize database connection and logging attributes via the base
    class, using the WMCore.WMBS DAO package.  The base class creates a
    closed transaction object when none is present.
    """
    WMConnectionBase.__init__(self, daoPackage="WMCore.WMBS")
def __init__(self, componentName, logger=None, dbi=None):
    """
    ___init___

    Record the component identity and set up database connection and
    logging attributes with a DAO factory for WMCore.Agent.Database.
    The base class creates a closed transaction object if none exists.
    """
    self.componentName = componentName
    self.pid = os.getpid()
    WMConnectionBase.__init__(self, daoPackage="WMCore.Agent.Database",
                              logger=logger, dbi=dbi)
def __init__(self, config, insertStates=False):
    """
    __init__

    BossAir should work with the standard config structure of WMAgent.

    :param config: WMAgent configuration with a BossAir section
    :param insertStates: passed through to loadPlugin()
    """
    WMConnectionBase.__init__(self, daoPackage="WMCore.BossAir")

    myThread = threading.currentThread()

    self.config = config
    self.plugins = {}
    self.jobs = []

    self.pluginDir = config.BossAir.pluginDir
    # This is the default state jobs are created in
    self.newState = getattr(config.BossAir, 'newState', 'New')

    # Get any proxy info
    self.checkProxy = getattr(config.BossAir, 'checkProxy', False)
    self.cert = getattr(config.BossAir, 'cert', None)

    self.stateMachine = ChangeState(self.config)

    # Create a factory to load plugins
    self.pluginFactory = WMFactory("plugins", self.pluginDir)

    self.daoFactory = DAOFactory(package="WMCore.BossAir",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)

    self.deleteDAO = self.daoFactory(classname="DeleteJobs")
    self.stateDAO = self.daoFactory(classname="NewState")
    self.loadByWMBSDAO = self.daoFactory(classname="LoadByWMBSID")
    self.updateDAO = self.daoFactory(classname="UpdateJobs")
    self.newJobDAO = self.daoFactory(classname="NewJobs")
    self.runningJobDAO = self.daoFactory(classname="LoadRunning")
    self.completeJobDAO = self.daoFactory(classname="LoadComplete")
    self.loadJobsDAO = self.daoFactory(classname="LoadByStatus")
    self.completeDAO = self.daoFactory(classname="CompleteJob")
    self.monitorDAO = self.daoFactory(classname="JobStatusForMonitoring")

    # Removed the dead "self.states = []" assignment that was
    # unconditionally overwritten here; loadPlugin() populates it.
    self.states = None
    self.loadPlugin(insertStates)

    return
def __init__(self, wmSpec, taskName, blockName=None, mask=None, cachepath='.'):
    """
    _init_

    Store the spec/task details and build the WMBS and DBSBuffer DAOs
    required for file commits.
    """
    self.block = blockName
    self.mask = mask
    self.wmSpec = wmSpec
    self.topLevelTask = wmSpec.getTask(taskName)
    self.cachepath = cachepath
    self.isDBS = True
    self.topLevelFileset = None
    self.topLevelSubscription = None
    self.topLevelTaskDBSBufferId = None
    self.mergeOutputMapping = {}

    # Initiate the pieces you need to run your own DAOs
    WMConnectionBase.__init__(self, "WMCore.WMBS")
    thisThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=thisThread.logger,
                                    dbinterface=thisThread.dbi)

    # DAOs from WMBS for file commit
    wmbsDaoMap = {"setParentage": "Files.SetParentage",
                  "setFileRunLumi": "Files.AddRunLumi",
                  "setFileLocation": "Files.SetLocationForWorkQueue",
                  "setFileAddChecksum": "Files.AddChecksumByLFN",
                  "addFileAction": "Files.Add",
                  "addToFileset": "Files.AddDupsToFileset",
                  "getLocations": "Locations.ListSites",
                  "getLocationInfo": "Locations.GetSiteInfo"}
    for attrName, daoClass in wmbsDaoMap.items():
        setattr(self, attrName, self.daofactory(classname=daoClass))

    # DAOs from DBSBuffer
    dbsDaoMap = {"dbsCreateFiles": "DBSBufferFiles.Add",
                 "dbsSetLocation": "DBSBufferFiles.SetLocationByLFN",
                 "dbsInsertLocation": "DBSBufferFiles.AddLocation",
                 "dbsSetChecksum": "DBSBufferFiles.AddChecksumByLFN",
                 "dbsInsertWorkflow": "InsertWorkflow"}
    for attrName, daoClass in dbsDaoMap.items():
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))

    # Added for file creation bookkeeping
    self.dbsFilesToCreate = []
    self.addedLocations = []
    self.wmbsFilesToCreate = []
    self.insertedBogusDataset = -1
    return
def __init__(self, componentName, logger=None, dbi=None):
    """
    ___init___

    Initialize database connection and logging attributes through the
    base class (DAO package WMCore.Agent.Database) and remember which
    component this instance belongs to.  A closed transaction object is
    created when none exists yet.
    """
    WMConnectionBase.__init__(self,
                              daoPackage="WMCore.Agent.Database",
                              logger=logger,
                              dbi=dbi)
    self.componentName = componentName
    self.pid = os.getpid()
def __init__(self, config, couchDbName=None):
    """
    __init__

    Resolve the couch database name and connect the jobs and fwjrs
    databases.  On failure the handles are left as None and the error is
    logged.

    :param config: WMAgent configuration with a JobStateMachine section
    :param couchDbName: optional override for the couch database name
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    if couchDbName is None:
        self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
    else:
        self.dbname = couchDbName

    try:
        self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
        self.jobsdatabase = self.couchdb.connectDatabase("%s/jobs" % self.dbname)
        self.fwjrdatabase = self.couchdb.connectDatabase("%s/fwjrs" % self.dbname)
    # Fixed Python-2-only "except Exception, ex" syntax; lazy logging args
    except Exception as ex:
        logging.error("Error connecting to couch: %s", str(ex))
        self.jobsdatabase = None
        self.fwjrdatabase = None
def __init__(self, config, couchDbName=None):
    """
    __init__

    Resolve the couch database name, connect the couch databases, and
    create the dashboard reporter.

    :param config: WMAgent configuration with a JobStateMachine section
    :param couchDbName: optional override for the couch database name
    :raises: re-raises any exception from DashboardReporter construction
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    if couchDbName is None:
        self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
    else:
        self.dbname = couchDbName

    self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
    self._connectDatabases()

    try:
        self.dashboardReporter = DashboardReporter(config)
    # Fixed Python-2-only "except Exception, ex" syntax and the log
    # message that carried a stray line-continuation artifact.
    except Exception as ex:
        logging.error("Error setting up the dashboard reporter: %s", str(ex))
        raise
def __init__(self, config, couchDbName=None):
    """
    __init__

    Resolve the couch database name, open the couch databases and set up
    the dashboard reporter.

    :param config: WMAgent configuration with a JobStateMachine section
    :param couchDbName: optional override for the couch database name
    :raises: re-raises any exception from DashboardReporter construction
    """
    WMObject.__init__(self, config)
    WMConnectionBase.__init__(self, "WMCore.WMBS")

    if couchDbName is None:
        self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
    else:
        self.dbname = couchDbName

    self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
    self._connectDatabases()

    try:
        self.dashboardReporter = DashboardReporter(config)
    # Fixed Python-2-only "except Exception, ex" syntax and the mangled
    # log message ("the \ - dashboard"); logging args are lazy.
    except Exception as ex:
        logging.error("Error setting up the dashboard reporter: %s", str(ex))
        raise
def __init__(self, path, id=-1, processingVer=None, acquisitionEra=None, validStatus=None, globalTag=None, parent=None):
    """
    Initialize the stored attributes and database connection.
    """
    WMConnectionBase.__init__(self, daoPackage="WMComponent.DBS3Buffer")

    # Fill out the dict-style attributes from the constructor arguments
    attributes = {'path': path,
                  'id': id,
                  'processingVer': processingVer,
                  'acquisitionEra': acquisitionEra,
                  'validStatus': validStatus,
                  'globalTag': globalTag,
                  'parent': parent}
    for key in attributes:
        self[key] = attributes[key]
    self['subscriptions'] = []
def __init__(self, componentName, pollInterval=None, logger=None, dbi=None):
    """
    ___init___

    Initialize database connection and logging attributes, create the
    heartbeat DAOs from WMCore.Agent.Database, and remember this
    component's identity.  A closed transaction object is created if
    none exists yet.
    """
    WMConnectionBase.__init__(self, daoPackage="WMCore.Agent.Database",
                              logger=logger, dbi=dbi)

    # Heartbeat DAOs
    for attrName, daoClass in (("insertComp", "InsertComponent"),
                               ("existWorker", "ExistWorker"),
                               ("insertWorker", "InsertWorker"),
                               ("updateWorker", "UpdateWorker"),
                               ("updateErrorWorker", "UpdateWorkerError"),
                               ("getHeartbeat", "GetHeartbeatInfo"),
                               ("getAllHeartbeat", "GetAllHeartbeatInfo")):
        setattr(self, attrName, self.daofactory(classname=daoClass))

    self.componentName = componentName
    self.pid = os.getpid()
    self.pollInterval = pollInterval
def __init__(self, config):
    """
    __init__

    Create all DAO objects that are used by this class, plus the state
    changer, PhEDEx handle and the bookkeeping containers used between
    commits.
    """
    WMConnectionBase.__init__(self, "WMCore.WMBS")
    myThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=myThread.logger,
                                    dbinterface=myThread.dbi)

    # WMBS DAOs
    wmbsDaos = (("getOutputMapAction", "Jobs.GetOutputMap"),
                ("bulkAddToFilesetAction", "Fileset.BulkAddByLFN"),
                ("bulkParentageAction", "Files.AddBulkParentage"),
                ("getJobTypeAction", "Jobs.GetType"),
                ("getParentInfoAction", "Files.GetParentInfo"),
                ("setParentageByJob", "Files.SetParentageByJob"),
                ("setFileRunLumi", "Files.AddRunLumi"),
                ("setFileLocation", "Files.SetLocationByLFN"),
                ("setFileAddChecksum", "Files.AddChecksumByLFN"),
                ("addFileAction", "Files.Add"),
                ("jobCompleteInput", "Jobs.CompleteInput"),
                ("setBulkOutcome", "Jobs.SetOutcomeBulk"),
                ("getWorkflowSpec", "Workflow.GetSpecAndNameFromTask"))
    for attrName, daoClass in wmbsDaos:
        setattr(self, attrName, self.daofactory(classname=daoClass))

    # DBSBuffer DAOs
    dbsDaos = (("dbsStatusAction", "DBSBufferFiles.SetStatus"),
               ("dbsParentStatusAction", "DBSBufferFiles.GetParentStatus"),
               ("dbsChildrenAction", "DBSBufferFiles.GetChildren"),
               ("dbsCreateFiles", "DBSBufferFiles.Add"),
               ("dbsSetLocation", "DBSBufferFiles.SetLocationByLFN"),
               ("dbsInsertLocation", "DBSBufferFiles.AddLocation"),
               ("dbsSetChecksum", "DBSBufferFiles.AddChecksumByLFN"),
               ("dbsSetRunLumi", "DBSBufferFiles.AddRunLumi"),
               ("insertWorkflow", "InsertWorkflow"),
               ("dbsNewAlgoAction", "NewAlgo"),
               ("dbsNewDatasetAction", "NewDataset"),
               ("dbsAssocAction", "AlgoDatasetAssoc"),
               ("dbsExistsAction", "DBSBufferFiles.ExistsForAccountant"),
               ("dbsLFNHeritage", "DBSBufferFiles.BulkHeritageParent"),
               ("dbsSetDatasetAlgoAction", "SetDatasetAlgo"))
    for attrName, daoClass in dbsDaos:
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))

    self.stateChanger = ChangeState(config)

    # Decide whether or not to attach jobReport to returned value
    self.returnJobReport = getattr(config.JobAccountant,
                                   'returnReportFromWorker', False)

    # Store location for the specs for DBS
    self.specDir = getattr(config.JobAccountant, 'specDir', None)

    # Hold data for later commital
    self.dbsFilesToCreate = []
    self.wmbsFilesToBuild = []
    self.fileLocation = None
    self.mergedOutputFiles = []
    self.listOfJobsToSave = []
    self.listOfJobsToFail = []
    self.filesetAssoc = []
    self.count = 0

    # Bounded caches of recently seen datasets/locations/workflows
    self.datasetAlgoID = collections.deque(maxlen=1000)
    self.datasetAlgoPaths = collections.deque(maxlen=1000)
    self.dbsLocations = collections.deque(maxlen=1000)
    self.workflowIDs = collections.deque(maxlen=1000)
    self.workflowPaths = collections.deque(maxlen=1000)

    self.phedex = PhEDEx()
    self.locLists = self.phedex.getNodeMap()
    return
def __init__(self, config):
    """
    __init__

    Create all DAO objects that are used by this class, along with the
    state changer, ACDC/WMStats/couch handles and the per-cycle
    bookkeeping containers.
    """
    WMConnectionBase.__init__(self, "WMCore.WMBS")
    thread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=thread.logger,
                                    dbinterface=thread.dbi)

    # WMBS DAOs
    for attrName, daoClass in (
            ("getOutputMapAction", "Jobs.GetOutputMap"),
            ("bulkAddToFilesetAction", "Fileset.BulkAddByLFN"),
            ("bulkParentageAction", "Files.AddBulkParentage"),
            ("getJobTypeAction", "Jobs.GetType"),
            ("getParentInfoAction", "Files.GetParentInfo"),
            ("setParentageByJob", "Files.SetParentageByJob"),
            ("setParentageByMergeJob", "Files.SetParentageByMergeJob"),
            ("setFileRunLumi", "Files.AddRunLumi"),
            ("setFileLocation", "Files.SetLocationByLFN"),
            ("setFileAddChecksum", "Files.AddChecksumByLFN"),
            ("addFileAction", "Files.Add"),
            ("jobCompleteInput", "Jobs.CompleteInput"),
            ("setBulkOutcome", "Jobs.SetOutcomeBulk"),
            ("getWorkflowSpec", "Workflow.GetSpecAndNameFromTask"),
            ("getJobInfoByID", "Jobs.LoadFromID"),
            ("getFullJobInfo", "Jobs.LoadForErrorHandler"),
            ("getJobTaskNameAction", "Jobs.GetFWJRTaskName")):
        setattr(self, attrName, self.daofactory(classname=daoClass))

    # PNN -> PSN mapping is resolved once at construction time
    self.pnn_to_psn = self.daofactory(
        classname="Locations.GetPNNtoPSNMapping").execute()

    # DBSBuffer DAOs
    for attrName, daoClass in (
            ("dbsStatusAction", "DBSBufferFiles.SetStatus"),
            ("dbsParentStatusAction", "DBSBufferFiles.GetParentStatus"),
            ("dbsChildrenAction", "DBSBufferFiles.GetChildren"),
            ("dbsCreateFiles", "DBSBufferFiles.Add"),
            ("dbsSetLocation", "DBSBufferFiles.SetLocationByLFN"),
            ("dbsInsertLocation", "DBSBufferFiles.AddLocation"),
            ("dbsSetChecksum", "DBSBufferFiles.AddChecksumByLFN"),
            ("dbsSetRunLumi", "DBSBufferFiles.AddRunLumi"),
            ("dbsGetWorkflow", "ListWorkflow"),
            ("dbsLFNHeritage", "DBSBufferFiles.BulkHeritageParent")):
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))

    self.stateChanger = ChangeState(config)

    # Decide whether or not to attach jobReport to returned value
    self.returnJobReport = getattr(config.JobAccountant,
                                   'returnReportFromWorker', False)

    # Store location for the specs for DBS
    self.specDir = getattr(config.JobAccountant, 'specDir', None)

    # maximum RAW EDM size for Repack output before data is put into Error dataset and skips PromptReco
    self.maxAllowedRepackOutputSize = getattr(config.JobAccountant,
                                              'maxAllowedRepackOutputSize',
                                              12 * 1024 * 1024 * 1024)

    # ACDC service
    self.dataCollection = DataCollectionService(url=config.ACDC.couchurl,
                                                database=config.ACDC.database)

    jobDBurl = sanitizeURL(config.JobStateMachine.couchurl)['url']
    jobDBName = config.JobStateMachine.couchDBName
    jobCouchdb = CouchServer(jobDBurl)
    self.fwjrCouchDB = jobCouchdb.connectDatabase("%s/fwjrs" % jobDBName)
    self.localWMStats = WMStatsWriter(config.TaskArchiver.localWMStatsURL,
                                      appName="WMStatsAgent")

    # Hold data for later commital
    self.dbsFilesToCreate = []
    self.wmbsFilesToBuild = []
    self.wmbsMergeFilesToBuild = []
    self.fileLocation = None
    self.mergedOutputFiles = []
    self.listOfJobsToSave = []
    self.listOfJobsToFail = []
    self.filesetAssoc = []
    self.parentageBinds = []
    self.parentageBindsForMerge = []
    self.jobsWithSkippedFiles = {}
    self.count = 0
    self.datasetAlgoID = collections.deque(maxlen=1000)
    self.datasetAlgoPaths = collections.deque(maxlen=1000)
    self.dbsLocations = set()
    self.workflowIDs = collections.deque(maxlen=1000)
    self.workflowPaths = collections.deque(maxlen=1000)

    self.phedex = PhEDEx()
    self.locLists = self.phedex.getNodeMap()
    return
def __init__(self, config):
    """
    __init__

    Create all DAO objects that are used by this class together with the
    state changer, PhEDEx node map and the commit bookkeeping containers.
    """
    WMConnectionBase.__init__(self, "WMCore.WMBS")
    workerThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=workerThread.logger,
                                    dbinterface=workerThread.dbi)

    # WMBS DAOs
    wmbsDaoMap = {"getOutputMapAction": "Jobs.GetOutputMap",
                  "bulkAddToFilesetAction": "Fileset.BulkAddByLFN",
                  "bulkParentageAction": "Files.AddBulkParentage",
                  "getJobTypeAction": "Jobs.GetType",
                  "getParentInfoAction": "Files.GetParentInfo",
                  "setParentageByJob": "Files.SetParentageByJob",
                  "setFileRunLumi": "Files.AddRunLumi",
                  "setFileLocation": "Files.SetLocationByLFN",
                  "setFileAddChecksum": "Files.AddChecksumByLFN",
                  "addFileAction": "Files.Add",
                  "jobCompleteInput": "Jobs.CompleteInput",
                  "setBulkOutcome": "Jobs.SetOutcomeBulk",
                  "getWorkflowSpec": "Workflow.GetSpecAndNameFromTask"}
    for attrName, daoClass in wmbsDaoMap.items():
        setattr(self, attrName, self.daofactory(classname=daoClass))

    # DBSBuffer DAOs
    dbsDaoMap = {"dbsStatusAction": "DBSBufferFiles.SetStatus",
                 "dbsParentStatusAction": "DBSBufferFiles.GetParentStatus",
                 "dbsChildrenAction": "DBSBufferFiles.GetChildren",
                 "dbsCreateFiles": "DBSBufferFiles.Add",
                 "dbsSetLocation": "DBSBufferFiles.SetLocationByLFN",
                 "dbsInsertLocation": "DBSBufferFiles.AddLocation",
                 "dbsSetChecksum": "DBSBufferFiles.AddChecksumByLFN",
                 "dbsSetRunLumi": "DBSBufferFiles.AddRunLumi",
                 "insertWorkflow": "InsertWorkflow",
                 "dbsNewAlgoAction": "NewAlgo",
                 "dbsNewDatasetAction": "NewDataset",
                 "dbsAssocAction": "AlgoDatasetAssoc",
                 "dbsExistsAction": "DBSBufferFiles.ExistsForAccountant",
                 "dbsLFNHeritage": "DBSBufferFiles.BulkHeritageParent",
                 "dbsSetDatasetAlgoAction": "SetDatasetAlgo"}
    for attrName, daoClass in dbsDaoMap.items():
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))

    self.stateChanger = ChangeState(config)

    # Decide whether or not to attach jobReport to returned value
    self.returnJobReport = getattr(config.JobAccountant,
                                   'returnReportFromWorker', False)

    # Store location for the specs for DBS
    self.specDir = getattr(config.JobAccountant, 'specDir', None)

    # Hold data for later commital
    self.dbsFilesToCreate = []
    self.wmbsFilesToBuild = []
    self.fileLocation = None
    self.mergedOutputFiles = []
    self.listOfJobsToSave = []
    self.listOfJobsToFail = []
    self.filesetAssoc = []
    self.count = 0

    # Bounded caches of recently seen datasets/locations/workflows
    self.datasetAlgoID = collections.deque(maxlen=1000)
    self.datasetAlgoPaths = collections.deque(maxlen=1000)
    self.dbsLocations = collections.deque(maxlen=1000)
    self.workflowIDs = collections.deque(maxlen=1000)
    self.workflowPaths = collections.deque(maxlen=1000)

    self.phedex = PhEDEx()
    self.locLists = self.phedex.getNodeMap()
    return
def __init__(self, config):
    """
    __init__

    Create all DAO objects that are used by this class, plus the state
    changer, ACDC data collection service, couch/WMStats handles and the
    commit bookkeeping containers.
    """
    WMConnectionBase.__init__(self, "WMCore.WMBS")
    curThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=curThread.logger,
                                    dbinterface=curThread.dbi)

    # WMBS DAOs
    for attrName, daoClass in (
            ("getOutputMapAction", "Jobs.GetOutputMap"),
            ("bulkAddToFilesetAction", "Fileset.BulkAddByLFN"),
            ("bulkParentageAction", "Files.AddBulkParentage"),
            ("getJobTypeAction", "Jobs.GetType"),
            ("getParentInfoAction", "Files.GetParentInfo"),
            ("setParentageByJob", "Files.SetParentageByJob"),
            ("setFileRunLumi", "Files.AddRunLumi"),
            ("setFileLocation", "Files.SetLocationByLFN"),
            ("setFileAddChecksum", "Files.AddChecksumByLFN"),
            ("addFileAction", "Files.Add"),
            ("jobCompleteInput", "Jobs.CompleteInput"),
            ("setBulkOutcome", "Jobs.SetOutcomeBulk"),
            ("getWorkflowSpec", "Workflow.GetSpecAndNameFromTask"),
            ("getJobInfoByID", "Jobs.LoadFromID"),
            ("getFullJobInfo", "Jobs.LoadForErrorHandler")):
        setattr(self, attrName, self.daofactory(classname=daoClass))

    # DBSBuffer DAOs
    for attrName, daoClass in (
            ("dbsStatusAction", "DBSBufferFiles.SetStatus"),
            ("dbsParentStatusAction", "DBSBufferFiles.GetParentStatus"),
            ("dbsChildrenAction", "DBSBufferFiles.GetChildren"),
            ("dbsCreateFiles", "DBSBufferFiles.Add"),
            ("dbsSetLocation", "DBSBufferFiles.SetLocationByLFN"),
            ("dbsInsertLocation", "DBSBufferFiles.AddLocation"),
            ("dbsSetChecksum", "DBSBufferFiles.AddChecksumByLFN"),
            ("dbsSetRunLumi", "DBSBufferFiles.AddRunLumi"),
            ("dbsGetWorkflow", "ListWorkflow"),
            ("dbsLFNHeritage", "DBSBufferFiles.BulkHeritageParent")):
        setattr(self, attrName, self.dbsDaoFactory(classname=daoClass))

    self.stateChanger = ChangeState(config)

    # Decide whether or not to attach jobReport to returned value
    self.returnJobReport = getattr(config.JobAccountant,
                                   'returnReportFromWorker', False)

    # Store location for the specs for DBS
    self.specDir = getattr(config.JobAccountant, 'specDir', None)

    # ACDC service
    self.dataCollection = DataCollectionService(url=config.ACDC.couchurl,
                                                database=config.ACDC.database)

    jobDBurl = sanitizeURL(config.JobStateMachine.couchurl)['url']
    jobDBName = config.JobStateMachine.couchDBName
    jobCouchdb = CouchServer(jobDBurl)
    self.fwjrCouchDB = jobCouchdb.connectDatabase("%s/fwjrs" % jobDBName)
    self.localWMStats = WMStatsWriter(config.TaskArchiver.localWMStatsURL)

    # Hold data for later commital
    self.dbsFilesToCreate = []
    self.wmbsFilesToBuild = []
    self.fileLocation = None
    self.mergedOutputFiles = []
    self.listOfJobsToSave = []
    self.listOfJobsToFail = []
    self.filesetAssoc = []
    self.parentageBinds = []
    self.jobsWithSkippedFiles = {}
    self.count = 0
    self.datasetAlgoID = collections.deque(maxlen=1000)
    self.datasetAlgoPaths = collections.deque(maxlen=1000)
    self.dbsLocations = collections.deque(maxlen=1000)
    self.workflowIDs = collections.deque(maxlen=1000)
    self.workflowPaths = collections.deque(maxlen=1000)

    self.phedex = PhEDEx()
    self.locLists = self.phedex.getNodeMap()
    return