def __init__(self, config):
    """
    Initialise class members.

    Connects to the files and config CouchDB databases, creates the dropbox
    output directory, builds the PhEDEx client and the plugin factory.
    """
    # Need a better way to test this without turning off this next line
    BaseDaemon.__init__(self, config, 'AsyncTransfer')
    self.dropbox_dir = '%s/dropbox/outputs' % self.config.componentDir
    if not os.path.isdir(self.dropbox_dir):
        try:
            os.makedirs(self.dropbox_dir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # Directory appeared concurrently; that is fine.
                pass
            else:
                # Fix: original was 'Unknown error in mkdir' % e.errno, a
                # %-format with no placeholder, which raises TypeError.
                self.logger.error('Unknown error in mkdir: %s' % e.errno)
                raise
    server = CouchServer(dburl=self.config.couch_instance,
                         ckey=self.config.opsProxy,
                         cert=self.config.opsProxy)
    self.db = server.connectDatabase(self.config.files_database)
    config_server = CouchServer(dburl=self.config.config_couch_instance)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.logger.debug('Connected to CouchDB')
    self.pool = Pool(processes=self.config.pool_size)
    try:
        self.phedex = PhEDEx(responseType='xml',
                             dict={'key': self.config.opsProxy,
                                   'cert': self.config.opsProxy})
    except Exception as e:
        self.logger.exception('PhEDEx exception: %s' % e)
    # Set up a factory for loading plugins
    self.factory = WMFactory(self.config.schedAlgoDir,
                             namespace=self.config.schedAlgoDir)


result_list = []
current_running = []
class Algo:
    """Parent class for scheduling plugins."""

    def __init__(self, config, logger, users, pool_size):
        """Store configuration and open the async CouchDB databases."""
        self.config = config
        self.logger = logger
        self.users = users
        self.pool_size = pool_size
        self.asyncServer = CouchServer(self.config.couch_instance,
                                       ckey=self.config.opsProxy,
                                       cert=self.config.opsProxy)
        self.db = self.asyncServer.connectDatabase(self.config.files_database)
        self.config_db = self.asyncServer.connectDatabase(self.config.config_database)

    def __call__(self):
        """Subclasses override this to return useful results."""
        return []

    def updateSource(self, inputDict):
        """Subclasses override this to make a specific update in the source."""
        return []
def __init__(self, config):
    """
    Initialise the Statistics worker: set up logging and connect to the
    files, config and statistics CouchDB databases.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.Statistics
    try:
        self.logger.setLevel(self.config.log_level)
    except AttributeError:
        # Fix: original bare `except:` swallowed every exception type;
        # only a missing logger should trigger this fallback.
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    server = CouchServer(dburl=self.config.couch_instance,
                         ckey=self.config.opsProxy,
                         cert=self.config.opsProxy)
    self.db = server.connectDatabase(self.config.files_database)
    config_server = CouchServer(dburl=self.config.config_couch_instance,
                                ckey=self.config.opsProxy,
                                cert=self.config.opsProxy)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.logger.debug('Connected to CouchDB')
    statserver = CouchServer(self.config.couch_statinstance)
    # NOTE(review): 'statitics_database' looks like a typo for
    # 'statistics_database', but the config attribute must match, so keep it.
    self.statdb = statserver.connectDatabase(self.config.statitics_database)
    self.logger.debug('Connected to Stat CouchDB')
    self.iteration_docs = []
    self.exptime = None
def __init__(self, config):
    """
    Initialise the Statistics worker: set up logging and connect to the
    files, config, monitoring and statistics CouchDB databases.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.Statistics
    try:
        self.logger.setLevel(self.config.log_level)
    except AttributeError:
        # Fix: original bare `except:` swallowed every exception type;
        # only a missing logger should trigger this fallback.
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    server = CouchServer(dburl=self.config.couch_instance,
                         ckey=self.config.opsProxy,
                         cert=self.config.opsProxy)
    self.db = server.connectDatabase(self.config.files_database)
    config_server = CouchServer(dburl=self.config.config_couch_instance,
                                ckey=self.config.opsProxy,
                                cert=self.config.opsProxy)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.mon_db = server.connectDatabase(self.config.mon_database)
    self.logger.debug('Connected to CouchDB')
    statserver = CouchServer(self.config.couch_statinstance)
    # NOTE(review): 'statitics_database' looks like a typo for
    # 'statistics_database', but the config attribute must match, so keep it.
    self.statdb = statserver.connectDatabase(self.config.statitics_database)
    self.logger.debug('Connected to Stat CouchDB')
    self.iteration_docs = []
    self.exptime = None
def __init__(self, config):
    """
    Initialise class members: logging, CouchDB connections, the worker
    pool and the PhEDEx client.
    """
    # Need a better way to test this without turning off this next line
    BaseWorkerThread.__init__(self)
    # self.logger is set up by the BaseWorkerThread, we just set its level
    self.config = config.AsyncTransfer
    try:
        self.logger.setLevel(self.config.log_level)
    except AttributeError:
        # Fix: original bare `except:` swallowed every exception type;
        # only a missing logger should trigger this fallback.
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug("Configuration loaded")
    server = CouchServer(dburl=self.config.couch_instance,
                         ckey=self.config.opsProxy,
                         cert=self.config.opsProxy)
    self.db = server.connectDatabase(self.config.files_database)
    config_server = CouchServer(dburl=self.config.config_couch_instance)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.logger.debug("Connected to CouchDB")
    self.pool = Pool(processes=self.config.pool_size)
    try:
        self.phedex = PhEDEx(responseType="xml")
    except Exception as e:
        # Fix: modernised legacy `except Exception, e` syntax.
        self.logger.exception("PhEDEx exception: %s" % e)
def __init__(self, config):
    """
    Initialise class members: logging, CouchDB connections, the worker
    pool and the PhEDEx client.
    """
    # Need a better way to test this without turning off this next line
    BaseWorkerThread.__init__(self)
    # self.logger is set up by the BaseWorkerThread, we just set its level
    self.config = config.AsyncTransfer
    try:
        self.logger.setLevel(self.config.log_level)
    except AttributeError:
        # Fix: original bare `except:` swallowed every exception type;
        # only a missing logger should trigger this fallback.
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    server = CouchServer(dburl=self.config.couch_instance,
                         ckey=self.config.opsProxy,
                         cert=self.config.opsProxy)
    self.db = server.connectDatabase(self.config.files_database)
    config_server = CouchServer(dburl=self.config.config_couch_instance)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.logger.debug('Connected to CouchDB')
    self.pool = Pool(processes=self.config.pool_size)
    try:
        self.phedex = PhEDEx(responseType='xml')
    except Exception as e:
        # Fix: modernised legacy `except Exception, e` syntax.
        self.logger.exception('PhEDEx exception: %s' % e)
class Algo:
    """Parent class for scheduling plugins."""

    def __init__(self, config, logger, users, pool_size):
        """Store configuration and open the async CouchDB databases."""
        self.config = config
        self.logger = logger
        self.users = users
        self.pool_size = pool_size
        self.asyncServer = CouchServer(self.config.couch_instance)
        self.db = self.asyncServer.connectDatabase(self.config.files_database)
        self.config_db = self.asyncServer.connectDatabase(self.config.config_database)

    def __call__(self):
        """Subclasses override this to return useful results."""
        return []

    def updateSource(self, inputDict):
        """Subclasses override this to make a specific update in the source."""
        return []
class ChangeState(WMObject, WMConnectionBase):
    """
    Propagate the state of a job through the JSM.
    """

    def __init__(self, config, couchDbName=None):
        """
        Connect to the jobs/fwjrs/jobsummary CouchDB databases and set up
        the dashboard reporter. Database handles are None on failure.
        """
        WMObject.__init__(self, config)
        WMConnectionBase.__init__(self, "WMCore.WMBS")
        if couchDbName is None:
            self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
        else:
            self.dbname = couchDbName
        try:
            self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
            self.jobsdatabase = self.couchdb.connectDatabase(
                "%s/jobs" % self.dbname, size=250)
            self.fwjrdatabase = self.couchdb.connectDatabase(
                "%s/fwjrs" % self.dbname, size=250)
            self.jsumdatabase = self.couchdb.connectDatabase(
                getattr(self.config.JobStateMachine, 'jobSummaryDBName'),
                size=250)
        except Exception as ex:
            # Fix: modernised legacy `except Exception, ex` syntax.
            logging.error("Error connecting to couch: %s" % str(ex))
            self.jobsdatabase = None
            self.fwjrdatabase = None
            self.jsumdatabase = None
        try:
            self.dashboardReporter = DashboardReporter(config)
        except Exception as ex:
            # Fix: original message contained a stray backslash/whitespace
            # run from a line continuation inside the string literal.
            logging.error("Error setting up the dashboard reporter: %s" % str(ex))
def testB_testErrors(self):
    """
    _testErrors_

    Test with a failed FWJR: run the TaskArchiver and verify the error
    and failures-by-site information in the workload summary document.
    """
    myThread = threading.currentThread()
    config = self.getConfig()
    workloadPath = os.path.join(self.testDir, 'specDir', 'spec.pkl')
    workload = self.createWorkload(workloadName=workloadPath)
    testJobGroup = self.createTestJobGroup(config=config,
                                           name=workload.name(),
                                           specLocation=workloadPath,
                                           error=True)
    cachePath = os.path.join(config.JobCreator.jobCacheDir,
                             "TestWorkload", "ReReco")
    os.makedirs(cachePath)
    self.assertTrue(os.path.exists(cachePath))
    couchdb = CouchServer(config.JobStateMachine.couchurl)
    jobdb = couchdb.connectDatabase("%s/jobs" % self.databaseName)
    fwjrdb = couchdb.connectDatabase("%s/fwjrs" % self.databaseName)
    jobdb.loadView("JobDump", "jobsByWorkflowName",
                   options={"startkey": [workload.name()],
                            "endkey": [workload.name(), {}]})['rows']
    fwjrdb.loadView("FWJRDump", "fwjrsByWorkflowName",
                    options={"startkey": [workload.name()],
                             "endkey": [workload.name(), {}]})['rows']
    testTaskArchiver = TaskArchiverPoller(config=config)
    testTaskArchiver.algorithm()
    dbname = getattr(config.JobStateMachine, "couchDBName")
    workdatabase = couchdb.connectDatabase("%s/workloadsummary" % dbname)
    workloadSummary = workdatabase.document(id=workload.name())
    self.assertEqual(workloadSummary['errors']['/TestWorkload/ReReco']['failureTime'], 500)
    # Fix: dict.has_key() is deprecated (removed in Python 3); use `in`.
    self.assertTrue('99999' in workloadSummary['errors']['/TestWorkload/ReReco']['cmsRun1'])
    failedRunInfo = workloadSummary['errors']['/TestWorkload/ReReco']['cmsRun1']['99999']['runs']
    for key, value in failedRunInfo.items():
        failedRunInfo[key] = list(set(value))
    # Fix: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(failedRunInfo, {'10': [12312]},
                     "Wrong lumi information in the summary for failed jobs")
    # Check the failures by site histograms
    self.assertEqual(workloadSummary['histograms']['workflowLevel']['failuresBySite']['data']['T1_IT_CNAF']['Failed Jobs'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['data']['T1_IT_CNAF']['99999'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['data']['T1_IT_CNAF']['8020'], 10)
    self.assertEqual(workloadSummary['histograms']['workflowLevel']['failuresBySite']['average']['Failed Jobs'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['average']['99999'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['average']['8020'], 10)
    self.assertEqual(workloadSummary['histograms']['workflowLevel']['failuresBySite']['stdDev']['Failed Jobs'], 0)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['stdDev']['99999'], 0)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['stdDev']['8020'], 0)
    return
class ChangeState(WMObject, WMConnectionBase):
    """
    Propagate the state of a job through the JSM.
    """

    def __init__(self, config, couchDbName=None):
        """
        Connect to the jobs/fwjrs CouchDB databases and set up the
        dashboard reporter. Database handles are None on failure.
        """
        WMObject.__init__(self, config)
        WMConnectionBase.__init__(self, "WMCore.WMBS")
        if couchDbName is None:
            self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
        else:
            self.dbname = couchDbName
        try:
            self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
            self.jobsdatabase = self.couchdb.connectDatabase("%s/jobs" % self.dbname)
            self.fwjrdatabase = self.couchdb.connectDatabase("%s/fwjrs" % self.dbname)
        except Exception as ex:
            # Fix: modernised legacy `except Exception, ex` syntax.
            logging.error("Error connecting to couch: %s" % str(ex))
            self.jobsdatabase = None
            self.fwjrdatabase = None
        try:
            self.dashboardReporter = DashboardReporter(config)
        except Exception as ex:
            # Fix: original message contained a stray backslash/whitespace
            # run from a line continuation inside the string literal.
            logging.error("Error setting up the dashboard reporter: %s" % str(ex))
class ChangeState(WMObject, WMConnectionBase):
    """
    Propagate the state of a job through the JSM.
    """

    def __init__(self, config, couchDbName=None):
        """
        Connect to the jobs/fwjrs CouchDB databases and build the WMBS
        DAOs used to track couch document ids, retries and workflow tasks.
        """
        WMObject.__init__(self, config)
        WMConnectionBase.__init__(self, "WMCore.WMBS")
        if couchDbName is None:
            self.dbname = getattr(self.config.JobStateMachine, "couchDBName")
        else:
            self.dbname = couchDbName
        try:
            self.couchdb = CouchServer(self.config.JobStateMachine.couchurl)
            self.jobsdatabase = self.couchdb.connectDatabase("%s/jobs" % self.dbname)
            self.fwjrdatabase = self.couchdb.connectDatabase("%s/fwjrs" % self.dbname)
        except Exception as ex:
            # Fix: modernised legacy `except Exception, ex` syntax.
            logging.error("Error connecting to couch: %s" % str(ex))
            self.jobsdatabase = None
            self.fwjrdatabase = None
        self.getCouchDAO = self.daofactory("Jobs.GetCouchID")
        self.setCouchDAO = self.daofactory("Jobs.SetCouchID")
        self.incrementRetryDAO = self.daofactory("Jobs.IncrementRetry")
        self.workflowTaskDAO = self.daofactory("Jobs.GetWorkflowTask")
        # Cap on how many FWJR input files get uploaded per report.
        self.maxUploadedInputFiles = getattr(self.config.JobStateMachine,
                                             'maxFWJRInputFiles', 1000)
        return
class WorkQueueBackend(object):
    """
    Represents persistent storage for WorkQueue
    """

    def __init__(self, db_url, db_name='workqueue', inbox_name=None,
                 parentQueue=None, queueUrl=None, logger=None):
        """
        Connect to the workqueue and inbox databases; keep both the
        credential-bearing URLs (for replication) and sanitized ones.
        """
        if logger:
            self.logger = logger
        else:
            import logging
            self.logger = logging
        if inbox_name is None:
            inbox_name = "%s_inbox" % db_name
        self.server = CouchServer(db_url)
        self.parentCouchUrlWithAuth = parentQueue
        if parentQueue:
            self.parentCouchUrl = sanitizeURL(parentQueue)['url']
        else:
            self.parentCouchUrl = None
        self.db = self.server.connectDatabase(db_name, create=False, size=10000)
        self.hostWithAuth = db_url
        self.inbox = self.server.connectDatabase(inbox_name, create=False, size=10000)
        self.queueUrl = sanitizeURL(queueUrl or (db_url + '/' + db_name))['url']

    def forceQueueSync(self):
        """Force a blocking replication - for use mainly in tests"""
        self.pullFromParent(continuous=False)
        self.sendToParent(continuous=False)

    def pullFromParent(self, continuous=True, cancel=False):
        """Replicate from parent couch - blocking"""
        try:
            if self.parentCouchUrl and self.queueUrl:
                self.server.replicate(source=self.parentCouchUrl,
                                      destination="%s/%s" % (self.hostWithAuth,
                                                             self.inbox.name),
                                      filter='WorkQueue/queueFilter',
                                      query_params={'childUrl': self.queueUrl,
                                                    'parentUrl': self.parentCouchUrl},
                                      continuous=continuous,
                                      cancel=cancel,
                                      useReplicator=True)
        except Exception as ex:
            # Fix: modernised legacy `except Exception, ex` syntax.
            self.logger.warning('Replication from %s failed: %s'
                                % (self.parentCouchUrl, str(ex)))
def testB_testErrors(self):
    """
    _testErrors_

    Test with a failed FWJR: run the TaskArchiver and verify the error
    and failures-by-site information in the workload summary document.
    """
    myThread = threading.currentThread()
    config = self.getConfig()
    workloadPath = os.path.join(self.testDir, 'specDir', 'spec.pkl')
    workload = self.createWorkload(workloadName=workloadPath)
    testJobGroup = self.createTestJobGroup(config=config,
                                           name=workload.name(),
                                           specLocation=workloadPath,
                                           error=True)
    cachePath = os.path.join(config.JobCreator.jobCacheDir,
                             "TestWorkload", "ReReco")
    os.makedirs(cachePath)
    self.assertTrue(os.path.exists(cachePath))
    couchdb = CouchServer(config.JobStateMachine.couchurl)
    jobdb = couchdb.connectDatabase("%s/jobs" % self.databaseName)
    fwjrdb = couchdb.connectDatabase("%s/fwjrs" % self.databaseName)
    jobdb.loadView("JobDump", "jobsByWorkflowName",
                   options={"startkey": [workload.name()],
                            "endkey": [workload.name(), {}]})['rows']
    fwjrdb.loadView("FWJRDump", "fwjrsByWorkflowName",
                    options={"startkey": [workload.name()],
                             "endkey": [workload.name(), {}]})['rows']
    testTaskArchiver = TaskArchiverPoller(config=config)
    testTaskArchiver.algorithm()
    dbname = getattr(config.JobStateMachine, "couchDBName")
    workdatabase = couchdb.connectDatabase("%s/workloadsummary" % dbname)
    workloadSummary = workdatabase.document(id=workload.name())
    self.assertEqual(workloadSummary['errors']['/TestWorkload/ReReco']['failureTime'], 500)
    # Fix: dict.has_key() is deprecated (removed in Python 3); use `in`.
    self.assertTrue('99999' in workloadSummary['errors']['/TestWorkload/ReReco']['cmsRun1'])
    # Fix: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(workloadSummary['errors']['/TestWorkload/ReReco']['cmsRun1']['99999']['runs'],
                     {'10': [12312]},
                     "Wrong lumi information in the summary for failed jobs")
    # Check the failures by site histograms
    self.assertEqual(workloadSummary['histograms']['workflowLevel']['failuresBySite']['data']['T1_IT_CNAF']['Failed Jobs'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['data']['T1_IT_CNAF']['99999'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['data']['T1_IT_CNAF']['8020'], 10)
    self.assertEqual(workloadSummary['histograms']['workflowLevel']['failuresBySite']['average']['Failed Jobs'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['average']['99999'], 10)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['average']['8020'], 10)
    self.assertEqual(workloadSummary['histograms']['workflowLevel']['failuresBySite']['stdDev']['Failed Jobs'], 0)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['stdDev']['99999'], 0)
    self.assertEqual(workloadSummary['histograms']['stepLevel']['/TestWorkload/ReReco']['cmsRun1']['errorsBySite']['stdDev']['8020'], 0)
    return
def __init__(self, config):
    """Connect the Analytics daemon to the files, config and monitoring DBs."""
    BaseDaemon.__init__(self, config, 'Analytics')
    # Local async database with the transfer documents.
    files_server = CouchServer(dburl=self.config.couch_instance,
                               ckey=self.config.opsProxy,
                               cert=self.config.opsProxy)
    self.db = files_server.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to local couchDB')
    # Component configuration database.
    cfg_server = CouchServer(dburl=self.config.config_couch_instance,
                             ckey=self.config.opsProxy,
                             cert=self.config.opsProxy)
    self.config_db = cfg_server.connectDatabase(self.config.config_database)
    self.amq_auth_file = self.config.amq_auth_file
    # User monitoring database.
    mon_server = CouchServer(dburl=self.config.couch_user_monitoring_instance,
                             ckey=self.config.opsProxy,
                             cert=self.config.opsProxy)
    self.monitoring_db = mon_server.connectDatabase(self.config.user_monitoring_db)
    self.logger.debug('Connected to user_monitoring_db in couchDB')
class WMLoggingTest(unittest.TestCase):
    """Exercise the CouchDB logging handler against a scratch database."""

    def setUp(self):
        # Make an instance of the server
        self.server = CouchServer(
            os.getenv("COUCHURL", 'http://*****:*****@localhost:5984'))
        testname = self.id().split('.')[-1]
        # Create a database, drop an existing one first
        self.dbname = 'cmscouch_unittest_%s' % testname.lower()
        if self.dbname in self.server.listDatabases():
            self.server.deleteDatabase(self.dbname)
        self.server.createDatabase(self.dbname)
        self.db = self.server.connectDatabase(self.dbname)

    def tearDown(self):
        # This used to test self._exc_info to only run on success.
        # Broke in 2.7. Removed.
        self.server.deleteDatabase(self.dbname)

    def testLog(self):
        """
        Write ten log messages to the database at three different levels
        """
        my_logger = logging.getLogger('MyLogger')
        my_logger.setLevel(logging.DEBUG)
        handler = CouchHandler(self.server.url, self.dbname)
        handler.setFormatter(logging.Formatter('%(message)s'))
        my_logger.addHandler(handler)
        for _ in range(10):
            my_logger.debug('This is probably all noise.')
            my_logger.info('Jackdaws love my big sphinx of quartz.')
            my_logger.error('HOLLY CRAP!')
        logs = self.db.allDocs()['rows']
        self.assertEqual(30, len(logs))
def __init__(self, config, logger):
    """Open the CRAB async files database and reset accumulators."""
    self.config = config.CRABAsyncTransfer
    couch = CouchServer(dburl=self.config.couch_instance,
                        ckey=self.config.opsProxy,
                        cert=self.config.opsProxy)
    self.db = couch.connectDatabase(self.config.files_database)
    self.logger = logger
    self.size = 0
    self.result = []
def setUp(self):
    """
    _setUp_

    Setup couchdb and the test environment
    """
    super(ResubmitBlockTest, self).setUp()
    self.group = 'unknown'
    self.user = '******'
    # Set external test helpers
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setupCouch("resubmitblock_t", "ACDC", "GroupUser")
    EmulatorHelper.setEmulators(phedex=False, dbs=False,
                                siteDB=True, requestMgr=False)
    # Define test environment
    self.couchUrl = os.environ["COUCHURL"]
    self.acdcDBName = 'resubmitblock_t'
    self.validLocations = ['T2_US_Nebraska', 'T1_US_FNAL', 'T1_UK_RAL']
    self.siteWhitelist = ['T2_XX_SiteA']
    self.workflowName = 'dballest_ReReco_workflow'
    couch = CouchServer(dburl=self.couchUrl)
    self.acdcDB = couch.connectDatabase(self.acdcDBName, create=False)
    test_user = makeUser(self.group, '*****@*****.**',
                         self.couchUrl, self.acdcDBName)
    test_user.create()
    return
def __init__(self, config):
    """
    Initialise the FilesCleaner worker: logging, config database,
    PhEDEx client and the per-month ops log directory.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.FilesCleaner
    self.logger.debug('Configuration loaded')
    try:
        self.logger.setLevel(self.config.log_level)
    except AttributeError:
        # Fix: original bare `except:` swallowed every exception type;
        # only a missing logger should trigger this fallback.
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    config_server = CouchServer(dburl=self.config.config_couch_instance)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.logger.debug('Connected to files DB')
    self.phedex = PhEDEx(responseType='xml')
    self.log_dir = '%s/logs/%s/%s/%s' % (self.config.componentDir,
                                         str(datetime.datetime.now().month),
                                         str(datetime.datetime.now().year),
                                         "Ops")
    try:
        os.makedirs(self.log_dir)
    except OSError as e:
        # Fix: modernised legacy `except OSError, e` syntax.
        if e.errno == errno.EEXIST:
            pass
        else:
            # Fix: original was 'Unknown error in mkdir' % e.errno, a
            # %-format with no placeholder, which raises TypeError.
            self.logger.error('Unknown error in mkdir: %s' % e.errno)
            raise
def atestB_testErrors(self):
    """
    _testErrors_

    Test with a failed FWJR: run the TaskArchiver and check the failure
    information recorded in the workload summary document.
    """
    myThread = threading.currentThread()
    config = self.getConfig()
    workloadPath = os.path.join(self.testDir, "specDir", "spec.pkl")
    workload = self.createWorkload(workloadName=workloadPath)
    testJobGroup = self.createTestJobGroup(config=config,
                                           name=workload.name(),
                                           specLocation=workloadPath,
                                           error=True)
    cachePath = os.path.join(config.JobCreator.jobCacheDir,
                             "TestWorkload", "ReReco")
    os.makedirs(cachePath)
    self.assertTrue(os.path.exists(cachePath))
    testTaskArchiver = TaskArchiverPoller(config=config)
    testTaskArchiver.algorithm()
    dbname = getattr(config.JobStateMachine, "couchDBName")
    couchdb = CouchServer(config.JobStateMachine.couchurl)
    workdatabase = couchdb.connectDatabase(dbname)
    workloadSummary = workdatabase.document(id="TestWorkload")
    self.assertEqual(workloadSummary["/TestWorkload/ReReco"]["failureTime"], 500)
    # Fix: dict.has_key() is deprecated (removed in Python 3); use `in`.
    self.assertTrue("99999" in workloadSummary["/TestWorkload/ReReco"]["cmsRun1"])
    return
def saveCouch(self, couchUrl, couchDBName, metadata=None):
    """
    Save this spec in CouchDB. Returns URL
    """
    from WMCore.Database.CMSCouch import CouchServer, CouchInternalServerError
    metadata = metadata or {}
    database = CouchServer(couchUrl).connectDatabase(couchDBName)
    name = self.name()
    uri = '/%s/%s' % (couchDBName, name)
    specuri = uri + '/spec'
    if database.documentExists(name):
        # Document already present: fetch its current revision.
        rev = database.document(name)['_rev']
    else:
        # First save: create the document and record its spec URL.
        self.setSpecUrl(couchUrl + specuri)
        created = database.put(uri, data=metadata, contentType='application/json')
        rev = created['rev']
    workloadString = pickle.dumps(self.data)
    retval = database.addAttachment(name, rev, workloadString, 'spec')
    if retval.get('ok', False) is not True:
        msg = "Failed to save a spec attachment in CouchDB for %s" % name
        raise CouchInternalServerError(msg, data=None, result=retval)
    return couchUrl + specuri
def setUp(self):
    """
    _setUp_

    Setup couchdb and the test environment
    """
    # Set external test helpers
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setupCouch("resubmitblock_t", "ACDC", "GroupUser")
    EmulatorHelper.setEmulators(siteDB=True)
    # Define test environment
    self.couchUrl = os.environ["COUCHURL"]
    self.acdcDBName = 'resubmitblock_t'
    self.validLocations = ['srm-cms.gridpp.rl.ac.uk',
                           'cmssrm.fnal.gov',
                           'srm.unl.edu']
    self.validLocationsCMSNames = ['T2_US_Nebraska', 'T1_US_FNAL', 'T1_UK_RAL']
    self.siteWhitelist = ['T2_XX_SiteA']
    self.workflowName = 'dballest_ReReco_workflow'
    couch = CouchServer(dburl=self.couchUrl)
    self.acdcDB = couch.connectDatabase(self.acdcDBName, create=False)
    test_user = makeUser('unknown', '*****@*****.**',
                         self.couchUrl, self.acdcDBName)
    test_user.create()
    return
def saveCouch(self, couchUrl, couchDBName, metadata=None):
    """
    Save this spec in CouchDB. Returns URL
    """
    from WMCore.Database.CMSCouch import CouchServer, CouchInternalServerError
    metadata = metadata or {}
    database = CouchServer(couchUrl).connectDatabase(couchDBName)
    name = self.name()
    uri = '/%s/%s' % (couchDBName, name)
    specuri = uri + '/spec'
    if database.documentExists(name):
        # Document already present: fetch its current revision.
        rev = database.document(name)['_rev']
    else:
        # First save: create the document and record its spec URL.
        self.setSpecUrl(couchUrl + specuri)
        created = database.put(uri, data=metadata, contentType='application/json')
        rev = created['rev']
    # FIXME: once both central services and WMAgent are in Py3, we can remove protocol=0
    workloadString = pickle.dumps(self.data, protocol=0)
    retval = database.addAttachment(name, rev, workloadString, 'spec')
    if retval.get('ok', False) is not True:
        msg = "Failed to save a spec attachment in CouchDB for %s" % name
        raise CouchInternalServerError(msg, data=None, result=retval)
    return couchUrl + specuri
def setUp(self):
    """
    _setUp_

    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("promptreco_t", "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    self.configDatabase = CouchServer(os.environ["COUCHURL"]).connectDatabase("promptreco_t")
    self.testDir = self.testInit.generateWorkDir()
    current_thread = threading.currentThread()
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=current_thread.logger,
                                 dbinterface=current_thread.dbi)
    self.listTasksByWorkflow = self.daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = self.daoFactory(classname="Fileset.List")
    self.listSubsMapping = self.daoFactory(
        classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    return
def setUp(self):
    """
    _setUp_

    Create two subscriptions: One that contains a single file and one that
    contains multiple files.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.couchUrl = os.environ["COUCHURL"]
    self.couchDBName = "acdc_event_based_t"
    self.testInit.setupCouch(self.couchDBName, "GroupUser", "ACDC")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    couch_server = CouchServer(dburl=self.couchUrl)
    self.couchDB = couch_server.connectDatabase(self.couchDBName)
    self.populateWMBS()
    self.performanceParams = {'timePerEvent': 12,
                              'memoryRequirement': 2300,
                              'sizePerEvent': 400}
    return
class DQMCouchAPI(WMObject, WMConnectionBase):
    """
    Update the harvesting status of a dataset in CouchDB
    """

    def __init__(self, config, couchDbName=None, couchurl=None):
        """
        Resolve the database name and couch URL (explicit arguments win
        over HarvestingScheduler config, which wins over JobStateMachine),
        then connect, creating the database if needed.
        """
        WMObject.__init__(self, config)
        WMConnectionBase.__init__(self, "WMCore.WMBS")
        self.designDoc = "HarvestingDatasets"
        if couchDbName is None:
            self.dbname = getattr(self.config.HarvestingScheduler,
                                  "couchDBName", "dqm_default")
        else:
            self.dbname = couchDbName
        if couchurl is not None:
            self.couchurl = couchurl
        elif getattr(self.config.HarvestingScheduler, "couchurl", None) is not None:
            self.couchurl = self.config.HarvestingScheduler.couchurl
        else:
            self.couchurl = self.config.JobStateMachine.couchurl
        try:
            self.couchdb = CouchServer(self.couchurl)
            if self.dbname not in self.couchdb.listDatabases():
                self.createDatabase()
            # _LIMIT is a module-level queue-size constant.
            self.database = self.couchdb.connectDatabase(self.dbname, size=_LIMIT)
        except Exception as ex:
            # Fix: modernised legacy `except Exception, ex` syntax.
            logging.error("Error connecting to couch: %s" % str(ex))
            self.database = None
        return
def setUp(self):
    """
    _setUp_

    Setup couchdb and the test environment
    """
    super(ResubmitBlockTest, self).setUp()
    self.group = 'unknown'
    self.user = '******'
    # Set external test helpers
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setupCouch("resubmitblock_t", "ACDC", "GroupUser")
    # Define test environment
    self.couchUrl = os.environ["COUCHURL"]
    self.acdcDBName = 'resubmitblock_t'
    self.validLocations = ['T2_US_Nebraska', 'T1_US_FNAL_Disk', 'T1_UK_RAL_Disk']
    self.siteWhitelist = ['T2_XX_SiteA']
    # Convert phedex node name to a valid processing site name
    self.PSNs = CRIC().PNNstoPSNs(self.validLocations)
    self.workflowName = 'dballest_ReReco_workflow'
    couch = CouchServer(dburl=self.couchUrl)
    self.acdcDB = couch.connectDatabase(self.acdcDBName, create=False)
    test_user = makeUser(self.group, '*****@*****.**',
                         self.couchUrl, self.acdcDBName)
    test_user.create()
    return
def __init__(self, config):
    """
    Initialise class members: pick the backing store (Oracle or CouchDB),
    then load the configured retry-algorithm plugin.
    """
    BaseDaemon.__init__(self, config, 'RetryManager')
    if self.config.isOracle:
        self.oracleDB = HTTPRequests(self.config.oracleDB,
                                     self.config.opsProxy,
                                     self.config.opsProxy)
    else:
        try:
            couch = CouchServer(dburl=self.config.couch_instance,
                                ckey=self.config.opsProxy,
                                cert=self.config.opsProxy)
            self.db = couch.connectDatabase(self.config.files_database)
        except Exception as e:
            self.logger.exception('A problem occured when connecting to couchDB: %s' % e)
            raise
        self.logger.debug('Connected to files DB')
    # Set up a factory for loading plugins
    self.factory = WMFactory(self.config.retryAlgoDir,
                             namespace=self.config.retryAlgoDir)
    try:
        self.plugin = self.factory.loadObject(self.config.algoName,
                                              self.config,
                                              getFromCache=False,
                                              listFlag=True)
    except Exception as ex:
        msg = "Error loading plugin %s on path %s\n" % (self.config.algoName,
                                                        self.config.retryAlgoDir)
        msg += str(ex)
        self.logger.error(msg)
        raise RetryManagerException(msg)
    self.cooloffTime = self.config.cooloffTime
def __init__(self, config, logger):
    """Open the CRAB async files database and reset accumulators."""
    self.config = config.CRABAsyncTransfer
    couch = CouchServer(self.config.couch_instance)
    self.db = couch.connectDatabase(self.config.files_database)
    self.logger = logger
    self.size = 0
    self.result = []
def __init__(self, config):
    """
    Initialise the FilesCleaner worker: logging, config database,
    PhEDEx client and the per-month ops log directory.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.FilesCleaner
    self.logger.debug('Configuration loaded')
    try:
        self.logger.setLevel(self.config.log_level)
    except AttributeError:
        # Fix: original bare `except:` swallowed every exception type;
        # only a missing logger should trigger this fallback.
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    config_server = CouchServer(dburl=self.config.config_couch_instance)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.logger.debug('Connected to files DB')
    self.phedex = PhEDEx(responseType='xml')
    self.log_dir = '%s/logs/%s/%s/%s' % (self.config.componentDir,
                                         str(datetime.datetime.now().month),
                                         str(datetime.datetime.now().year),
                                         "Ops")
    try:
        os.makedirs(self.log_dir)
    except OSError as e:
        # Fix: modernised legacy `except OSError, e` syntax.
        if e.errno == errno.EEXIST:
            pass
        else:
            # Fix: original was 'Unknown error in mkdir' % e.errno, a
            # %-format with no placeholder, which raises TypeError.
            self.logger.error('Unknown error in mkdir: %s' % e.errno)
            raise
def resubmitCouchPublication(self, asourl, asodb, proxy, taskname):
    """
    Resubmit failed publications by resetting the publication status in the
    CouchDB documents.
    """
    server = CouchServer(dburl=asourl, ckey=proxy, cert=proxy)
    try:
        database = server.connectDatabase(asodb)
    except Exception as ex:
        msg = "Error while trying to connect to CouchDB: %s" % (str(ex))
        raise Exception(msg)
    view_opts = {'reduce': False,
                 'startkey': [taskname],
                 'endkey': [taskname, {}]}
    try:
        failedPublications = database.loadView('DBSPublisher',
                                               'PublicationFailedByWorkflow',
                                               view_opts)['rows']
    except Exception as ex:
        msg = "Error while trying to load view 'DBSPublisher.PublicationFailedByWorkflow' from CouchDB: %s" % (str(ex))
        raise Exception(msg)
    self.logger.info("There are %d failed publications to resubmit: %s"
                     % (len(failedPublications), failedPublications))
    for doc in failedPublications:
        docid = doc['id']
        if doc['key'][0] != taskname:
            # this should never happen...
            self.logger.warning("Skipping document %s as it seems to correspond to another task: %s"
                                % (docid, doc['key'][0]))
            continue
        data = {'last_update': time.time(),
                'retry': str(datetime.datetime.now()),
                'publication_state': 'not_published'}
        try:
            database.updateDocument(docid, 'DBSPublisher', 'updateFile', data)
            self.logger.info("updating document %s ", docid)
        except Exception as ex:
            self.logger.error("Error updating document %s in CouchDB: %s",
                              docid, str(ex))
    return
def __init__(self, config):
    """
    Load the AsyncTransfer configuration, configure logging, prepare the
    plugin factory and connect to the asynchronous files CouchDB.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.AsyncTransfer
    # self.logger is set up by the BaseWorkerThread, we just set it's level;
    # fall back to the root logger if it is missing.
    try:
        self.logger.setLevel(self.config.log_level)
    except:
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    # Factory used to load transfer plugins by namespace at runtime
    self.factory = WMFactory(self.config.pluginDir, namespace=self.config.pluginDir)
    # Asynch db
    couch = CouchServer(dburl=self.config.couch_instance,
                        ckey=self.config.opsProxy,
                        cert=self.config.opsProxy)
    self.db = couch.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to CouchDB')
    return
def testMultiCoreReport(self):
    """
    _testMultiCoreReport_

    Verify that multicore reports can be json encoded and uploaded to couch.
    """
    couchdb = CouchServer(os.environ["COUCHURL"])
    fwjrdatabase = couchdb.connectDatabase("report_t/fwjrs")
    # Load the pickled multicore framework job report shipped with the tests
    self.mcPath = os.path.join(WMCore.WMBase.getTestBase(),
                               "WMCore_t/FwkJobReport_t/MulticoreReport.pkl")
    myReport = Report()
    myReport.unpersist(self.mcPath)
    # Build the couch document exactly as the JobStateMachine would
    fwjrDocument = dict(_id="303-0",
                        jobid=303,
                        retrycount=0,
                        fwjr=myReport.__to_json__(None),
                        type="fwjr")
    fwjrdatabase.queue(fwjrDocument, timestamp=True)
    fwjrdatabase.commit()
    return
def resubmitCouchPublication(self, asourl, asodb, proxy, taskname):
    """
    Resubmit failed publications by resetting the publication status
    in the CouchDB documents.
    """
    srv = CouchServer(dburl=asourl, ckey=proxy, cert=proxy)
    try:
        db = srv.connectDatabase(asodb)
    except Exception as ex:
        raise Exception("Error while trying to connect to CouchDB: %s" % (str(ex)))
    # All failed publications for this task, keyed by workflow name
    options = {'reduce': False, 'startkey': [taskname], 'endkey': [taskname, {}]}
    try:
        failedPublications = db.loadView('DBSPublisher', 'PublicationFailedByWorkflow', options)['rows']
    except Exception as ex:
        raise Exception("Error while trying to load view 'DBSPublisher.PublicationFailedByWorkflow' from CouchDB: %s" % (str(ex)))
    self.logger.info("There are %d failed publications to resubmit: %s" % (len(failedPublications), failedPublications))
    for row in failedPublications:
        docid = row['id']
        # this should never happen...
        if row['key'][0] != taskname:
            self.logger.warning("Skipping document %s as it seems to correspond to another task: %s" % (docid, row['key'][0]))
            continue
        # Reset the state so the publication is retried
        data = {'last_update': time.time(),
                'retry': str(datetime.datetime.now()),
                'publication_state': 'not_published',
                }
        try:
            db.updateDocument(docid, 'DBSPublisher', 'updateFile', data)
            self.logger.info("updating document %s " % docid)
        except Exception as ex:
            self.logger.error("Error updating document %s in CouchDB: %s" % (docid, str(ex)))
    return
def setUp(self):
    """
    _setUp_

    Setup couchdb and the test environment
    """
    super(ResubmitBlockTest, self).setUp()
    self.group = 'unknown'
    self.user = '******'
    # Set external test helpers
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setupCouch("resubmitblock_t", "ACDC", "GroupUser")
    # Define test environment
    self.couchUrl = os.environ["COUCHURL"]
    self.acdcDBName = 'resubmitblock_t'
    self.validLocations = ['T2_US_Nebraska', 'T1_US_FNAL_Disk', 'T1_UK_RAL_Disk']
    self.siteWhitelist = ['T2_XX_SiteA']
    #Convert phedex node name to a valid processing site name
    siteDbApi = SiteDB()
    self.PSNs = siteDbApi.PNNstoPSNs(self.validLocations)
    self.workflowName = 'dballest_ReReco_workflow'
    # Direct handle on the ACDC database plus a test user inside it
    acdcCouch = CouchServer(dburl=self.couchUrl)
    self.acdcDB = acdcCouch.connectDatabase(self.acdcDBName, create=False)
    testUser = makeUser(self.group, '*****@*****.**', self.couchUrl, self.acdcDBName)
    testUser.create()
    return
def setUp(self):
    """
    _setUp_

    Initialize the database and couch.
    """
    super(MonteCarloTest, self).setUp()
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch(TEST_DB_NAME, "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    self.testInit.generateWorkDir()
    self.configDatabase = CouchServer(os.environ["COUCHURL"]).connectDatabase(TEST_DB_NAME)
    # DAOs used by the assertions in the individual test methods
    currentThread = threading.currentThread()
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=currentThread.logger,
                                 dbinterface=currentThread.dbi)
    self.listTasksByWorkflow = self.daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = self.daoFactory(classname="Fileset.List")
    self.listSubsMapping = self.daoFactory(classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    return
def setUp(self):
    """
    _setUp_

    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("promptreco_t", "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    self.configDatabase = CouchServer(os.environ["COUCHURL"]).connectDatabase("promptreco_t")
    self.testDir = self.testInit.generateWorkDir()
    # DAOs used by the assertions in the individual test methods
    currentThread = threading.currentThread()
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=currentThread.logger,
                                 dbinterface=currentThread.dbi)
    self.listTasksByWorkflow = self.daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = self.daoFactory(classname="Fileset.List")
    self.listSubsMapping = self.daoFactory(classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    self.promptSkim = None
    # Python 3 renamed assertItemsEqual to assertCountEqual
    if PY3:
        self.assertItemsEqual = self.assertCountEqual
    return
class WorkQueueBackend(object):
    """
    Represents persistent storage for WorkQueue
    """

    def __init__(self, db_url, db_name='workqueue', inbox_name=None,
                 parentQueue=None, queueUrl=None, logger=None):
        """
        :param db_url: url of the couch server hosting the queue databases
        :param db_name: name of the main queue database
        :param inbox_name: inbox database name; defaults to '<db_name>_inbox'
        :param parentQueue: url (with auth) of the parent queue couch, if any
        :param queueUrl: externally visible url of this queue; defaults to
                         db_url + '/' + db_name
        :param logger: logger object; the logging module itself is used as a
                       fallback when none is supplied
        """
        if logger:
            self.logger = logger
        else:
            import logging
            self.logger = logging
        # 'is None' rather than '== None': identity test for the sentinel
        if inbox_name is None:
            inbox_name = "%s_inbox" % db_name
        self.server = CouchServer(db_url)
        self.parentCouchUrlWithAuth = parentQueue
        if parentQueue:
            self.parentCouchUrl = sanitizeURL(parentQueue)['url']
        else:
            self.parentCouchUrl = None
        self.db = self.server.connectDatabase(db_name, create=False, size=10000)
        self.hostWithAuth = db_url
        self.inbox = self.server.connectDatabase(inbox_name, create=False, size=10000)
        self.queueUrl = sanitizeURL(queueUrl or (db_url + '/' + db_name))['url']

    def forceQueueSync(self):
        """Force a blocking replication - for use mainly in tests"""
        self.pullFromParent(continuous=False)
        self.sendToParent(continuous=False)

    def pullFromParent(self, continuous=True, cancel=False):
        """Replicate from parent couch - blocking"""
        try:
            if self.parentCouchUrl and self.queueUrl:
                self.server.replicate(source=self.parentCouchUrl,
                                      destination="%s/%s" % (self.hostWithAuth, self.inbox.name),
                                      filter='WorkQueue/queueFilter',
                                      query_params={'childUrl': self.queueUrl,
                                                    'parentUrl': self.parentCouchUrl},
                                      continuous=continuous,
                                      cancel=cancel,
                                      useReplicator=True)
        # Python 3 compatible syntax (was 'except Exception, ex' - Python 2 only)
        except Exception as ex:
            self.logger.warning('Replication from %s failed: %s' % (self.parentCouchUrl, str(ex)))
def __init__(self, config):
    """
    Initialise the Statistics daemon.

    Connects to the files, config and monitoring databases on the main
    CouchDB instance, and to the statistics database on a separate
    instance.

    :param config: component configuration; BaseDaemon is expected to
                   expose the 'Statistics' section as self.config and to
                   provide self.logger -- TODO confirm against BaseDaemon.
    """
    BaseDaemon.__init__(self, config, 'Statistics')
    # Main couch instance, authenticated with the ops proxy: used for the
    # files database and (below) the monitoring database.
    server = CouchServer(dburl=self.config.couch_instance,
                         ckey=self.config.opsProxy,
                         cert=self.config.opsProxy)
    self.db = server.connectDatabase(self.config.files_database)
    config_server = CouchServer(dburl=self.config.config_couch_instance,
                                ckey=self.config.opsProxy,
                                cert=self.config.opsProxy)
    self.config_db = config_server.connectDatabase(self.config.config_database)
    self.mon_db = server.connectDatabase(self.config.mon_database)
    self.logger.debug('Connected to CouchDB')
    # Dedicated statistics instance (connected without proxy credentials).
    # NOTE(review): 'statitics_database' is the spelling used by the
    # component configuration attribute; keep them in sync.
    statserver = CouchServer(self.config.couch_statinstance)
    self.statdb = statserver.connectDatabase(self.config.statitics_database)
    self.logger.debug('Connected to Stat CouchDB')
    # Documents accumulated during one polling iteration
    self.iteration_docs = []
    # Expiration time; initialised empty here, presumably set elsewhere
    # in the daemon -- TODO confirm
    self.exptime = None
def testJ_Resubmission(self):
    """
    _Resubmission_

    Test Resubmission: create a parent ReReco request, check that a
    Resubmission request without InitialTaskPath is rejected with a 400,
    then verify that a complete Resubmission request is accepted and is
    linked to its parent via the 'childresubmissionrequests' couch view.
    """
    userName = '******'
    groupName = 'Li'
    teamName = 'Tang'
    # First create the parent ReReco request that will be resubmitted
    schema = utils.getAndSetupSchema(self,
                                     userName=userName,
                                     groupName=groupName,
                                     teamName=teamName)
    schema['RequestType'] = "ReReco"
    configID = self.createConfig()
    schema["ConfigCacheID"] = configID
    schema["CouchDBName"] = self.couchDBName
    schema["CouchURL"] = os.environ.get("COUCHURL")
    schema["CouchWorkloadDBName"] = self.couchDBName
    result = self.jsonSender.put('request', schema)
    requestName = result[0]['RequestName']

    # user, group schema already set up
    schema = utils.getSchema(groupName=groupName, userName=userName)
    schema['RequestType'] = "Resubmission"
    schema["CouchWorkloadDBName"] = self.couchDBName
    # Without InitialTaskPath the request must fail validation (HTTP 400)
    try:
        raises = False
        result = self.jsonSender.put('request', schema)
    except HTTPException as ex:
        raises = True
        self.assertEqual(ex.status, 400)
        self.assertTrue(
            "Error in Workload Validation: Validation failed: InitialTaskPath is mendatory" in ex.result)
    self.assertTrue(raises)

    # Complete the schema so the Resubmission request can be accepted
    schema["InitialTaskPath"] = '/%s/DataProcessing' % requestName
    schema["ACDCServer"] = os.environ.get("COUCHURL")
    schema["ACDCDatabase"] = self.couchDBName
    schema["CollectionName"] = "SomeOtherName"

    # Here we just make sure that real result goes through
    result = self.jsonSender.put('request', schema)
    resubmitName = result[0]['RequestName']
    result = self.jsonSender.get('request/%s' % resubmitName)

    # The child resubmission must be registered under its parent request
    couchServer = CouchServer(self.testInit.couchUrl)
    reqmgrCouch = couchServer.connectDatabase(self.couchDBName)
    result = reqmgrCouch.loadView('ReqMgr', 'childresubmissionrequests', {}, [requestName])['rows']
    self.assertEqual(len(result), 1)
    self.assertEqual(result[0]['key'], requestName)
    self.assertEqual(result[0]['id'], resubmitName)
def setUp(self):
    """
    _setUp_

    Setup the database and logging connection.  Try to create all of the
    WMBS tables.  Also add some dummy locations.
    """
    super(WorkQueueTestCase, self).setUp()
    # Names of all couch databases used by the work queue tests
    self.queueDB = 'workqueue_t'
    self.queueInboxDB = 'workqueue_t_inbox'
    self.globalQDB = 'workqueue_t_global'
    self.globalQInboxDB = 'workqueue_t_global_inbox'
    self.localQDB = 'workqueue_t_local'
    self.localQInboxDB = 'workqueue_t_local_inbox'
    self.localQDB2 = 'workqueue_t_local2'
    self.localQInboxDB2 = 'workqueue_t_local2_inbox'
    self.configCacheDB = 'workqueue_t_config_cache'
    self.logDBName = 'logdb_t'
    self.requestDBName = 'workqueue_t_reqmgr_workload_cache'
    self.setSchema()
    self.testInit = TestInit('WorkQueueTest')
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection(destroyAllDatabase=True)
    self.addCleanup(self.testInit.clearDatabase)
    self.addCleanup(logging.debug, 'Cleanup called clearDatabase()')
    self.testInit.setSchema(customModules=self.schema, useDefault=False)
    # Every queue database gets the standard work queue couch apps
    for queueDbName in (self.queueDB, self.queueInboxDB,
                        self.globalQDB, self.globalQInboxDB,
                        self.localQDB, self.localQInboxDB,
                        self.localQDB2, self.localQInboxDB2):
        self.testInit.setupCouch(queueDbName, *self.couchApps)
    self.testInit.setupCouch(self.configCacheDB, 'ConfigCache')
    self.testInit.setupCouch(self.logDBName, 'LogDB')
    self.testInit.setupCouch(self.requestDBName, 'ReqMgr')
    self.couchURL = os.environ.get("COUCHURL")
    self.configCacheDBInstance = CouchServer(self.couchURL).connectDatabase(self.configCacheDB)
    # Clear out any stale replicator documents and register cleanups
    self.localCouchMonitor = CouchMonitor(self.couchURL)
    self.localCouchMonitor.deleteReplicatorDocs()
    self.addCleanup(self.localCouchMonitor.deleteReplicatorDocs)
    self.addCleanup(logging.debug, 'Cleanup called deleteReplicatorDocs()')
    self.addCleanup(self.testInit.tearDownCouch)
    self.addCleanup(logging.debug, 'Cleanup called tearDownCouch()')
    self.workDir = self.testInit.generateWorkDir()
    self.addCleanup(self.testInit.delWorkDir)
    self.addCleanup(logging.debug, 'Cleanup called delWorkDir()')
    return
def __init__(self, config):
    """
    Load the Analytics configuration, configure logging and connect to the
    files, config and user monitoring databases.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.Analytics
    try:
        self.logger.setLevel(self.config.log_level)
    except:
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    localServer = CouchServer(dburl=self.config.couch_instance,
                              ckey=self.config.opsProxy,
                              cert=self.config.opsProxy)
    self.db = localServer.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to local couchDB')
    # Config database lives on the same local couch instance here
    self.config_db = localServer.connectDatabase(self.config.config_database)
    self.amq_auth_file = self.config.amq_auth_file
    monitoringServer = CouchServer(dburl=self.config.couch_user_monitoring_instance,
                                   ckey=self.config.opsProxy,
                                   cert=self.config.opsProxy)
    self.monitoring_db = monitoringServer.connectDatabase(self.config.user_monitoring_db)
    self.logger.debug('Connected to user_monitoring_db in couchDB')
def testE_multicore(self):
    """
    _multicore_

    Create a workload summary based on the multicore job report and check
    the aggregated performance averages.
    """
    config = self.getConfig()
    workloadPath = os.path.join(self.testDir, 'specDir', 'spec.pkl')
    workload = self.createWorkload(workloadName=workloadPath)
    # Created for its side effects (jobs in the state machine); the return
    # value itself is not needed.
    self.createTestJobGroup(config=config,
                            name=workload.name(),
                            specLocation=workloadPath,
                            error=False,
                            multicore=True)

    cachePath = os.path.join(config.JobCreator.jobCacheDir, "TestWorkload", "ReReco")
    os.makedirs(cachePath)
    self.assertTrue(os.path.exists(cachePath))

    dbname = config.TaskArchiver.workloadSummaryCouchDBName
    couchdb = CouchServer(config.JobStateMachine.couchurl)
    workdatabase = couchdb.connectDatabase(dbname)
    # Kept for their side effects -- presumably connectDatabase creates the
    # jobs/fwjrs databases the poller needs; TODO confirm.
    couchdb.connectDatabase("%s/jobs" % self.databaseName)
    couchdb.connectDatabase("%s/fwjrs" % self.databaseName)

    testTaskArchiver = TaskArchiverPoller(config=config)
    testTaskArchiver.algorithm()

    workloadSummary = workdatabase.document(id="TestWorkload")
    perf = workloadSummary['performance']['/TestWorkload/ReReco']['cmsRun1']
    # Approximate comparison: these averages are computed floats, so exact
    # equality (as before) is fragile across platforms.
    self.assertAlmostEqual(perf['minMergeTime']['average'], 5.7624950408900002, places=2)
    self.assertAlmostEqual(perf['numberOfMerges']['average'], 3.0, places=2)
    self.assertAlmostEqual(perf['averageProcessTime']['average'], 29.369966666700002, places=2)
    return
def __init__(self, config):
    """
    Load the AsyncTransfer configuration, configure logging and connect to
    the files and statistics databases.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.AsyncTransfer
    try:
        self.logger.setLevel(self.config.log_level)
    except:
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    filesServer = CouchServer(self.config.couch_instance)
    self.db = filesServer.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to CouchDB')
    # Statistics are kept on a separate couch instance
    statServer = CouchServer(self.config.couch_statinstance)
    self.statdb = statServer.connectDatabase(self.config.statitics_database)
    self.logger.debug('Connected to Stat CouchDB')
def __init__(self, config):
    """
    Load the Analytics configuration, configure logging and connect to the
    files, config and user monitoring databases (each potentially on a
    different couch instance).
    """
    BaseWorkerThread.__init__(self)
    self.config = config.Analytics
    try:
        self.logger.setLevel(self.config.log_level)
    except:
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    filesServer = CouchServer(dburl=self.config.couch_instance,
                              ckey=self.config.opsProxy,
                              cert=self.config.opsProxy)
    self.db = filesServer.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to local couchDB')
    configServer = CouchServer(dburl=self.config.config_couch_instance,
                               ckey=self.config.opsProxy,
                               cert=self.config.opsProxy)
    self.config_db = configServer.connectDatabase(self.config.config_database)
    self.amq_auth_file = self.config.amq_auth_file
    monitoringServer = CouchServer(dburl=self.config.couch_user_monitoring_instance,
                                   ckey=self.config.opsProxy,
                                   cert=self.config.opsProxy)
    self.monitoring_db = monitoringServer.connectDatabase(self.config.user_monitoring_db)
    self.logger.debug('Connected to user_monitoring_db in couchDB')
def __init__(self, config):
    """
    Load the AsyncTransfer configuration, configure logging and connect to
    the files and user monitoring databases.
    """
    BaseWorkerThread.__init__(self)
    self.config = config.AsyncTransfer
    try:
        self.logger.setLevel(self.config.log_level)
    except:
        import logging
        self.logger = logging.getLogger()
        self.logger.setLevel(self.config.log_level)
    self.logger.debug('Configuration loaded')
    filesServer = CouchServer(self.config.couch_instance)
    self.db = filesServer.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to local couchDB')
    monitoringServer = CouchServer(self.config.couch_user_monitoring_instance)
    self.monitoring_db = monitoringServer.connectDatabase(self.config.user_monitoring_db)
    self.logger.debug('Connected to user_monitoring_db in couchDB')
def testJ_Resubmission(self):
    """
    _Resubmission_

    Test Resubmission: set up a parent ReReco request, verify a
    Resubmission request missing InitialTaskPath is rejected (HTTP 400),
    then submit a valid Resubmission and check the parent/child link in
    the ReqMgr couch view.
    """
    userName = '******'
    groupName = 'Li'
    teamName = 'Tang'
    # Parent ReReco request that the resubmission will reference
    schema = utils.getAndSetupSchema(self,
                                     userName=userName,
                                     groupName=groupName,
                                     teamName=teamName)
    schema['RequestType'] = "ReReco"
    configID = self.createConfig()
    schema["ConfigCacheID"] = configID
    schema["CouchDBName"] = self.couchDBName
    schema["CouchURL"] = os.environ.get("COUCHURL")
    schema["CouchWorkloadDBName"] = self.couchDBName
    result = self.jsonSender.put('request', schema)
    requestName = result[0]['RequestName']

    # user, group schema already set up
    schema = utils.getSchema(groupName=groupName, userName=userName)
    schema['RequestType'] = "Resubmission"
    schema["CouchWorkloadDBName"] = self.couchDBName
    # Missing InitialTaskPath must fail workload validation with a 400
    try:
        raises = False
        result = self.jsonSender.put('request', schema)
    except HTTPException as ex:
        raises = True
        self.assertEqual(ex.status, 400)
        self.assertTrue(
            "Error in Workload Validation: Validation failed: InitialTaskPath is mendatory" in ex.result)
    self.assertTrue(raises)

    # Fill in the remaining required fields for a valid resubmission
    schema["InitialTaskPath"] = '/%s/DataProcessing' % requestName
    schema["ACDCServer"] = os.environ.get("COUCHURL")
    schema["ACDCDatabase"] = self.couchDBName
    schema["CollectionName"] = "SomeOtherName"

    # Here we just make sure that real result goes through
    result = self.jsonSender.put('request', schema)
    resubmitName = result[0]['RequestName']
    result = self.jsonSender.get('request/%s' % resubmitName)

    # The couch view must show exactly one child for the parent request
    couchServer = CouchServer(self.testInit.couchUrl)
    reqmgrCouch = couchServer.connectDatabase(self.couchDBName)
    result = reqmgrCouch.loadView('ReqMgr', 'childresubmissionrequests', {}, [requestName])['rows']
    self.assertEqual(len(result), 1)
    self.assertEqual(result[0]['key'], requestName)
    self.assertEqual(result[0]['id'], resubmitName)
def testE_multicore(self):
    """
    _multicore_

    Create a workload summary based on the multicore job report, run the
    TaskArchiver and verify that the WMBS tables are emptied and the
    aggregated performance averages are correct.
    """
    myThread = threading.currentThread()
    config = self.getConfig()
    workloadPath = os.path.join(self.testDir, "specDir", "spec.pkl")
    workload = self.createWorkload(workloadName=workloadPath)
    # Created for its side effects; the returned job group is not used.
    self.createTestJobGroup(config=config, name=workload.name(),
                            specLocation=workloadPath, error=False, multicore=True)

    cachePath = os.path.join(config.JobCreator.jobCacheDir, "TestWorkload", "ReReco")
    os.makedirs(cachePath)
    self.assertTrue(os.path.exists(cachePath))

    dbname = config.TaskArchiver.workloadSummaryCouchDBName
    couchdb = CouchServer(config.JobStateMachine.couchurl)
    workdatabase = couchdb.connectDatabase(dbname)

    testTaskArchiver = TaskArchiverPoller(config=config)
    testTaskArchiver.algorithm()

    # After archiving, all WMBS entries for the workflow must be gone
    result = myThread.dbi.processData("SELECT * FROM wmbs_job")[0].fetchall()
    self.assertEqual(len(result), 0, "No job should have survived")
    result = myThread.dbi.processData("SELECT * FROM wmbs_subscription")[0].fetchall()
    self.assertEqual(len(result), 0)
    result = myThread.dbi.processData("SELECT * FROM wmbs_jobgroup")[0].fetchall()
    self.assertEqual(len(result), 0)
    result = myThread.dbi.processData("SELECT * FROM wmbs_file_details")[0].fetchall()
    self.assertEqual(len(result), 0)

    workloadSummary = workdatabase.document(id="TestWorkload")
    perf = workloadSummary["performance"]["/TestWorkload/ReReco"]["cmsRun1"]
    # assertAlmostEquals is a deprecated alias; use assertAlmostEqual
    self.assertAlmostEqual(perf["minMergeTime"]["average"], 5.7624950408900002, places=2)
    self.assertAlmostEqual(perf["numberOfMerges"]["average"], 3.0, places=2)
    self.assertAlmostEqual(perf["averageProcessTime"]["average"], 29.369966666700002, places=2)
    return
def setUp(self):
    """
    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("relvalmc_t", "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    # Direct handle on the config cache database for the tests
    self.configDatabase = CouchServer(os.environ["COUCHURL"]).connectDatabase("relvalmc_t")
def setUp(self):
    """
    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("pileupfetcher_t", "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    # Direct handle on the config cache database for the tests
    self.configDatabase = CouchServer(os.environ["COUCHURL"]).connectDatabase("pileupfetcher_t")
    self.testDir = self.testInit.generateWorkDir()
    # Use the DBS emulator rather than the real service
    EmulatorHelper.setEmulators(dbs=True)
def __init__(self, config):
    """
    Initialise class members and connect to the files database.

    :param config: component configuration; BaseDaemon is expected to
                   expose the 'RetryManager' section as self.config and to
                   provide self.logger -- TODO confirm against BaseDaemon.
    :raises: re-raises any exception hit while connecting to CouchDB,
             after logging it with the full traceback.
    """
    BaseDaemon.__init__(self, config, 'RetryManager')
    try:
        server = CouchServer(dburl=self.config.couch_instance,
                             ckey=self.config.opsProxy,
                             cert=self.config.opsProxy)
        self.db = server.connectDatabase(self.config.files_database)
    # Python 3 compatible syntax (was 'except Exception, e' - Python 2 only)
    except Exception as e:
        self.logger.exception(
            'A problem occured when connecting to couchDB: %s' % e)
        raise
def __init__(self, config):
    """
    Initialise class members
    """
    #Need a better way to test this without turning off this next line
    BaseDaemon.__init__(self, config, 'DBSPublisher')
    couch = CouchServer(dburl=self.config.couch_instance,
                        ckey=self.config.opsProxy,
                        cert=self.config.opsProxy)
    self.db = couch.connectDatabase(self.config.files_database)
    self.logger.debug('Connected to CouchDB')
    # Set up a factory for loading plugins
    self.factory = WMFactory(self.config.schedAlgoDir,
                             namespace=self.config.schedAlgoDir)
    # Worker pool used to parallelise publication
    self.pool = Pool(processes=self.config.publication_pool_size)
class Registration():
    """
    Register a service with the RegSvc couch database and 'ping' it so the
    service's uptime can be tracked.
    """

    def __init__(self, cfg_dict=None, reg_info=None):
        """
        Initialise the regsvc for this component.

        :param cfg_dict: optional overrides for the couch server/database
                         configuration.
        :param reg_info: registration document; must contain a 'location'
                         key.  Note: when supplied, the dict is annotated
                         in place with '_id' and '#config_hash'.
        """
        # None-defaults instead of mutable {} defaults: the original mutated
        # reg_info, so state leaked between instances created without args.
        cfg_dict = {} if cfg_dict is None else cfg_dict
        reg_info = {} if reg_info is None else reg_info
        try:
            config_dict = {
                'server': 'https://cmsweb.cern.ch/',
                'database': 'registration',
                'cacheduration': 1,
            }
            config_dict.update(cfg_dict)
            self.server = CouchServer(config_dict['server'])
            self.db = self.server.connectDatabase(config_dict['database'])
            if 'location' not in reg_info:
                raise KeyError('Registration needs a location in its reg_info')
            self.location_hash = str(reg_info['location'].__hash__())
            reg_info['_id'] = self.location_hash
            reg_info['#config_hash'] = hash(str(reg_info))
            push_cfg = True
            if self.db.documentExists(self.location_hash):
                # If the doc exists, check that the configuration hasn't changed
                doc = self.db.document(self.location_hash)
                push_cfg = doc['#config_hash'] != reg_info['#config_hash']
                reg_info['_rev'] = doc['_rev']
            if push_cfg:
                self.db.commitOne(reg_info)
        except Exception:
            # Registration is deliberately best-effort: don't want to raise
            # anything here.  Narrowed from a bare 'except:' so that
            # KeyboardInterrupt/SystemExit still propagate.
            # TODO: but should probably log...
            pass
        self.report()

    def report(self):
        """
        'Ping' the RegSvc with a doc containing the service doc's ID and
        a timestamp, this can be used to provide uptime information.
        """
        try:
            self.db.commitOne({'service': self.location_hash}, timestamp=True)
        except Exception:
            # Best-effort only: don't want to raise anything here.
            # TODO: but should probably log...
            pass
def setUp(self):
    """
    _setUp_

    Initialize the database.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("mclhe_t", "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    # Direct handle on the config cache database for the tests
    self.configDatabase = CouchServer(os.environ["COUCHURL"]).connectDatabase("mclhe_t")
    self.testDir = self.testInit.generateWorkDir()
    return