Code Example #1
File: MSCore.py  Project: davidlange6/WMCore
    def __init__(self, msConfig, logger=None):
        """
        Provides setup for MSTransferor and MSMonitor classes

        :param msConfig: MS service configuration
        :param logger: logger object (optional)
        """
        self.logger = getMSLogger(getattr(msConfig, 'verbose', False), logger)
        self.msConfig = msConfig
        self.logger.info("Configuration including default values:\n%s",
                         self.msConfig)

        self.reqmgr2 = ReqMgr(self.msConfig['reqmgr2Url'], logger=self.logger)
        self.reqmgrAux = ReqMgrAux(self.msConfig['reqmgr2Url'],
                                   httpDict={'cacheduration': 1.0},
                                   logger=self.logger)

        # hard-code it to production DBS, otherwise the PhEDEx subscribe API fails to match TMDB data
        dbsUrl = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
        if usingRucio():
            # FIXME: we cannot use Rucio in write mode yet
            # self.rucio = Rucio(self.msConfig['rucioAccount'], configDict={"logger": self.logger})
            self.phedex = PhEDEx(httpDict={'cacheduration': 0.5},
                                 dbsUrl=dbsUrl,
                                 logger=self.logger)
        else:
            self.phedex = PhEDEx(httpDict={'cacheduration': 0.5},
                                 dbsUrl=dbsUrl,
                                 logger=self.logger)
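
All of the examples on this page construct ReqMgrAux against a ReqMgr2 endpoint and then reuse the cached client. As a minimal standalone sketch (the endpoint URL and agent host name below are assumptions; getWMAgentConfig is the cached lookup used later in Code Example #9):

import logging

from WMCore.Services.ReqMgrAux.ReqMgrAux import ReqMgrAux  # assumed import path

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("reqmgr_aux_demo")

# hypothetical ReqMgr2 endpoint; the examples read this value from their service configuration
reqmgrAux = ReqMgrAux("https://cmsweb.cern.ch/reqmgr2",
                      httpDict={'cacheduration': 1.0},  # same cache setting as Code Example #1
                      logger=logger)

# per-agent configuration lookup, as used in Code Example #9
agentConfig = reqmgrAux.getWMAgentConfig("vocms0123.cern.ch")  # hypothetical agent host name
if agentConfig:
    logger.info("MaxRetries: %s", agentConfig.get("MaxRetries"))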
Code Example #2
    def __init__(self, msConfig, **kwargs):
        """
        Provides setup for MSTransferor and MSMonitor classes

        :param msConfig: MS service configuration
        :param kwargs: can be used to skip the initialization of specific services, such as:
            logger: logger object
            skipReqMgr: boolean to skip ReqMgr initialization
            skipReqMgrAux: boolean to skip ReqMgrAux initialization
            skipRucio: boolean to skip Rucio initialization
        """
        self.logger = getMSLogger(getattr(msConfig, 'verbose', False), kwargs.get("logger"))
        self.msConfig = msConfig
        self.logger.info("Configuration including default values:\n%s", self.msConfig)

        if not kwargs.get("skipReqMgr", False):
            self.reqmgr2 = ReqMgr(self.msConfig['reqmgr2Url'], logger=self.logger)
        if not kwargs.get("skipReqMgrAux", False):
            self.reqmgrAux = ReqMgrAux(self.msConfig['reqmgr2Url'],
                                       httpDict={'cacheduration': 1.0}, logger=self.logger)

        self.phedex = None
        self.rucio = None
        if not kwargs.get("skipRucio", False):
            self.rucio = Rucio(acct=self.msConfig['rucioAccount'],
                               hostUrl=self.msConfig['rucioUrl'],
                               authUrl=self.msConfig['rucioAuthUrl'],
                               configDict={"logger": self.logger, "user_agent": "wmcore-microservices"})
Code Example #3
    def __init__(self, rest, config):

        super(CouchDBCleanup, self).__init__(config)
        self.reqDB = RequestDBReader(config.reqmgrdb_url)
        self.reqmgrAux = ReqMgrAux(config.reqmgr2_url, logger=self.logger)
        # statuses for which we want to keep the transfer documents
        self.transferStatuses = [
            "assigned", "staging", "staged", "acquired", "failed",
            "running-open", "running-closed"
        ]

        baseURL, acdcDB = splitCouchServiceURL(config.acdc_url)
        self.acdcService = CouchService(url=baseURL, database=acdcDB)
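
The cleanup task above obtains the CouchDB server URL and database name from a single configured URL via splitCouchServiceURL. A rough sketch of that call, assuming the helper lives in WMCore.Lexicon and splits on the last path segment (the ACDC URL below is made up):

from WMCore.Lexicon import splitCouchServiceURL  # assumed location of the helper

# hypothetical ACDC service URL of the form <couch server>/<database>
baseURL, acdcDB = splitCouchServiceURL("https://cmsweb.cern.ch/couchdb/acdcserver")
print(baseURL)  # https://cmsweb.cern.ch/couchdb
print(acdcDB)   # acdcserver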
Code Example #4
File: DrainStatusPoller.py  Project: lecriste/WMCore
    def __init__(self, config):
        """
        initialize properties specified from config
        """
        BaseWorkerThread.__init__(self)
        self.config = config
        self.drainAPI = DrainStatusAPI()
        self.condorAPI = PyCondorAPI()
        self.agentConfig = {}
        self.validSpeedDrainConfigKeys = [
            'CondorPriority', 'NoJobRetries', 'EnableAllSites'
        ]

        self.reqAuxDB = ReqMgrAux(self.config.General.ReqMgr2ServiceURL)
Code Example #5
def main():
    "Main function"
    optmgr = OptionParser()
    opts = optmgr.parser.parse_args()
    verbose = int(opts.verbose)
    logger = None
    mgr = None
    if verbose:
        logger = logging.getLogger('parse_campaign')
        logger.setLevel(logging.DEBUG)
        logging.basicConfig()
    if opts.url:
        key = os.getenv('X509_USER_KEY', '')
        cert = os.getenv('X509_USER_CERT', '')
        proxy = os.getenv('X509_USER_PROXY', '')
        if proxy and not cert:
            cert = proxy
            key = proxy
        hdict = {'cert': cert, 'key': key, 'pycurl': True}
        mgr = ReqMgrAux(opts.url, hdict, logger=logger)
    if opts.dburi:
        conn = MongoClient(host=opts.dburi)
        dbname = opts.dbname
        dbcoll = opts.dbcoll
        if verbose:
            print("### read data from '%s', %s/%s" %
                  (opts.dburi, dbname, dbcoll))
        data = [r for r in conn[dbname][dbcoll].find()]
    else:
        fin = opts.fin
        if verbose:
            print("### read data from '%s'" % fin)
        with open(fin, 'r') as istream:
            data = []
            for key, val in json.load(istream).items():
                rec = {'name': key}
                rec.update(val)
                data.append(rec)
    rawRecords = parse(data, verbose)

    output = []  # in case we want to dump all records to a json file
    for rec in process(rawRecords):
        output.append(rec)
        print(json.dumps(rec))
        upload(mgr, rec)
    if opts.fout:
        print("Saving all %d unique campaign records to: %s\n" %
              (len(output), opts.fout))
        with open(opts.fout, "w") as jo:
            json.dump(output, jo, indent=2)
Code Example #6
    def __init__(self, microConfig, uniConfig, logger=None):
        """
        Runs the basic setup and initialization for the MS Transferor module
        :param microConfig: microservice configuration
        """
        self.msConfig = microConfig
        self.uConfig = uniConfig
        self.reqRecords = []
        self.logger = getMSLogger(microConfig['verbose'], logger=logger)

        self.reqmgr2 = ReqMgr(microConfig['reqmgrUrl'], logger=self.logger)
        self.reqmgrAux = ReqMgrAux(microConfig['reqmgrUrl'], httpDict={'cacheduration': 60}, logger=self.logger)
        # eventually this will be changed to use Rucio
        self.phedex = PhEDEx(httpDict={'cacheduration': 10 * 60},
                             dbsUrl=microConfig['dbsUrl'], logger=self.logger)
Code Example #7
    def __init__(self, rest, config):

        super(BuildParentLock, self).__init__(config)
        self.reqmgrAux = ReqMgrAux(config.reqmgr2_url, logger=self.logger)
        self.dbs = DBS3Reader(config.dbs_url)
        # cache of dbs lookups mapping input dataset to parent dataset
        self.dbsLookupCache = {}
        # set of currently active datasets requiring a parent dataset
        self.inputDatasetCache = set()
        self.reqDB = RequestDBReader(config.reqmgrdb_url)
        self.filterKeys = [
            'assignment-approved', 'assigned', 'staging', 'staged', 'failed',
            'acquired', 'running-open', 'running-closed', 'force-complete',
            'completed', 'closed-out'
        ]
Code Example #8
    def __init__(self, config):
        """
        initialize properties specified from config
        """
        BaseWorkerThread.__init__(self)
        self.config = config
        self.drainAPI = DrainStatusAPI(config)
        self.condorAPI = PyCondorAPI()
        self.agentConfig = {}
        self.previousConfig = {}
        self.validSpeedDrainConfigKeys = [
            'CondorPriority', 'NoJobRetries', 'EnableAllSites'
        ]
        self.reqAuxDB = ReqMgrAux(self.config.General.ReqMgr2ServiceURL)
        self.emailAlert = EmailAlert(config.EmailAlert.dictionary_())
        self.condorStates = ("Running", "Idle")
Code Example #9
    def __init__(self, config):
        """
        Initialise class members
        """
        BaseWorkerThread.__init__(self)
        self.config = config

        myThread = threading.currentThread()

        self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                     logger=myThread.logger,
                                     dbinterface=myThread.dbi)
        self.changeState = ChangeState(self.config)

        if hasattr(self.config, "Tier0Feeder"):
            self.reqAuxDB = None
            self.maxRetries = self.config.ErrorHandler.maxRetries
        else:
            self.reqAuxDB = ReqMgrAux(self.config.General.ReqMgr2ServiceURL)
            self.maxRetries = self.reqAuxDB.getWMAgentConfig(
                self.config.Agent.hostName).get("MaxRetries")

        if not isinstance(self.maxRetries, dict):
            self.maxRetries = {'default': self.maxRetries}
        if 'default' not in self.maxRetries:
            raise ErrorHandlerException(
                'Max retries for the default job type must be specified')

        self.exitCodesNoRetry = []
        self.maxProcessSize = getattr(self.config.ErrorHandler,
                                      'maxProcessSize', 250)
        self.maxFailTime = getattr(self.config.ErrorHandler, 'maxFailTime',
                                   32 * 3600)
        self.readFWJR = getattr(self.config.ErrorHandler, 'readFWJR', False)
        self.passCodes = getattr(self.config.ErrorHandler, 'passExitCodes', [])

        self.getJobs = self.daoFactory(classname="Jobs.GetAllJobs")
        self.idLoad = self.daoFactory(classname="Jobs.LoadFromIDWithType")
        self.loadAction = self.daoFactory(classname="Jobs.LoadForErrorHandler")

        self.dataCollection = DataCollectionService(
            url=config.ACDC.couchurl, database=config.ACDC.database)

        return
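
The normalization above guarantees that maxRetries is a dict with at least a 'default' key, which is why ErrorHandlerException is raised when that key is missing. A hypothetical per-job-type lookup against that structure might look like:

# Hypothetical lookup against the structure built above; retries are resolved per
# job type, falling back to the mandatory 'default' entry.
maxRetries = {'default': 3, 'Merge': 5}   # shape produced by the normalization above
jobType = 'Processing'                    # hypothetical job type
allowedRetries = maxRetries.get(jobType, maxRetries['default'])
print(allowedRetries)  # 3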
Code Example #10
File: MSCore.py  Project: haozturk/WMCore
    def __init__(self, msConfig, **kwargs):
        """
        Provides setup for MSTransferor and MSMonitor classes

        :param msConfig: MS service configuration
        :param kwargs: can be used to skip the initialization of specific services, such as:
            logger: logger object
            skipReqMgr: boolean to skip ReqMgr initialization
            skipReqMgrAux: boolean to skip ReqMgrAux initialization
            skipRucio: boolean to skip Rucio initialization
            skipPhEDEx: boolean to skip PhEDEx initialization
        """
        self.logger = getMSLogger(getattr(msConfig, 'verbose', False),
                                  kwargs.get("logger"))
        self.msConfig = msConfig
        self.logger.info("Configuration including default values:\n%s",
                         self.msConfig)

        if not kwargs.get("skipReqMgr", False):
            self.reqmgr2 = ReqMgr(self.msConfig['reqmgr2Url'],
                                  logger=self.logger)
        if not kwargs.get("skipReqMgrAux", False):
            self.reqmgrAux = ReqMgrAux(self.msConfig['reqmgr2Url'],
                                       httpDict={'cacheduration': 1.0},
                                       logger=self.logger)

        self.phedex = None
        self.rucio = None
        if self.msConfig.get('useRucio',
                             False) and not kwargs.get("skipRucio", False):
            self.rucio = Rucio(acct=self.msConfig['rucioAccount'],
                               hostUrl=self.msConfig['rucioUrl'],
                               authUrl=self.msConfig['rucioAuthUrl'],
                               configDict={
                                   "logger": self.logger,
                                   "user_agent": "wmcore-microservices"
                               })
        elif not kwargs.get("skipPhEDEx", False):
            # hard-code it to production DBS, otherwise the PhEDEx subscribe API fails to match TMDB data
            dbsUrl = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
            self.phedex = PhEDEx(httpDict={'cacheduration': 0.5},
                                 dbsUrl=dbsUrl,
                                 logger=self.logger)
Code Example #11
    def __init__(self, config=None, logger=None):
        """
        Setup a bunch of things, like:
         * logger for this service
         * initialize all the necessary service helpers
         * fetch the unified configuration from central couch
         * update the unified configuration with some deployment and default settings
         * start both transfer and monitor threads
        :param config: reqmgr2ms service configuration
        :param logger: logger object (optional)
        """
        self.uConfig = {}
        self.config = config
        self.logger = getMSLogger(getattr(config, 'verbose', False), logger)
        self._parseConfig(config)
        self.logger.info("Configuration including default values:\n%s",
                         self.msConfig)

        self.reqmgr2 = ReqMgr(self.msConfig['reqmgrUrl'], logger=self.logger)
        self.reqmgrAux = ReqMgrAux(self.msConfig['reqmgrUrl'],
                                   httpDict={'cacheduration': 60},
                                   logger=self.logger)

        # transferor has to look at workflows in assigned status
        self.msTransferor = MSTransferor(self.msConfig,
                                         "assigned",
                                         logger=self.logger)

        ### Last but not least, get the threads started
        thname = 'MSTransferor'
        self.transfThread = start_new_thread(
            thname, daemon, (self.transferor, 'assigned',
                             self.msConfig['interval'], self.logger))
        self.logger.debug("### Running %s thread %s", thname,
                          self.transfThread.running())

        thname = 'MSTransferorMonit'
        self.monitThread = start_new_thread(
            thname, daemon, (self.monitor, 'staging',
                             self.msConfig['interval'] * 2, self.logger))
        self.logger.debug("+++ Running %s thread %s", thname,
                          self.monitThread.running())
Code Example #12
    def __init__(self, config):
        """
        Initialise class members
        """
        BaseWorkerThread.__init__(self)
        self.config = config

        myThread = threading.currentThread()

        self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                     logger=myThread.logger,
                                     dbinterface=myThread.dbi)
        self.changeState = ChangeState(self.config)

        if hasattr(self.config, "Tier0Feeder"):
            self.reqAuxDB = None
            self.maxRetries = self.config.ErrorHandler.maxRetries
        else:
            self.reqAuxDB = ReqMgrAux(self.config.General.ReqMgr2ServiceURL)

        self.exitCodesNoRetry = []
        self.maxProcessSize = getattr(self.config.ErrorHandler,
                                      'maxProcessSize', 250)
        self.maxFailTime = getattr(self.config.ErrorHandler, 'maxFailTime',
                                   32 * 3600)
        self.readFWJR = getattr(self.config.ErrorHandler, 'readFWJR', False)
        self.passCodes = getattr(self.config.ErrorHandler, 'passExitCodes', [])

        self.getJobs = self.daoFactory(classname="Jobs.GetAllJobs")
        self.idLoad = self.daoFactory(classname="Jobs.LoadFromIDWithType")
        self.loadAction = self.daoFactory(classname="Jobs.LoadForErrorHandler")

        self.dataCollection = DataCollectionService(
            url=config.ACDC.couchurl, database=config.ACDC.database)

        self.setupComponentParam()

        return
Code Example #13
    def __init__(self, config):
        BaseWorkerThread.__init__(self)
        myThread = threading.currentThread()
        self.config = config

        #DAO factory for WMBS objects
        self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                     logger=logging,
                                     dbinterface=myThread.dbi)

        #Libraries
        self.resourceControl = ResourceControl()
        self.changeState = ChangeState(self.config)
        self.bossAir = BossAirAPI(config=self.config)

        self.hostName = self.config.Agent.hostName
        self.repollCount = getattr(self.config.JobSubmitter, 'repollCount',
                                   10000)
        self.maxJobsPerPoll = int(
            getattr(self.config.JobSubmitter, 'maxJobsPerPoll', 1000))
        self.maxJobsThisCycle = self.maxJobsPerPoll  # changes as per schedd limit
        self.cacheRefreshSize = int(
            getattr(self.config.JobSubmitter, 'cacheRefreshSize', 30000))
        self.skipRefreshCount = int(
            getattr(self.config.JobSubmitter, 'skipRefreshCount', 20))
        self.packageSize = getattr(self.config.JobSubmitter, 'packageSize',
                                   500)
        self.collSize = getattr(self.config.JobSubmitter, 'collectionSize',
                                self.packageSize * 1000)
        self.maxTaskPriority = getattr(self.config.BossAir, 'maxTaskPriority',
                                       1e7)
        self.condorFraction = 0.75  # update during every algorithm cycle
        self.condorOverflowFraction = 0.2
        self.ioboundTypes = ('LogCollect', 'Merge', 'Cleanup', 'Harvesting')

        # Additions for caching-based JobSubmitter
        self.cachedJobIDs = set()
        self.cachedJobs = {}
        self.jobDataCache = {}
        self.jobsToPackage = {}
        self.sandboxPackage = {}
        self.locationDict = {}
        self.taskTypePrioMap = {}
        self.drainSites = set()
        self.abortSites = set()
        self.refreshPollingCount = 0

        try:
            if not getattr(self.config.JobSubmitter, 'submitDir', None):
                self.config.JobSubmitter.submitDir = self.config.JobSubmitter.componentDir
            self.packageDir = os.path.join(self.config.JobSubmitter.submitDir,
                                           'packages')

            if not os.path.exists(self.packageDir):
                os.makedirs(self.packageDir)
        except OSError as ex:
            msg = "Error while trying to create packageDir %s\n!"
            msg += str(ex)
            logging.error(msg)
            logging.debug("PackageDir: %s", self.packageDir)
            logging.debug("Config: %s", config)
            raise JobSubmitterPollerException(msg)

        # Now the DAOs
        self.listJobsAction = self.daoFactory(
            classname="Jobs.ListForSubmitter")
        self.setLocationAction = self.daoFactory(classname="Jobs.SetLocation")
        self.locationAction = self.daoFactory(
            classname="Locations.GetSiteInfo")
        self.setFWJRPathAction = self.daoFactory(classname="Jobs.SetFWJRPath")
        self.listWorkflows = self.daoFactory(
            classname="Workflow.ListForSubmitter")

        # Keep a record of the thresholds in memory
        self.currentRcThresholds = {}

        self.useReqMgrForCompletionCheck = getattr(
            self.config.TaskArchiver, 'useReqMgrForCompletionCheck', True)

        if self.useReqMgrForCompletionCheck:
            # only set up this when reqmgr is used (not Tier0)
            self.reqmgr2Svc = ReqMgr(self.config.General.ReqMgr2ServiceURL)
            self.abortedAndForceCompleteWorkflowCache = \
                self.reqmgr2Svc.getAbortedAndForceCompleteRequestsFromMemoryCache()
            self.reqAuxDB = ReqMgrAux(self.config.General.ReqMgr2ServiceURL)
        else:
            # Tier0 case - just for clarity (this private variable shouldn't be used)
            self.abortedAndForceCompleteWorkflowCache = None

        return
Code Example #14
def main():
    "Main function"
    optmgr = OptionParser()
    opts = optmgr.parser.parse_args()
    verbose = int(opts.verbose)
    logger = None
    mgr = None
    inputWMCore = False
    if verbose:
        logger = logging.getLogger('parse_campaign')
        logger.setLevel(logging.DEBUG)
        logging.basicConfig()
    if opts.url:
        key = os.getenv('X509_USER_KEY', '')
        cert = os.getenv('X509_USER_CERT', '')
        proxy = os.getenv('X509_USER_PROXY', '')
        if proxy and not cert:
            cert = proxy
            key = proxy
        hdict = {'cert': cert, 'key': key, 'pycurl': True}
        mgr = ReqMgrAux(opts.url, hdict, logger=logger)
    if opts.dburi:
        conn = MongoClient(host=opts.dburi)
        dbname = opts.dbname
        dbcoll = opts.dbcoll
        if verbose:
            print("### read data from '%s', %s/%s" %
                  (opts.dburi, dbname, dbcoll))
        data = [r for r in conn[dbname][dbcoll].find()]
    elif opts.fin:
        fin = opts.fin
        if verbose:
            print("### read data from '%s'" % fin)
        data = []
        with open(fin, 'r') as istream:
            campData = json.load(istream)
            if isinstance(campData, dict):
                # then it's a Unified-like campaign schema
                for key, val in campData.items():
                    rec = {'name': key}
                    rec.update(val)
                    data.append(rec)
            elif isinstance(campData, list):
                # then the input file has WMCore-like campaign schema
                print("Found %d campaigns in the input file." % len(campData))
                data = campData
                inputWMCore = True
    if not inputWMCore:
        data = parse(data, verbose)

    output = []  # in case we want to dump all records to a json file
    for rec in process(data):
        output.append(rec)
        upload(mgr, rec)
    if opts.testcamp:
        insertTestCampaigns(mgr)
    if opts.fout:
        print("Saving all %d unique campaign records to: %s\n" %
              (len(output), opts.fout))
        with open(opts.fout, "w") as jo:
            json.dump(output, jo, indent=2)
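
Compared with Code Example #5, this version accepts two JSON layouts for the input file: a Unified-style dictionary keyed by campaign name and a WMCore-style list of campaign records. A small illustration of the two shapes and of the normalization performed in the loop above (the field names inside the records are hypothetical):

# Unified-style schema: a dict keyed by campaign name; the parser folds the key
# into each record under 'name'
unifiedStyle = {
    "HIRun2018A": {"go": 1, "SiteWhitelist": ["T1_US_FNAL"]},   # hypothetical fields
}

# WMCore-style schema: already a list of campaign records carrying their own name
wmcoreStyle = [
    {"CampaignName": "HIRun2018A", "SiteWhiteList": ["T1_US_FNAL"]},  # hypothetical fields
]

# the Unified form is normalized into the same list-of-records shape
normalized = [dict(val, name=key) for key, val in unifiedStyle.items()]
assert normalized[0]["name"] == "HIRun2018A"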
Code Example #15
    def __init__(self, config=None):
        self.config = config
        self.reqmgrAux = ReqMgrAux(self.config.reqmgr2_url)
        self.requests = {}
        self.reqManager = RequestManager()
        self.taskManager = TaskManager(nworkers=3)
Code Example #16
    def __init__(self, rest, config):

        super(AuxCacheUpdateTasks, self).__init__(config)
        self.reqmgrAux = ReqMgrAux(config.reqmgr2_url, logger=self.logger)
        self.mgr = RequestHandler()
Code Example #17
    def __init__(self, rest, config):

        super(AuxCacheUpdateTasks, self).__init__(config)
        self.reqmgrAux = ReqMgrAux(config.reqmgr2_url)