def agentsSites(url):
    """
    Return the set of site names known to the production CMS WMAgents.

    Fetches the agent-info document from the given url and collects every
    site whose WMBS threshold state is 'Normal' on an agent that is in 'ok'
    status and belongs to the 'production' team.

    :param url: url of the WMStats/agent-info data-service
    :return: a set of site names ready in the agents
    """
    sites_ready_in_agent = set()
    headers = {'Accept': 'application/json'}
    mgr = RequestHandler()
    res = mgr.getdata(url, params={}, headers=headers, ckey=ckey(), cert=cert())
    data = json.loads(res)
    # fix: the original rebuilt a dict keyed by team and then shadowed it with
    # the loop variable `agents`; since only the 'production' team is kept,
    # a flat filtered list is equivalent and clearer
    productionAgents = [row['value'] for row in data['rows']
                        if row['value']['agent_team'] == 'production']
    for agent in productionAgents:
        if agent['status'] != 'ok':
            continue
        for site, sinfo in viewitems(agent['WMBS_INFO']['thresholds']):
            if sinfo['state'] in ['Normal']:
                sites_ready_in_agent.add(site)
    return sites_ready_in_agent
def renewRucioToken(rucioAuthUrl, userToken):
    """
    Provided a user Rucio token, check it's lifetime and extend it by another hour
    :param rucioAuthUrl: url to the rucio authentication server
    :param userToken: the current Rucio user token to be renewed
    :return: a datetime.datetime object with the new token lifetime
    """
    params = {}
    headers = {"X-Rucio-Auth-Token": userToken}
    url = '%s/auth/validate' % rucioAuthUrl
    logging.info("Renewing the Rucio token...")
    mgr = RequestHandler()
    res = mgr.getdata(url, params=params, headers=headers, ckey=ckey(), cert=cert())
    try:
        # NOTE(review): eval() on a server response is unsafe if the auth
        # server is not fully trusted; presumably the payload is a python-repr
        # string embedding datetime objects (hence neither json.loads nor
        # ast.literal_eval would work) -- confirm before replacing eval
        newExpiration = eval(res)['lifetime']
    except Exception as exc:
        raise RuntimeError(
            "Failed to renew Rucio token. Response: {} Error: {}".format(
                res, str(exc)))
    return newExpiration
def getUrlOpener(self):
    """
    Build the HTTP connection handler used by the constructor.

    Subclasses may override this method to supply a different kind of
    connection, e.g. one performing authentication or using a fancy handler.
    """
    connSettings = {'timeout': 300, 'connecttimeout': 300}
    return RequestHandler(config=connSettings)
def getRucioToken(rucioAuthUrl, rucioAcct):
    """
    Provided a Rucio account, fetch a token from the authentication server
    :param rucioAuthUrl: url to the rucio authentication server
    :param rucioAcct: rucio account to be used
    :return: tuple of the token string and its expiration time in EPOCH
    """
    url = '%s/auth/x509' % rucioAuthUrl
    logging.info(
        "Requesting a token to Rucio for account: %s, against url: %s",
        rucioAcct, rucioAuthUrl)
    mgr = RequestHandler()
    res = mgr.getheader(url, params={}, headers={"X-Rucio-Account": rucioAcct},
                        ckey=ckey(), cert=cert())
    # guard clause: bail out right away on anything but an OK response
    if res.getReason() != "OK":
        raise RuntimeError("Failed to acquire a Rucio token. Error: {}".format(
            res.getReason()))
    userToken = res.getHeaderKey('X-Rucio-Auth-Token')
    tokenExpiration = res.getHeaderKey('X-Rucio-Auth-Token-Expires')
    logging.info("Retrieved Rucio token valid until: %s", tokenExpiration)
    # convert the human readable expiration time to EPOCH time
    return userToken, stringDateToEpoch(tokenExpiration)
def getSpec(self, request, reqSpecs=None):
    """Fetch the workload spec of a request, using the provided cache first."""
    reqName = request['RequestName']
    if reqSpecs and reqName in reqSpecs:
        return reqSpecs[reqName]
    url = str('%s/%s/spec' % (self.msConfig['reqmgrCacheUrl'], reqName))
    # NOTE: the payload is a pickle produced by the reqmgr cache service;
    # pickle.loads must only ever be fed data from that trusted source
    rawSpec = RequestHandler().getdata(url, params={}, cert=cert(), ckey=ckey())
    return pickle.loads(rawSpec)
def makeRequest(url, params):
    """
    Perform an HTTP request against the given url.

    :param url: the url to contact
    :param params: dictionary of request parameters
    :return: the raw response data (returned even on a non-200 status)
    """
    mgr = RequestHandler()
    # NOTE(review): ckey/cert are passed uncalled here, unlike the other
    # helpers in this codebase which use ckey()/cert(); presumably they are
    # plain module-level values in this script -- confirm against its imports
    header, data = mgr.request(url, params, ckey=ckey, cert=cert)
    if header.status != 200:
        # fix: the original used the Python 2 print statement, which is a
        # SyntaxError under Python 3
        print("ERROR")
    return data
def setUp(self):
    "initialization"
    # HTTP handler exercised by the individual test cases
    self.mgr = RequestHandler()
    # user grid credentials, expected under ~/.globus
    self.ckey = os.path.join(os.environ['HOME'], '.globus/userkey.pem')
    self.cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem')
    # canned raw HTTP response headers (CRLF separated) used as parsing
    # fixtures: a CRIC response carrying a session cookie ...
    self.cricheader = 'Date: Tue, 06 Nov 2018 14:50:29 GMT\r\nServer: Apache/2.4.6 (CentOS) OpenSSL/1.0.2k-fips mod_wsgi/3.4 Python/2.7.5 mod_gridsite/2.3.4\r\nVary: Cookie\r\nX-Frame-Options: SAMEORIGIN\r\nSet-Cookie: sessionid=bc1xu8zi5rbbsd5fgjuklb2tk2r3f6tw; expires=Sun, 11-Nov-2018 14:50:29 GMT; httponly; Max-Age=432000; Path=/\r\nContent-Length: 32631\r\nContent-Type: application/json\r\n\r\n'
    # ... a chunked DBS response without cookies ...
    self.dbsheader = 'Date: Tue, 06 Nov 2018 14:39:07 GMT\r\nServer: Apache\r\nCMS-Server-Time: D=1503 t=1541515147806112\r\nTransfer-Encoding: chunked\r\nContent-Type: text/html\r\n\r\n'
    # ... and a CRIC response carrying a GRIDHTTP_PASSCODE cookie
    self.HTTPheader = 'Date: Tue, 06 Nov 2018 14:50:29 GMT\r\nServer: Apache/2.4.6 (CentOS) OpenSSL/1.0.2k-fips mod_wsgi/3.4 Python/2.7.5 mod_gridsite/2.3.4\r\nVary: Cookie\r\nX-Frame-Options: SAMEORIGIN\r\nSet-Cookie: GRIDHTTP_PASSCODE=2c6da9c96efa2ad0farhda; domain=cms-cric.cern.ch; path=/; secure\r\nContent-Length: 32631\r\nContent-Type: application/json\r\n\r\n'
def getNodes(kind):
    """Return the names of all PhEDEx nodes of the requested kind."""
    url = '%s/nodes' % phedexUrl()
    payload = RequestHandler().getdata(url, params={},
                                       headers={'Accept': 'application/json'},
                                       ckey=ckey(), cert=cert())
    nodeList = json.loads(payload)['phedex']['node']
    return [entry['name'] for entry in nodeList if entry['kind'] == kind]
def __init__(self, alertManagerUrl, logger=None): self.alertManagerUrl = alertManagerUrl # sender's hostname is added as an annotation self.hostname = socket.gethostname() self.mgr = RequestHandler() self.ltz = LocalTimezone() self.headers = {"Content-Type": "application/json"} self.validSeverity = ["high", "medium", "low"] self.logger = logger if logger else logging.getLogger()
def getdata(url, params, headers=None):
    """Fetch data from the given service endpoint and decode it as JSON."""
    ckey, cert = getKeyCertFromEnv()
    handler = RequestHandler()
    payload = handler.getdata(url, params=params, headers=headers,
                              ckey=ckey, cert=cert)
    return json.loads(payload)
def getNodesForId(phedexid):
    """Return the unique node names attached to the given PhEDEx request id."""
    url = '%s/requestlist' % phedexUrl()
    params = {'request': str(phedexid)}
    headers = {'Accept': 'application/json'}
    payload = RequestHandler().getdata(url, params, headers,
                                       ckey=ckey(), cert=cert())
    requests = json.loads(payload)['phedex']['request']
    uniqueNodes = set()
    for req in requests:
        for node in req['node']:
            uniqueNodes.add(node['name'])
    return list(uniqueNodes)
def postRequest(url, params):
    """
    Helper function to POST a request to the given URL.

    :param url: the target url
    :param params: dictionary of parameters to post; an optional 'verbose'
                   key is consumed here (removed from params) and used to
                   control the request verbosity
    :return: the raw response data
    """
    mgr = RequestHandler()
    headers = {'Accept': 'application/json'}
    # idiom: pop the optional flag in one step instead of test-get-delete,
    # so it is not sent along as a POST parameter
    verbose = params.pop('verbose', 0)
    data = mgr.getdata(url, params, headers, ckey=ckey(), cert=cert(),
                       verb='POST', verbose=verbose)
    return data
def getDetoxQuota(url):
    """Fetch the Detox quota document and return it as a list of lines."""
    rawText = RequestHandler().getdata(url, params={}, headers={},
                                       ckey=ckey(), cert=cert())
    # split('\n') (not splitlines) keeps a trailing empty element, matching
    # what downstream parsing expects
    return rawText.split('\n')
def findParent(dataset, dbsUrl):
    """Return the list of parent dataset names of the given dataset from DBS."""
    url = '%s/datasetparents' % dbsUrl
    params = {'dataset': dataset}
    headers = {'Accept': 'application/json'}
    handler = RequestHandler()
    payload = handler.getdata(url, params=params, headers=headers,
                              cert=cert(), ckey=ckey())
    parents = []
    for row in json.loads(payload):
        parents.append(str(row['parent_dataset']))
    return parents
def getNodeQueues():
    """
    Helper function to fetch node usage from the PhEDEx data service.

    :return: a defaultdict mapping node name to its cumulative miss bytes,
             converted to terabytes (TiB)
    """
    headers = {'Accept': 'application/json'}
    mgr = RequestHandler()
    url = '%s/nodeusagehistory' % phedexUrl()
    res = mgr.getdata(url, params={}, headers=headers, ckey=ckey(), cert=cert())
    data = json.loads(res)
    ret = defaultdict(int)
    for node in data['phedex']['node']:
        for usage in node['usage']:
            # fix: bytes->TiB divisor is 1024**4; the original used 1023.**4,
            # an off-by-one typo slightly inflating every value
            ret[node['name']] += int(usage['miss_bytes'] / 1024. ** 4)  # in TB
    return ret
def getWorkflows(state):
    """Return the list of workflows in the given status from ReqMgr2."""
    url = '%s/data/request' % reqmgrUrl()
    payload = RequestHandler().getdata(url, params={'status': state},
                                       headers={'Accept': 'application/json'},
                                       ckey=ckey(), cert=cert())
    return json.loads(payload).get('result', [])
def getWorkflow(requestName, reqMgrUrl):
    """Return the workflow info from ReqMgr2 for the given request name."""
    url = '%s/data/request/%s' % (reqMgrUrl, requestName)
    payload = RequestHandler().getdata(url, params={},
                                       headers={'Accept': 'application/json'},
                                       ckey=ckey(), cert=cert())
    return json.loads(payload).get('result', [])
def setUp(self):
    "Setup MicroService for testing"
    self.app = ServiceManager()
    config = TestConfig
    # fully qualified class name handed to the REST views as their manager
    manager = 'WMCore_t.Services_t.MicroService_t.MicroService_t.ServiceManager'
    config.views.data.manager = manager
    config.manager = manager
    mount = '/microservice'
    self.mgr = RequestHandler()
    self.port = config.main.port
    # url the tests will hit once the cherrypy engine is up
    self.url = 'http://localhost:%s%s/data' % (self.port, mount)
    cherrypy.config["server.socket_port"] = self.port
    # order matters: mount the REST interface before starting the engine
    self.server = RestInterface(self.app, config, mount)
    cherrypy.tree.mount(self.server, mount)
    cherrypy.engine.start()
def __init__(self, url='http://localhost', idict=None):
    """
    url should really be host - TODO fix that when have sufficient code
    coverage and change _getURLOpener if needed
    """
    if not idict:
        idict = {}
    # this class is a dict subclass: settings live in self[...] while a few
    # convenience attributes are mirrored on the instance
    dict.__init__(self, idict)
    self.pycurl = idict.get('pycurl', None)
    self.capath = idict.get('capath', None)
    if self.pycurl:
        self.reqmgr = RequestHandler()

    # set up defaults
    self.setdefault("accept_type", 'text/html')
    self.setdefault("content_type", 'application/x-www-form-urlencoded')
    self.additionalHeaders = {}

    # check for basic auth early, as if found this changes the url
    urlComponent = sanitizeURL(url)
    if urlComponent['username'] is not None:
        self.addBasicAuth(urlComponent['username'], urlComponent['password'])
        url = urlComponent['url']  # remove user, password from url
    self.setdefault("host", url)

    # then update with the incoming dict
    self.update(idict)

    self['endpoint_components'] = urlparse.urlparse(self['host'])

    # If cachepath = None disable caching
    if 'cachepath' in idict and idict['cachepath'] is None:
        self["req_cache_path"] = None
    else:
        cache_dir = (self.cachePath(idict.get('cachepath'),
                                    idict.get('service_name')))
        self["cachepath"] = cache_dir
        self["req_cache_path"] = os.path.join(cache_dir, '.cache')

    # NOTE(review): 'key' here vs 'ckey' elsewhere in this codebase --
    # presumably consumed by _getURLOpener; confirm before renaming
    self.setdefault("cert", None)
    self.setdefault("key", None)
    self.setdefault('capath', None)
    self.setdefault("timeout", 300)
    self.setdefault("logger", logging)

    check_server_url(self['host'])
def _postRequest(self, url, params, verb='POST', verbose=0):
    """Post a request to MonIT; return the decoded JSON, or None on any failure."""
    handler = RequestHandler(logger=self.logger)
    # copy the shared header template so the auth token is not leaked into it
    headers = copy(self.configDict['headers'])
    headers.update({"Authorization": self._token})
    try:
        rawData = handler.getdata(url, params, headers, verb=verb,
                                  verbose=verbose)
        return json.loads(rawData)
    except Exception as exc:
        # best effort: both transport and decode errors are logged, not raised
        self.logger.error("Failed to retrieve data from MonIT. Error: %s", str(exc))
        return None
def workqueueRequests(state=None):
    """Return a dict of request name to job count from the WorkQueue view."""
    url = workqueueView('jobsByRequest')
    if state:
        pass  # we may need to use state when we'll query WorkQueue
    headers = {'Accept': 'application/json'}
    payload = RequestHandler().getdata(url, params={}, headers=headers,
                                       cert=cert(), ckey=ckey())
    viewRows = json.loads(payload).get('rows', [])
    return {row['key']: row['value'] for row in viewRows}
def getDataFromURL(url, proxyfilename=None):
    """
    Read the content of a URL and return it as a string.
    Type of content should not matter, it can be a json file or a tarball
    for example.

    url: the link you would like to retrieve
    proxyfilename: the x509 proxy certificate to be used in case auth is required

    Returns binary data encoded as a string, which can be later processed
    according to what kind of content it represents.
    """
    # Get rid of unicode which may cause problems in pycurl
    # NOTE(review): on py3 this yields bytes; presumably pycurl accepts
    # that -- confirm before changing
    asciiUrl = url.encode('ascii')
    handler = RequestHandler()
    _, payload = handler.request(url=asciiUrl, params={}, ckey=proxyfilename,
                                 cert=proxyfilename,
                                 capath=HTTPRequests.getCACertPath())
    return payload
def alterSubscription(phedexid, decision, comments, nodes=None):
    """
    Approve or disapprove a PhEDEx subscription request on the given nodes.

    :param phedexid: the PhEDEx request id to act upon
    :param decision: the decision to post (e.g. approve/disapprove)
    :param comments: free-text comment attached to the decision
    :param nodes: optional list of node names; defaults to the nodes
                  registered for the request
    :return: True if the request was already processed, False on an empty
             response, otherwise the decoded service response
    """
    mgr = RequestHandler()
    headers = {'Accept': 'application/json'}
    nodes = nodes if nodes else getNodesForId(phedexid)
    params = {
        'decision': decision,
        'request': phedexid,
        'node': ','.join(nodes),
        'comments': comments
    }
    # fix: the original left the '%s' placeholder unfilled ('%s/updaterequest'),
    # producing an invalid url; fill it with the PhEDEx base url as every
    # other helper in this module does
    url = '%s/updaterequest' % phedexUrl()
    data = mgr.getdata(url, params, headers, ckey=ckey(), cert=cert(),
                       verb='POST')
    result = json.loads(data)
    if not result:
        return False
    if 'already' in result:
        return True
    return result
def __init__(self, msConfig, logger=None):
    """
    Runs the basic setup and initialization for the MSRuleCleaner module
    :param msConfig: micro service configuration
    :param logger: logger object, forwarded to the parent class
    """
    super(MSRuleCleaner, self).__init__(msConfig, logger=logger)

    # fill in configuration defaults
    self.msConfig.setdefault("verbose", True)
    self.msConfig.setdefault("interval", 60)
    self.msConfig.setdefault("services", ['ruleCleaner'])
    self.msConfig.setdefault("rucioWmaAccount", "wma_test")
    self.msConfig.setdefault("rucioMStrAccount", "wmcore_transferor")
    self.msConfig.setdefault('enableRealMode', False)
    # destructive actions only happen when real mode is explicitly enabled
    self.mode = "RealMode" if self.msConfig['enableRealMode'] else "DryRunMode"
    self.emailAlert = EmailAlert(self.msConfig)
    self.curlMgr = RequestHandler()

    # Building all the Pipelines:
    # - MStransferor container-level cleanup
    pName = 'plineMSTrCont'
    self.plineMSTrCont = Pipeline(name=pName,
                                  funcLine=[Functor(self.setPlineMarker, pName),
                                            Functor(self.cleanRucioRules)])
    # - MStransferor block-level cleanup
    pName = 'plineMSTrBlock'
    self.plineMSTrBlock = Pipeline(name=pName,
                                   funcLine=[Functor(self.setPlineMarker, pName),
                                             Functor(self.cleanRucioRules)])
    # - agent container-level cleanup (rules fetched for the WMA account)
    pName = 'plineAgentCont'
    self.plineAgentCont = Pipeline(name=pName,
                                   funcLine=[Functor(self.setPlineMarker, pName),
                                             Functor(self.getRucioRules, 'container',
                                                     self.msConfig['rucioWmaAccount']),
                                             Functor(self.cleanRucioRules)])
    # - agent block-level cleanup (rules fetched for the WMA account)
    pName = 'plineAgentBlock'
    self.plineAgentBlock = Pipeline(name=pName,
                                    funcLine=[Functor(self.setPlineMarker, pName),
                                              Functor(self.getRucioRules, 'block',
                                                      self.msConfig['rucioWmaAccount']),
                                              Functor(self.cleanRucioRules)])
    # - archival pipeline: marks the workflow clean and archives it
    pName = 'plineArchive'
    self.plineArchive = Pipeline(name=pName,
                                 funcLine=[Functor(self.setPlineMarker, pName),
                                           Functor(self.setClean),
                                           Functor(self.archive)])

    # Building the different set of plines we will need later:
    # NOTE: The following are all the functional pipelines which are supposed to include
    #       a cleanup function and report cleanup status in the MSRuleCleanerWflow object
    self.cleanuplines = [self.plineMSTrCont,
                         self.plineMSTrBlock,
                         self.plineAgentCont,
                         self.plineAgentBlock]
    # Building an auxiliary list of cleanup pipeline names only:
    self.cleanupPipeNames = [pline.name for pline in self.cleanuplines]
    # Building lists of pipelines related only to Agents or MStransferror
    self.agentlines = [self.plineAgentCont, self.plineAgentBlock]
    self.mstrlines = [self.plineMSTrCont, self.plineMSTrBlock]
    # Initialization of the 'cleaned' and 'archived' counters:
    self.wfCounters = {'cleaned': {}, 'archived': 0}
def __init__(self, config, quiet):
    """
    Initialise class members
    :param config: configuration object; only its General section is used
    :param quiet: when True, the root logger level is raised to WARNING
    """
    self.config = config.General
    self.max_files_per_block = self.config.max_files_per_block
    #self.userCert = self.config.opsCert
    #self.userKey = self.config.opsKey
    self.block_publication_timeout = self.config.block_closure_timeout
    self.lfn_map = {}
    self.force_publication = False
    self.force_failure = False
    #TODO: logger!

    def createLogdir(dirname):
        """
        Create the directory dirname ignoring erors in case it exists. Exit if
        the directory cannot be created.
        """
        try:
            os.mkdir(dirname)
        except OSError as ose:
            if ose.errno != 17:  #ignore the "Directory already exists error"
                print(str(ose))
                print("The task worker need to access the '%s' directory" % dirname)
                sys.exit(1)

    def setRootLogger(quiet, debug):
        """Sets the root logger with the desired verbosity level
           The root logger logs to logs/twlog.txt and every single
           logging instruction is propagated to it (not really nice
           to read)

           :arg bool quiet: it tells if a quiet logger is needed
           :arg bool debug: it tells if needs a verbose logger
           :return logger: a logger with the appropriate logger level."""
        createLogdir('logs')
        createLogdir('logs/processes')
        createLogdir('logs/tasks')
        # midnight-rotating, multiprocess-safe log handler
        logHandler = MultiProcessingLog('logs/log.txt', when='midnight')
        logFormatter = logging.Formatter("%(asctime)s:%(levelname)s:%(module)s,%(lineno)d:%(message)s")
        logHandler.setFormatter(logFormatter)
        logging.getLogger().addHandler(logHandler)
        loglevel = logging.INFO
        if quiet:
            loglevel = logging.WARNING
        if debug:
            # NOTE: debug wins over quiet when both are set
            loglevel = logging.DEBUG
        logging.getLogger().setLevel(loglevel)
        logger = setProcessLogger("master")
        logger.debug("PID %s.", os.getpid())
        logger.debug("Logging level initialized to %s.", loglevel)
        return logger

    self.cache_area = self.config.cache_area
    # the logger must exist before the Oracle connection attempt below,
    # since the except branch uses it
    self.logger = setRootLogger(quiet, True)
    try:
        self.oracleDB = HTTPRequests(self.config.oracleDB,
                                     self.config.opsCert,
                                     self.config.opsKey)
        self.logger.debug('Contacting OracleDB:' + self.config.oracleDB)
    # NOTE(review): bare except; it does log and re-raise, but narrowing to
    # 'except Exception' would avoid swallowing KeyboardInterrupt semantics
    except:
        self.logger.exception('Failed when contacting Oracle')
        raise
    try:
        self.connection = RequestHandler(config={'timeout': 900, 'connecttimeout' : 900})
    except Exception as ex:
        # best effort: a failed connection setup is only logged here
        msg = "Error initializing the connection"
        msg += str(ex)
        msg += str(traceback.format_exc())
        self.logger.debug(msg)
def __init__(self, rest, config):
    """
    :param rest: REST interface object; NOTE(review): accepted but not stored
                 in this visible body -- presumably consumed by the parent or
                 kept for interface symmetry, confirm against the base class
    :param config: component configuration, must provide reqmgr2_url
    """
    super(AuxCacheUpdateTasks, self).__init__(config)
    # client for the ReqMgr2 auxiliary database
    self.reqmgrAux = ReqMgrAux(config.reqmgr2_url, logger=self.logger)
    self.mgr = RequestHandler()