def loadRequestSchema(workload, requestSchema):
    """
    _loadRequestSchema_

    Copy every field of the request schema dictionary onto the workload's
    ``request.schema`` section, JSON-encoding LumiList dicts, then stamp the
    schema with the current time and apply the owner (Group/Requestor) info.

    Takes a WMWorkloadHelper, operates on it directly with the schema.
    """
    schema = workload.data.request.section_('schema')
    for key, value in requestSchema.iteritems():
        # LumiList dicts are stored as their JSON string representation
        if isinstance(value, dict) and key == 'LumiList':
            value = JsonWrapper.dumps(value)
        try:
            setattr(schema, key, value)
        except Exception as ex:
            # Attach TaskChain tasks: dict-valued TaskN entries become their
            # own sub-section instead of a plain attribute
            if isinstance(value, dict) and requestSchema['RequestType'] == 'TaskChain' and 'Task' in key:
                newSec = schema.section_(key)
                for k, v in requestSchema[key].iteritems():
                    # Bug fix: inspect the inner k/v pair here; the original
                    # re-tested the outer key/value, so a task-level LumiList
                    # dict was never JSON-encoded.
                    if isinstance(v, dict) and k == 'LumiList':
                        v = JsonWrapper.dumps(v)
                    try:
                        setattr(newSec, k, v)
                    except Exception as ex:
                        # this logging need to change to cherry py logging
                        logging.error("Invalid Value: %s" % str(ex))
            else:
                # this logging need to change to cherry py logging
                logging.error("Invalid Value: %s" % str(ex))
    schema.timeStamp = int(time.time())
    schema = workload.data.request.schema
    # might belong in another method to apply existing schema
    workload.data.owner.Group = schema.Group
    workload.data.owner.Requestor = schema.Requestor
def set_opts(self, curl, url, params, headers, ckey=None, cert=None, capath=None, verbose=None, verb='GET', doseq=True, cainfo=None):
    """Set options for given curl object, params should be a dictionary

    Configures timeouts, redirects, the HTTP verb, request body/query
    string, headers, response buffers and SSL options on *curl*.
    Returns a (body_buffer, header_buffer) pair of StringIO objects that
    will receive the response body and headers respectively.
    """
    if params == None:
        params = {}
    if not isinstance(params, dict):
        raise TypeError("pycurl parameters should be passed as dictionary")
    # Generic transport settings taken from instance configuration.
    curl.setopt(pycurl.NOSIGNAL, self.nosignal)
    curl.setopt(pycurl.TIMEOUT, self.timeout)
    curl.setopt(pycurl.CONNECTTIMEOUT, self.connecttimeout)
    curl.setopt(pycurl.FOLLOWLOCATION, self.followlocation)
    curl.setopt(pycurl.MAXREDIRS, self.maxredirs)
    # Query-string form of params; only actually appended for GET/HEAD.
    encoded_data = urllib.urlencode(params, doseq=doseq)
    if verb == 'GET':
        url = url + '?' + encoded_data
    elif verb == 'HEAD':
        url = url + '?' + encoded_data
        curl.setopt(pycurl.CUSTOMREQUEST, verb)
        curl.setopt(pycurl.HEADER, 1)
        curl.setopt(pycurl.NOBODY, True)
    elif verb == 'POST':
        curl.setopt(pycurl.POST, 1)
        # POST/PUT/DELETE send params as a JSON body, not urlencoded.
        curl.setopt(pycurl.POSTFIELDS, json.dumps(params))
    elif verb == 'DELETE' or verb == 'PUT':
        curl.setopt(pycurl.CUSTOMREQUEST, verb)
        curl.setopt(pycurl.HTTPHEADER, ['Transfer-Encoding: chunked'])
        curl.setopt(pycurl.POSTFIELDS, json.dumps(params))
    else:
        raise Exception('Unsupported HTTP method "%s"' % verb)
    curl.setopt(pycurl.URL, url)
    if headers:
        # NOTE(review): this HTTPHEADER call replaces the
        # 'Transfer-Encoding: chunked' header set above for PUT/DELETE
        # whenever caller headers are supplied - confirm this is intended.
        curl.setopt(pycurl.HTTPHEADER, \
            ["%s: %s" % (k, v) for k, v in headers.items()])
    # Buffers collecting response body and response headers.
    bbuf = StringIO.StringIO()
    hbuf = StringIO.StringIO()
    curl.setopt(pycurl.WRITEFUNCTION, bbuf.write)
    curl.setopt(pycurl.HEADERFUNCTION, hbuf.write)
    if capath:
        # CA path given: enable peer verification.
        curl.setopt(pycurl.CAPATH, capath)
        curl.setopt(pycurl.SSL_VERIFYPEER, True)
        if cainfo:
            curl.setopt(pycurl.CAINFO, cainfo)
    else:
        curl.setopt(pycurl.SSL_VERIFYPEER, False)
    if ckey:
        curl.setopt(pycurl.SSLKEY, ckey)
    if cert:
        curl.setopt(pycurl.SSLCERT, cert)
    if verbose:
        curl.setopt(pycurl.VERBOSE, 1)
        curl.setopt(pycurl.DEBUGFUNCTION, self.debug)
    return bbuf, hbuf
def updateInboxElements(self, *elementIds, **updatedParams):
    """Update given inbox element's (identified by id) with new parameters"""
    base = "/" + self.inbox.name + "/_design/WorkQueue/_update/in-place/"
    extraOptions = {}
    if "options" in updatedParams:
        extraOptions.update(updatedParams.pop("options"))
    # Query string is identical for every element, build it once.
    query = urllib.urlencode({"updates": json.dumps(updatedParams),
                              "options": json.dumps(extraOptions)})
    for elementId in elementIds:
        self.inbox.makeRequest(uri=base + elementId + "?" + query, type="PUT")
    return
def updateInboxElements(self, *elementIds, **updatedParams):
    """Update given inbox element's (identified by id) with new parameters"""
    updateUri = "/" + self.inbox.name + "/_design/WorkQueue/_update/in-place/"
    passThrough = {}
    if "options" in updatedParams:
        passThrough.update(updatedParams.pop("options"))
    payload = {
        "updates": json.dumps(updatedParams),
        "options": json.dumps(passThrough)
    }
    for elementId in elementIds:
        target = updateUri + elementId + "?" + urllib.urlencode(payload)
        self.inbox.makeRequest(uri=target, type='PUT')
    return
def _makeHash(self, inputdata, hash): """ Turn the input data into json and hash the string. This is simple and means that the input data must be json-serialisable, which is good. """ json_hash = json.dumps(inputdata) return json_hash.__hash__()
def json(self, data):
    """Serialise *data* to JSON; generators are streamed chunk-by-chunk."""
    if isinstance(data, GeneratorType):
        # genstreamer yields string fragments; concatenate them.
        return ''.join(self.genstreamer(data))
    thunked = JSONThunker().thunk(data)
    return JsonWrapper.dumps(thunked)
def splitting(self, requestName):
    """
    _splitting_

    Retrieve the current values for splitting parameters for all tasks in
    the spec.  Format them in the manner that the splitting page expects
    and pass them to the template.
    """
    self.validate(requestName)
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    splittingDict = helper.listJobSplittingParametersByTask(performance = False)
    taskNames = sorted(splittingDict.keys())
    splitInfo = [{"splitAlgo": splittingDict[task]["algorithm"],
                  "splitParams": JsonWrapper.dumps(splittingDict[task]),
                  "taskType": splittingDict[task]["type"],
                  "taskName": task}
                 for task in taskNames]
    return self.templatepage("Splitting", requestName = requestName,
                             taskInfo = splitInfo, taskNames = taskNames)
def splitting(self, requestName):
    """
    _splitting_

    Retrieve the current values for splitting parameters for all tasks in
    the spec.  Format them in the manner that the splitting page expects
    and pass them to the template.
    """
    self.validate(requestName)
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    splittingDict = helper.listJobSplittingParametersByTask()
    taskNames = sorted(splittingDict.keys())
    splitInfo = []
    for task in taskNames:
        params = splittingDict[task]
        splitInfo.append({"splitAlgo": params["algorithm"],
                          "splitParams": JsonWrapper.dumps(params),
                          "taskType": params["type"],
                          "taskName": task})
    return self.templatepage("Splitting", requestName = requestName,
                             taskInfo = splitInfo, taskNames = taskNames)
def test_cjson(self):
    """
    Test cjson implementation.
    """
    try:
        import cjson
    except ImportError:
        # Only a missing cjson should skip the test; the previous bare
        # except also swallowed SystemExit/KeyboardInterrupt.
        raise nose.SkipTest
    json_wrap._module = "cjson"
    result = json_wrap.dumps(self.record)
    expect = json.dumps(self.record)
    self.assertEqual(expect, result)
    data = result
    result = json_wrap.loads(data)
    expect = json.loads(data)
    self.assertEqual(expect, result)
    try:
        json_wrap.loads("bogusbogus")
    except cjson.DecodeError as ex:
        self.assertEqual(ex.args, ("cannot parse JSON description: bogusbogus", ))
def updateInboxElements(self, *elementIds, **updatedParams):
    """Update given inbox element's (identified by id) with new parameters"""
    prefix = "/" + self.inbox.name + "/_design/WorkQueue/_update/in-place/"
    query = urllib.urlencode({"updates": json.dumps(updatedParams)})
    for elementId in elementIds:
        self.inbox.makeRequest(uri=prefix + elementId + "?" + query, type='PUT')
    return
def wrapper(self, data, expires, contentType="application/json"):
    """
    Run the wrapped handler, JSON-encode its result, set the CherryPy
    response headers and return the encoded payload.
    """
    data = func(self, data)
    # The original wrapped these calls in "try: ... except: raise", which
    # re-raised everything unchanged - a no-op handler, removed.
    jsondata = JsonWrapper.dumps(data)
    _setCherryPyHeaders(jsondata, contentType, expires)
    return jsondata
def wrapper(self, data, expires, contentType="application/json+thunk"):
    """
    Run the wrapped handler, thunk the result into plain structures,
    JSON-encode it, set the CherryPy response headers and return the
    encoded payload.
    """
    data = func(self, data)
    # The original wrapped these calls in "try: ... except: raise", which
    # re-raised everything unchanged - a no-op handler, removed.
    thunker = JSONThunker()
    data = thunker.thunk(data)
    jsondata = JsonWrapper.dumps(data)
    _setCherryPyHeaders(jsondata, contentType, expires)
    return jsondata
def updateElements(self, *elementIds, **updatedParams):
    """Update given element's (identified by id) with new parameters"""
    import urllib
    uri = "/" + self.db.name + "/_design/WorkQueue/_update/in-place/"
    data = {"updates": json.dumps(updatedParams)}
    for ele in elementIds:
        thisuri = uri + ele + "?" + urllib.urlencode(data)
        # Response is not inspected; the previous unused 'answer' local
        # has been dropped.
        self.db.makeRequest(uri=thisuri, type="PUT")
    return
def updateElements(self, *elementIds, **updatedParams):
    """Update given element's (identified by id) with new parameters"""
    # Nothing to do without element ids.
    if not elementIds:
        return
    base = "/" + self.db.name + "/_design/WorkQueue/_update/in-place/"
    query = urllib.urlencode({"updates": json.dumps(updatedParams)})
    for elementId in elementIds:
        self.db.makeRequest(uri=base + elementId + "?" + query, type='PUT')
    return
def set_opts(self, curl, url, params, headers, ckey=None, cert=None, capath=None, verbose=None, verb='GET', doseq=True, cainfo=None):
    """Set options for given curl object
    param needs to be a dictionary in case of GET, while PUT and POST
    assume it is a string already encoded/quoted with urllib.encode and
    urllib.quote.

    Returns a (body_buffer, header_buffer) pair of StringIO objects that
    collect the response body and headers.
    """
    # Generic transport settings from instance configuration.
    curl.setopt(pycurl.NOSIGNAL, self.nosignal)
    curl.setopt(pycurl.TIMEOUT, self.timeout)
    curl.setopt(pycurl.CONNECTTIMEOUT, self.connecttimeout)
    curl.setopt(pycurl.FOLLOWLOCATION, self.followlocation)
    curl.setopt(pycurl.MAXREDIRS, self.maxredirs)
    # For body-carrying verbs, dict params are serialised to JSON here;
    # string params pass through untouched (assumed pre-encoded).
    if params and verb != 'GET':
        if isinstance(params, dict):
            params = json.dumps(params)
    if verb == 'GET':
        # GET sends params as a urlencoded query string.
        encoded_data = urllib.urlencode(params, doseq=doseq)
        url = url + '?' + encoded_data
    elif verb == 'POST':
        curl.setopt(pycurl.POST, 1)
        if params:
            curl.setopt(pycurl.POSTFIELDS, params)
    elif verb == 'DELETE' or verb == 'PUT':
        curl.setopt(pycurl.CUSTOMREQUEST, verb)
        curl.setopt(pycurl.HTTPHEADER, ['Transfer-Encoding: chunked'])
        curl.setopt(pycurl.POSTFIELDS, params)
    else:
        raise Exception('Unsupported HTTP method "%s"' % verb)
    curl.setopt(pycurl.URL, url)
    # NOTE(review): headers is used unconditionally here, so a None value
    # would raise AttributeError; also this call replaces the
    # 'Transfer-Encoding: chunked' header set above for PUT/DELETE -
    # confirm both behaviours are intended.
    curl.setopt(pycurl.HTTPHEADER, \
        ["%s: %s" % (k, v) for k, v in headers.items()])
    # Buffers collecting response body and response headers.
    bbuf = StringIO.StringIO()
    hbuf = StringIO.StringIO()
    curl.setopt(pycurl.WRITEFUNCTION, bbuf.write)
    curl.setopt(pycurl.HEADERFUNCTION, hbuf.write)
    if capath:
        # CA path given: enable peer verification.
        curl.setopt(pycurl.CAPATH, capath)
        curl.setopt(pycurl.SSL_VERIFYPEER, True)
        if cainfo:
            curl.setopt(pycurl.CAINFO, cainfo)
    else:
        curl.setopt(pycurl.SSL_VERIFYPEER, False)
    if ckey:
        curl.setopt(pycurl.SSLKEY, ckey)
    if cert:
        curl.setopt(pycurl.SSLCERT, cert)
    if verbose:
        curl.setopt(pycurl.VERBOSE, 1)
        curl.setopt(pycurl.DEBUGFUNCTION, self.debug)
    return bbuf, hbuf
def test_string_compare(self):
    """
    Test that cjson and json libraries do the same thing.
    """
    try:
        import cjson
    except ImportError:
        # Only a missing cjson should skip; the bare except is gone.
        raise nose.SkipTest
    # (Duplicate "_module = 'cjson'" assignment removed.)
    json_wrap._module = "cjson"
    cj_result = json_wrap.dumps(self.record)
    json_wrap._module = "json"
    dj_result = json_wrap.dumps(self.record)
    self.assertEqual(dj_result, cj_result)
    data = dj_result
    json_wrap._module = "cjson"
    cj_result = json_wrap.loads(data)
    json_wrap._module = "json"
    dj_result = json_wrap.loads(data)
    self.assertEqual(dj_result, cj_result)
def loadRequestSchema(workload, requestSchema):
    """
    _loadRequestSchema_

    Copy every field of the request schema dictionary onto the workload's
    ``request.schema`` section, JSON-encoding LumiList dicts, then stamp the
    schema with the current time and apply the owner (Group/Requestor) info.

    Takes a WMWorkloadHelper, operates on it directly with the schema.
    """
    schema = workload.data.request.section_('schema')
    for key, value in requestSchema.iteritems():
        # LumiList dicts are stored as their JSON string representation
        if isinstance(value, dict) and key == 'LumiList':
            value = JsonWrapper.dumps(value)
        try:
            setattr(schema, key, value)
        except Exception as ex:
            # Attach TaskChain tasks: dict-valued TaskN entries become their
            # own sub-section instead of a plain attribute
            if isinstance(value, dict) and requestSchema[
                    'RequestType'] == 'TaskChain' and 'Task' in key:
                newSec = schema.section_(key)
                for k, v in requestSchema[key].iteritems():
                    # Bug fix: inspect the inner k/v pair here; the original
                    # re-tested the outer key/value, so a task-level LumiList
                    # dict was never JSON-encoded.
                    if isinstance(v, dict) and k == 'LumiList':
                        v = JsonWrapper.dumps(v)
                    try:
                        setattr(newSec, k, v)
                    except Exception as ex:
                        # this logging need to change to cherry py logging
                        logging.error("Invalid Value: %s" % str(ex))
            else:
                # this logging need to change to cherry py logging
                logging.error("Invalid Value: %s" % str(ex))
    schema.timeStamp = int(time.time())
    schema = workload.data.request.schema
    # might belong in another method to apply existing schema
    workload.data.owner.Group = schema.Group
    workload.data.owner.Requestor = schema.Requestor
def test_json(self):
    """
    Test default json implementation.
    """
    # Round-trip the record through json_wrap and compare against the
    # stdlib json module at each step.
    encoded = json_wrap.dumps(self.record)
    self.assertEqual(json.dumps(self.record), encoded)
    decoded = json_wrap.loads(encoded)
    self.assertEqual(json.loads(encoded), decoded)
    with self.assertRaises(ValueError):
        json_wrap.loads("bogusbogus")
def test_json(self):
    """
    Test default json implementation.
    """
    json_wrap._module = "json"
    result = json_wrap.dumps(self.record)
    expect = json.dumps(self.record)
    self.assertEqual(expect, result)
    data = result
    result = json_wrap.loads(data)
    expect = json.loads(data)
    self.assertEqual(expect, result)
    try:
        json_wrap.loads("bogusbogus")
    except ValueError as ex:
        # Modernised from the legacy "except ValueError, ex" syntax used
        # nowhere else in this file.
        self.assertEqual(ex.args, ("No JSON object could be decoded: bogusbogus",))
    else:
        # Previously the test silently passed when no exception was raised.
        self.fail("json_wrap.loads() accepted invalid JSON")
def test_json(self):
    """
    Test default json implementation.
    """
    json_wrap._module = "json"
    result = json_wrap.dumps(self.record)
    expect = json.dumps(self.record)
    self.assertEqual(expect, result)
    data = result
    result = json_wrap.loads(data)
    expect = json.loads(data)
    self.assertEqual(expect, result)
    try:
        json_wrap.loads("bogusbogus")
    except ValueError as ex:
        self.assertEqual(ex.args, ("No JSON object could be decoded: bogusbogus",))
    else:
        # Previously the test silently passed when no exception was raised.
        self.fail("json_wrap.loads() accepted invalid JSON")
def encode_params(self, params, verb, doseq):
    """
    Encode request parameters for usage with the 4 verbs.
    A string is assumed to be already encoded and is returned untouched;
    otherwise GET/HEAD parameters are urlencoded and the body-carrying
    verbs get a JSON dump.
    """
    if isinstance(params, basestring):
        # data is already encoded, just return it
        return params
    if not params:
        # NOTE: empty params return '' for GET/HEAD but {} for the other
        # verbs - preserved exactly as the original behaved.
        return '' if verb in ['GET', 'HEAD'] else {}
    if verb in ['GET', 'HEAD']:
        return urllib.urlencode(params, doseq=doseq)
    return json.dumps(params)
def test_cjson(self):
    """
    Test cjson implementation.
    """
    try:
        import cjson
    except ImportError:
        # Only a missing cjson should skip the test; the previous bare
        # except also swallowed SystemExit/KeyboardInterrupt.
        raise nose.SkipTest
    json_wrap._module = "cjson"
    result = json_wrap.dumps(self.record)
    expect = json.dumps(self.record)
    self.assertEqual(expect, result)
    data = result
    result = json_wrap.loads(data)
    expect = json.loads(data)
    self.assertEqual(expect, result)
    try:
        json_wrap.loads("bogusbogus")
    except cjson.DecodeError as ex:
        self.assertEqual(ex.args, ("cannot parse JSON description: bogusbogus",))
def putRequestStats(self, request, stats):
    """PUT the JSON-encoded *stats* for *request* to the 'request' call."""
    return self._getResult('request',
                           args={'requestName': request,
                                 'stats': JsonWrapper.dumps(stats)},
                           verb="PUT")
def putRequestStats(self, request, stats):
    """Upload request statistics via a PUT on the 'request' REST call."""
    statsJson = JsonWrapper.dumps(stats)
    payload = {'requestName': request, 'stats': statsJson}
    return self._getResult('request', args=payload, verb="PUT")
def json(self, data):
    """Thunk *data* into plain structures and serialise them to JSON."""
    return JsonWrapper.dumps(JSONThunker().thunk(data))
def doRequest(self, schema):
    """
    End-to-end ReqMgr REST exercise for one request schema: create it,
    walk it through status transitions and priority changes, assign it,
    feed it progress updates, attach messages and a campaign, and finally
    delete it.  Asserts the REST responses at each step.
    """
    # Fixed owner/environment fields for the test request.
    schema['CmsPath'] = "/uscmst1/prod/sw/cms"
    schema['Requestor'] = 'me'
    schema['Group'] = 'PeopleLikeMe'
    requestName = schema['RequestName']
    # Deleting a request that does not exist yet must fail.
    self.assertRaises(HTTPException, self.jsonSender.delete, 'request/%s' % requestName)
    # Create the request and verify it is retrievable and owned by 'me'.
    self.assertEqual(self.jsonSender.put('request/%s' % requestName, schema)[1], 200)
    self.assertEqual(self.jsonSender.get('request/%s' % requestName)[0]['RequestName'], requestName)
    self.assertTrue(requestName in self.jsonSender.get('user/me')[0])
    self.jsonSender.put('request/%s?status=assignment-approved' % requestName)
    meJSON = self.jsonSender.get('user/me')[0]
    me = json.loads(meJSON)
    self.assertTrue(requestName in me['requests'])
    # Priority contributions: request=5, user=6, group=7.
    self.assertEqual(self.jsonSender.put('request/%s?priority=5' % requestName)[1], 200)
    self.assertEqual(self.jsonSender.post('user/me?priority=6')[1], 200)
    self.assertEqual(self.jsonSender.post('group/PeopleLikeMe?priority=7')[1], 200)
    # default priority of group and user of 1
    request = self.jsonSender.get('request/%s' % requestName)[0]
    self.assertEqual(request['ReqMgrRequestBasePriority'], 5)
    self.assertEqual(request['ReqMgrRequestorBasePriority'], 6)
    self.assertEqual(request['ReqMgrGroupBasePriority'], 7)
    # Total priority is the sum of the three contributions.
    self.assertEqual(request['RequestPriority'], 5+6+7)
    # only certain transitions allowed
    #self.assertEqual(self.jsonSender.put('request/%s?status=running' % requestName)[1], 400)
    self.assertRaises(HTTPException, self.jsonSender.put,'request/%s?status=running' % requestName)
    request = self.jsonSender.get('request/%s' % requestName)[0]
    self.assertEqual(request['RequestStatus'], 'assignment-approved')
    # Assign to a team (name contains a space, hence urllib.quote).
    self.assertTrue(self.jsonSender.put(urllib.quote('assignment/White Sox/%s' % requestName))[1] == 200)
    requestsAndSpecs = self.jsonSender.get(urllib.quote('assignment/White Sox'))[0]
    self.assertTrue(requestName in requestsAndSpecs.keys())
    #workloadHelper = WMWorkloadCache.loadFromURL(requestsAndSpecs[requestName])
    workloadHelper = WMWorkloadHelper()
    workloadHelper.load(requestsAndSpecs[requestName])
    self.assertEqual(workloadHelper.getOwner()['Requestor'], "me")
    self.assertTrue(self.jsonSender.get('assignment?request=%s'% requestName)[0] == ['White Sox'])
    # Registering a work queue URL moves the request to 'acquired'.
    agentUrl = 'http://cmssrv96.fnal.gov/workqueue'
    self.jsonSender.put('workQueue/%s?url=%s'% (requestName, urllib.quote(agentUrl)) )
    self.assertEqual(self.jsonSender.get('workQueue/%s' % requestName)[0][0], agentUrl)
    request = self.jsonSender.get('request/%s' % requestName)[0]
    self.assertEqual(request['RequestStatus'], 'acquired')
    # Two progress updates; the second also reports a success rate.
    self.jsonSender.post('request/%s?events_written=10&files_merged=1' % requestName)
    self.jsonSender.post('request/%s?events_written=20&files_merged=2&percent_success=99.9' % requestName)
    request = self.jsonSender.get('request/%s' % requestName)[0]
    self.assertEqual(len(request['RequestUpdates']), 2)
    self.assertEqual(request['RequestUpdates'][0]['files_merged'], 1)
    self.assertEqual(request['RequestUpdates'][1]['events_written'], 20)
    self.assertEqual(request['RequestUpdates'][1]['percent_success'], 99.9)
    # Message attachment round-trip (retrieval assertion is disabled).
    message = "The sheriff is near"
    jsonMessage = json.dumps(message)
    self.jsonSender.put('message/%s' % requestName, message)
    messages = self.jsonSender.get('message/%s' % requestName)
    #self.assertEqual(messages[0][0][0], message)
    for status in ['running', 'completed']:
        self.jsonSender.put('request/%s?status=%s' % (requestName, status))
    # campaign
    self.jsonSender.put('campaign/%s' % 'TestCampaign')
    campaigns = self.jsonSender.get('campaign')[0]
    self.assertTrue('TestCampaign' in campaigns.keys())
    self.jsonSender.put('campaign/%s/%s' % ('TestCampaign', requestName))
    requestsInCampaign = self.jsonSender.get('campaign/%s' % 'TestCampaign')[0]
    self.assertTrue(requestName in requestsInCampaign.keys())
    # Clean up the request created by this test.
    self.jsonSender.delete('request/%s' % requestName)
def getData(self, cachefile, url, inputdata = {}, incoming_headers = {}, encoder = True, decoder = True, verb = 'GET', contentType = None, force_refresh = False):
    """
    Takes the already generated *full* path to cachefile and the url of the resource.
    Don't need to call self.cacheFileName(cachefile, verb, inputdata) here.
    If cachefile is StringIO append to that

    On success the response is written to the cachefile (unless it came
    from the request-level cache).  On request failure the exception
    handling decides between re-raising and falling back to a stale
    cachefile, depending on force_refresh / usestalecache / cache age.

    NOTE(review): the mutable default arguments (inputdata={},
    incoming_headers={}) are shared across calls - confirm no caller
    mutates them.
    """
    verb = self._verbCheck(verb)
    try:
        # Get the data
        if not inputdata:
            inputdata = self["inputdata"]
        self['logger'].debug('getData: \n\turl: %s\n\tdata: %s' % \
                             (url, inputdata))
        data, status, reason, from_cache = self["requests"].makeRequest(uri = url,
                                                                        verb = verb,
                                                                        data = inputdata,
                                                                        incoming_headers = incoming_headers,
                                                                        encoder = encoder,
                                                                        decoder = decoder,
                                                                        contentType = contentType)
        if from_cache:
            # If it's coming from the cache we don't need to write it to the
            # second cache, or do we?
            self['logger'].debug('Data is from the cache')
        else:
            # Don't need to prepend the cachepath, the methods calling
            # getData have done that for us
            if isfile(cachefile):
                # cachefile is a file-like (StringIO) object: append + rewind.
                cachefile.write(str(data))
                cachefile.seek (0, 0) # return to beginning of file
            else:
                # cachefile is a path: (over)write it on disk.
                f = open(cachefile, 'w')
                if isinstance(data, dict) or isinstance(data, list):
                    f.write(json.dumps(data))
                else:
                    f.write(str(data))
                f.close()
    except (IOError, HttpLib2Error, HTTPException) as he:
        #
        # Overly complicated exception handling. This is due to a request
        # from *Ops that it is very clear that data is is being returned
        # from a cachefile, and that cachefiles can be good/stale/dead.
        #
        if force_refresh or isfile(cachefile) or not os.path.exists(cachefile):
            # No usable cachefile (or caller demanded fresh data): report
            # the service failure and propagate it.
            msg = 'The cachefile %s does not exist and the service at %s'
            msg = msg % (cachefile, self["requests"]['host'] + url)
            if hasattr(he, 'status') and hasattr(he, 'reason'):
                msg += ' is unavailable - it returned %s because %s\n' % (he.status, he.reason)
                if hasattr(he, 'result'):
                    msg += ' with result: %s\n' % he.result
            else:
                msg += ' raised a %s when accessed' % he.__repr__()
            self['logger'].warning(msg)
            raise he
        else:
            # A cachefile exists; decide whether it may still be served.
            cache_dead = cache_expired(cachefile, delta = self.get('maxcachereuse', 24))
            if self.get('usestalecache', False) and not cache_dead:
                # If usestalecache is set the previous version of the cache
                # file should be returned, with a suitable message in the
                # log, but no exception raised
                self['logger'].warning('Returning stale cache data from %s' % cachefile)
                if hasattr(he, 'status') and hasattr(he, 'reason'):
                    self['logger'].info('%s returned %s because %s' % (he.url, he.status, he.reason))
                else:
                    self['logger'].info('%s raised a %s when accessed' % (url, he.__repr__()))
            else:
                if cache_dead:
                    msg = 'The cachefile %s is dead (%s hours older than cache '
                    msg += 'duration), and the service at %s'
                    msg = msg % (cachefile, self.get('maxcachereuse', 24), url)
                    if hasattr(he, 'status') and hasattr(he, 'reason'):
                        msg += ' is unavailable - it returned %s because %s'
                        # NOTE(review): 'msg += msg % ...' appends the whole
                        # message again rather than just the formatted tail -
                        # looks like it should be 'msg = msg % ...'; confirm.
                        msg += msg % (he.status, he.reason)
                    else:
                        msg += ' raised a %s when accessed' % he.__repr__()
                    self['logger'].warning(msg)
                elif self.get('usestalecache', False) == False:
                    # Cache is not dead but Service is configured to not
                    # return stale data.
                    msg = 'The cachefile %s is stale and the service at %s'
                    msg = msg % (cachefile, url)
                    if hasattr(he, 'status') and hasattr(he, 'reason'):
                        msg += ' is unavailable - it returned %s because %s'
                        # NOTE(review): the '%s' placeholders above are never
                        # substituted; the status/reason are appended as a
                        # separate string instead - confirm intended.
                        msg += 'Status: %s \nReason: %s' % (he.status, he.reason)
                    else:
                        msg += ' raised a %s when accessed' % he.__repr__()
                    self['logger'].warning(msg)
                raise he
def getData(self, cachefile, url, inputdata={}, incoming_headers={},
            encoder=True, decoder=True, verb='GET', contentType=None,
            force_refresh=False):
    """
    Takes the already generated *full* path to cachefile and the url of the resource.
    Don't need to call self.cacheFileName(cachefile, verb, inputdata) here.
    If cachefile is StringIO append to that

    On success the response is written to the cachefile (unless it came
    from the request-level cache).  On request failure the exception
    handling decides between re-raising and falling back to a stale
    cachefile, depending on force_refresh / usestalecache / cache age.

    NOTE(review): the mutable default arguments (inputdata={},
    incoming_headers={}) are shared across calls - confirm no caller
    mutates them.
    """
    verb = self._verbCheck(verb)
    try:
        # Get the data
        if not inputdata:
            inputdata = self["inputdata"]
        self['logger'].debug('getData: \n\turl: %s\n\tdata: %s' % \
                             (url, inputdata))
        data, status, reason, from_cache = self["requests"].makeRequest(
            uri=url,
            verb=verb,
            data=inputdata,
            incoming_headers=incoming_headers,
            encoder=encoder,
            decoder=decoder,
            contentType=contentType)
        if from_cache:
            # If it's coming from the cache we don't need to write it to the
            # second cache, or do we?
            self['logger'].debug('Data is from the cache')
        else:
            # Don't need to prepend the cachepath, the methods calling
            # getData have done that for us
            if isfile(cachefile):
                # cachefile is a file-like (StringIO) object: append + rewind.
                cachefile.write(str(data))
                cachefile.seek(0, 0)  # return to beginning of file
            else:
                # cachefile is a path: (over)write it on disk.
                f = open(cachefile, 'w')
                if isinstance(data, dict) or isinstance(data, list):
                    f.write(json.dumps(data))
                else:
                    f.write(str(data))
                f.close()
    except (IOError, HttpLib2Error, HTTPException) as he:
        #
        # Overly complicated exception handling. This is due to a request
        # from *Ops that it is very clear that data is is being returned
        # from a cachefile, and that cachefiles can be good/stale/dead.
        #
        if force_refresh or isfile(
                cachefile) or not os.path.exists(cachefile):
            # No usable cachefile (or caller demanded fresh data): report
            # the service failure and propagate it.
            msg = 'The cachefile %s does not exist and the service at %s'
            msg = msg % (cachefile, self["requests"]['host'] + url)
            if hasattr(he, 'status') and hasattr(he, 'reason'):
                msg += ' is unavailable - it returned %s because %s\n' % (
                    he.status, he.reason)
                if hasattr(he, 'result'):
                    msg += ' with result: %s\n' % he.result
            else:
                msg += ' raised a %s when accessed' % he.__repr__()
            self['logger'].warning(msg)
            raise he
        else:
            # A cachefile exists; decide whether it may still be served.
            cache_dead = cache_expired(cachefile,
                                       delta=self.get('maxcachereuse', 24))
            if self.get('usestalecache', False) and not cache_dead:
                # If usestalecache is set the previous version of the cache
                # file should be returned, with a suitable message in the
                # log, but no exception raised
                self['logger'].warning(
                    'Returning stale cache data from %s' % cachefile)
                if hasattr(he, 'status') and hasattr(he, 'reason'):
                    self['logger'].info('%s returned %s because %s' %
                                        (he.url, he.status, he.reason))
                else:
                    self['logger'].info('%s raised a %s when accessed' %
                                        (url, he.__repr__()))
            else:
                if cache_dead:
                    msg = 'The cachefile %s is dead (%s hours older than cache '
                    msg += 'duration), and the service at %s'
                    msg = msg % (cachefile, self.get('maxcachereuse', 24), url)
                    if hasattr(he, 'status') and hasattr(he, 'reason'):
                        msg += ' is unavailable - it returned %s because %s'
                        # NOTE(review): 'msg += msg % ...' appends the whole
                        # message again rather than just the formatted tail -
                        # looks like it should be 'msg = msg % ...'; confirm.
                        msg += msg % (he.status, he.reason)
                    else:
                        msg += ' raised a %s when accessed' % he.__repr__()
                    self['logger'].warning(msg)
                elif self.get('usestalecache', False) == False:
                    # Cache is not dead but Service is configured to not
                    # return stale data.
                    msg = 'The cachefile %s is stale and the service at %s'
                    msg = msg % (cachefile, url)
                    if hasattr(he, 'status') and hasattr(he, 'reason'):
                        msg += ' is unavailable - it returned %s because %s'
                        # NOTE(review): the '%s' placeholders above are never
                        # substituted; status/reason are appended as a
                        # separate string instead - confirm intended.
                        msg += 'Status: %s \nReason: %s' % (he.status, he.reason)
                    else:
                        msg += ' raised a %s when accessed' % he.__repr__()
                    self['logger'].warning(msg)
                raise he