def write_to_cache(self, dasquery, expire, url, api, args, gen, ctime):
    """
    Store the result set produced by *gen* into the DAS cache.

    Skips entirely when caching is disabled (``self.write2cache``).
    Before writing, possible misses are filled in (e.g. a missing
    primary key when the data-service raised an error), then a DAS
    header is built for the (query, api, url) triple and the rows are
    handed to the local cache together with the system/api identifiers.
    """
    if not self.write2cache:
        return
    # Check/set possible misses (e.g. primary key on error) before the
    # rows reach the cache.
    rows = self.set_misses(dasquery, api, gen)
    # Assemble the DAS header record describing this API call.
    rec_header = dasheader(self.name, dasquery, expire, api, url,
                           services=self.services())
    rec_header['lookup_keys'] = self.lookup_keys(api)
    rec_header['prim_key'] = self.dasmapping.primary_mapkey(self.name, api)
    rec_header['ctime'] = ctime
    self.localcache.update_cache(dasquery, rows, rec_header, self.name, api)
    self.logger.debug('cache has been updated,\n')
def write_to_cache(self, dasquery, expire, url, api, args, gen, ctime):
    """
    Write the result set produced by *gen* into the DAS cache.

    Does nothing when ``self.write2cache`` is false. Misses are
    checked/set first (e.g. primary key when an error is thrown by the
    data-service), then a DAS header is composed and the records are
    pushed to the local cache.
    """
    if not self.write2cache:
        return
    # Fill in possible misses before going to the cache.
    rows = self.set_misses(dasquery, api, gen)
    # Build the header record for this (query, api, url) combination.
    rec = dasheader(self.name, dasquery, expire, api, url,
                    services=self.services())
    rec["lookup_keys"] = self.lookup_keys(api)
    rec["prim_key"] = self.dasmapping.primary_mapkey(self.name, api)
    rec["ctime"] = ctime
    # NOTE: this variant of update_cache takes no system/api arguments.
    self.localcache.update_cache(dasquery, rows, rec)
    self.logger.debug("cache has been updated,\n")
def api(self, dasquery):
    """
    Generic data-service API dispatcher; subclasses may override it.

    Resolves *dasquery* into concrete (url, api, args, format, expire)
    rows via ``self.apimap``, inserts a DAS query record per API into
    the local cache, and fetches the data either concurrently through
    the task manager (``self.multitask``) or sequentially via
    ``self.apicall``.
    """
    self.logger.info(dasquery)
    rows = self.apimap(dasquery)
    if not rows:
        return
    tasks = []
    for url, api, args, dformat, expire in rows:
        # Register the DAS query record for this API before fetching.
        self.localcache.insert_query_record(
            dasquery, dasheader(self.name, dasquery, expire, api, url))
        if self.multitask:
            tasks.append(self.taskmgr.spawn(
                self.apicall, dasquery, url, api, args, dformat, expire))
        else:
            self.apicall(dasquery, url, api, args, dformat, expire)
    if self.multitask:
        self.taskmgr.joinall(tasks)
def write_to_cache(self, dasquery, expire, url, api, args, result, ctime):
    """
    Write *result* into the DAS cache and update the analytics DB.

    Returns immediately when ``self.write2cache`` is false. Otherwise
    the API call is recorded in analytics (``add_api`` and
    ``insert_apicall``), a DAS header is built, and the result is
    stored in the local cache.
    """
    if not self.write2cache:
        return
    # Record this API invocation in the analytics DB.
    self.analytics.add_api(self.name, dasquery.mongo_query, api, args)
    self.logger.debug('added to Analytics DB'
                      ' query=%s, api=%s, args=%s' % (dasquery, api, args))
    hdr = dasheader(self.name, dasquery, expire, api, url, ctime)
    hdr['lookup_keys'] = self.lookup_keys(api)
    # Ensure the apicall record is present in the analytics DB.
    self.analytics.insert_apicall(self.name, dasquery.mongo_query,
                                  url, api, args, expire)
    # Finally, push the result set into the local cache.
    self.localcache.update_cache(dasquery, result, hdr)
    self.logger.debug('cache has been updated,\n')
def api(self, dasquery):
    """
    Dispatch *dasquery* to the underlying data-service API(s).

    Each row yielded by ``self.apimap`` describes one API call
    (url, api, args, format, expire). A DAS query record is inserted
    for every call; fetching happens in parallel via the task manager
    when ``self.multitask`` is set, otherwise synchronously through
    ``self.apicall``.
    """
    self.logger.info(dasquery)
    api_rows = self.apimap(dasquery)
    if not api_rows:
        return
    spawned = []
    for url, api, args, dformat, expire in api_rows:
        # Insert the DAS query record for the given API.
        hdr = dasheader(self.name, dasquery, expire, api, url)
        self.localcache.insert_query_record(dasquery, hdr)
        # Fetch DAS data records, concurrently or inline.
        if not self.multitask:
            self.apicall(dasquery, url, api, args, dformat, expire)
        else:
            spawned.append(self.taskmgr.spawn(
                self.apicall, dasquery, url, api, args, dformat, expire))
    if self.multitask:
        self.taskmgr.joinall(spawned)