def pagination(self, total, incache, kwds):
    """
    Construct pagination part of the page.

    :param total: total number of results
    :param incache: value passed through to the das_noresults template
    :param kwds: dict of URL parameters which contains idx/limit/query/input,
        as well as other parameters used in URL by end-user
    :return: rendered HTML snippet (pagination bar or no-results message)
    """
    kwargs = deepcopy(kwds)
    # drop internal query object, it must not leak into the URL
    kwargs.pop('dasquery', None)
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)
    uinput = getarg(kwargs, 'input', '')
    page = ''
    if total > 0:
        params = {}  # will keep everything except idx/limit/query
        # NOTE: iteritems() was py2-only; items() works on both
        for key, val in kwargs.items():
            if key not in ('idx', 'limit', 'query'):
                params[key] = val
        url = "%s/request?%s" \
            % (self.base, urllib.urlencode(params, doseq=True))
        page += self.templatepage('das_pagination', \
            nrows=total, idx=idx, limit=limit, url=url)
    else:
        page = self.templatepage('das_noresults', query=uinput,
                                 incache=incache)
    return page
def index(self, *args, **kwargs):
    """
    Represent DAS web interface.

    Uses the das_searchform template for the input form and yui_table
    for the output Table widget. Positional args dispatch to a view
    method; kwargs carry the user input and requested view name.
    """
    try:
        if not args and not kwargs:
            page = self.form()
            return self.page(page)
        uinput = getarg(kwargs, 'input', '')
        results = self.check_input(uinput)
        if results:
            return self.page(self.form() + results)
        view = getarg(kwargs, 'view', 'list')
        if args:
            # dispatch to method named by first char of first positional
            # arg -- NOTE(review): args[0][0] looks suspicious, confirm
            # the expected args layout with callers
            return getattr(self, args[0][0])(args[1])
        if view not in self.pageviews:
            raise Exception("Page view '%s' is not supported" % view)
        return getattr(self, '%sview' % view)(kwargs)
    except Exception:
        # narrowed from bare except: let SystemExit/KeyboardInterrupt pass
        return self.error(self.gen_error_msg(kwargs))
def __init__(self, config):
    """
    Initialize file-based cache.

    Reads directories, lifetime and DB engine settings from *config*,
    builds the system->url map and creates DB tables when needed.
    """
    Cache.__init__(self, config)
    self.dir = config['filecache_dir']
    self.limit = config['filecache_lifetime']
    self.base_dir = getarg(config, 'filecache_base_dir', '00')
    self.files_dir = getarg(config, 'filecache_files_dir', 100)
    self.logger = config['logger']
    self.verbose = config['verbose']
    self.logger.info("Init filecache %s" % self.dir)
    self.systemdict = {}
    for system in config['systems']:
        self.systemdict[system] = config[system]['url']
    try:
        os.makedirs(self.dir)
    except OSError:  # directory may already exist
        pass
    # NOTE: removed dead local 'verbose' flag which was assigned but
    # never used (self.verbose already holds the setting)
    dbengine = config['filecache_db_engine']
    dbfile = None
    if dbengine.find('sqlite:///') != -1:
        dbfile = dbengine.replace('sqlite:///', '')
    self.engine = create_engine(dbengine, echo=False)
    self.session = sessionmaker(bind=self.engine)
    if not dbfile:
        self.create_table()
    else:  # sqlite case: only create tables for a fresh DB file
        if not os.path.isfile(dbfile):
            self.create_table()
def __init__(self, config):
    """
    Initialize file-based cache.

    Reads directories, lifetime and DB engine settings from *config*,
    builds the system->url map and creates DB tables when needed.
    """
    Cache.__init__(self, config)
    self.dir = config["filecache_dir"]
    self.limit = config["filecache_lifetime"]
    self.base_dir = getarg(config, "filecache_base_dir", "00")
    self.files_dir = getarg(config, "filecache_files_dir", 100)
    self.logger = config["logger"]
    self.verbose = config["verbose"]
    self.logger.info("Init filecache %s" % self.dir)
    self.systemdict = {}
    for system in config["systems"]:
        self.systemdict[system] = config[system]["url"]
    try:
        os.makedirs(self.dir)
    except OSError:  # directory may already exist
        pass
    # NOTE: removed dead local 'verbose' flag which was assigned but
    # never used (self.verbose already holds the setting)
    dbengine = config["filecache_db_engine"]
    dbfile = None
    if dbengine.find("sqlite:///") != -1:
        dbfile = dbengine.replace("sqlite:///", "")
    self.engine = create_engine(dbengine, echo=False)
    self.session = sessionmaker(bind=self.engine)
    if not dbfile:
        self.create_table()
    else:  # sqlite case: only create tables for a fresh DB file
        if not os.path.isfile(dbfile):
            self.create_table()
def records(self, *args, **kwargs):
    """
    Retrieve all record ids, or a single record when an id is provided
    either as a positional arg or via the '_id' kwarg.
    """
    try:
        recordid = None
        if args:
            recordid = args[0]
            spec = {'_id': ObjectId(recordid)}
            fields = None
            query = dict(fields=fields, spec=spec)
        elif kwargs and '_id' in kwargs:  # has_key() was py2-only
            spec = {'_id': ObjectId(kwargs['_id'])}
            fields = None
            query = dict(fields=fields, spec=spec)
        else:  # return all ids
            query = dict(fields=None, spec={})
        res = ''
        time0 = time.time()
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10)
        coll = kwargs.get('collection', 'merge')
        inst = kwargs.get('instance', self.dbs_global)
        form = self.form(uinput="")
        check, content = self.generate_dasquery(query, inst)
        if check:
            return self.page(form + content, ctime=time.time()-time0)
        dasquery = content  # returned content is valid DAS query
        nresults = self.dasmgr.rawcache.nresults(dasquery, coll)
        gen = self.dasmgr.rawcache.get_from_cache\
            (dasquery, idx=idx, limit=limit, collection=coll)
        if recordid:  # we got id
            for row in gen:
                res += das_json(row)
        else:
            for row in gen:
                rid = row['_id']
                del row['_id']
                res += self.templatepage('das_record', \
                    id=rid, collection=coll, daskeys=', '.join(row))
        if recordid:
            page = res
        else:
            url = '/das/records?'
            if nresults:
                page = self.templatepage('das_pagination', \
                    nrows=nresults, idx=idx, limit=limit, url=url)
            else:
                page = 'No results found, nresults=%s' % nresults
            page += res
        ctime = (time.time()-time0)
        page = self.page(form + page, ctime=ctime)
        return page
    except Exception as exc:
        print_exc(exc)
        return self.error(gen_error_msg(kwargs))
def __init__(self, config=None, query=None, sleep=600):
    """Set up DAS robot daemon: core engine, pid file, std streams."""
    self.dascore = DASCore(config, nores=True)
    logdir = getarg(config, 'logdir', '/tmp')
    self.pidfile = os.path.join(logdir, 'robot-%s.pid' % genkey(query))
    devnull = os.devnull if hasattr(os, "devnull") else "/dev/null"
    self.stdin = devnull  # we do not read from stdinput
    self.stdout = getarg(config, 'stdout', devnull)
    self.stderr = getarg(config, 'stderr', devnull)
    self.query = query
    self.sleep = sleep
def create(self, *args, **kwargs):
    """
    HTTP POST request handler. Requests the server to create a new
    resource using the data enclosed in the request body; creates a new
    entry in DAS cache for the provided query.

    :return: dict with status/query/expire or failure details
    """
    data = {'server_method': 'create'}
    if 'query' in kwargs:
        query = kwargs['query']
        self.logdb(query)
        query = self.dascore.mongoparser.parse(query)
        expire = getarg(kwargs, 'expire', 600)
        try:
            status = self.cachemgr.add(query, expire)
            data.update({'status': status, 'query': query,
                         'expire': expire})
        except Exception:  # narrowed from bare except
            data.update({'exception': traceback.format_exc(),
                         'status': 'fail'})
    else:
        data.update({'status': 'fail',
                     'reason': 'Unsupported keys %s' % kwargs.keys()})
    return data
def replace(self, *args, **kwargs):
    """
    HTTP PUT request handler. Requests the server to replace an existing
    resource with the one enclosed in the request body; replaces an
    existing query in DAS cache.

    :return: dict with status/query/expire or failure details
    """
    data = {'server_method': 'replace'}
    if 'query' in kwargs:  # has_key() was py2-only
        query = kwargs['query']
        self.logdb(query)
        query = self.dascore.mongoparser.parse(query)
        try:
            self.dascore.remove_from_cache(query)
        except Exception:  # narrowed from bare except
            msg = traceback.format_exc()
            data.update({'status': 'fail', 'query': query,
                         'exception': msg})
            return data
        expire = getarg(kwargs, 'expire', 600)
        try:
            status = self.cachemgr.add(query, expire)
            data.update({'status': status, 'query': query,
                         'expire': expire})
        except Exception:
            data.update({'status': 'fail', 'query': query,
                         'exception': traceback.format_exc()})
    else:
        data.update({'status': 'fail',
                     'reason': 'Unsupported keys %s' % kwargs.keys()})
    return data
def pagination(self, head):
    """
    Construct pagination part of the page.

    :param head: dict with 'args' (URL kwargs containing idx/limit/query/
        input and other end-user parameters), 'nresults' and server status
        fields used to render either a pagination bar or a no-results page
    :return: rendered HTML snippet
    """
    kwds = head.get('args')
    total = head.get('nresults')
    # NOTE: removed unused local 'apilist'; the value is re-read from
    # head in the no-results branch below
    kwargs = deepcopy(kwds)
    if 'dasquery' in kwargs:
        del kwargs['dasquery']  # we don't need it
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 50)
    uinput = getarg(kwargs, 'input', '')
    skip_args = ['status', 'error', 'reason']
    page = ''
    if datasetPattern(uinput):
        msg = 'By default DAS show dataset with <b>VALID</b> status. '
        msg += 'To query all datasets regardless of their status please use'
        msg += '<span class="example">dataset %s status=*</span> query' % uinput
        msg += ' or use proper status value, e.g. PRODUCTION'
        page += '<div>%s</div><br/>' % msg
    if total and total > 0:
        params = {}  # will keep everything except idx/limit
        for key, val in kwargs.items():
            if key in skip_args:
                continue
            if key != 'idx' and key != 'limit' and key != 'query':
                params[key] = val
        url = "%s/request?%s" \
            % (self.base, urllib.urlencode(params, doseq=True))
        page += self.templatepage('das_pagination', \
            nrows=total, idx=idx, limit=limit, url=url, cgi=cgi, str=str)
    else:
        # distinguish the case when no results vs no API calls
        info = head.get('das_server', None)
        info = pprint.pformat(info) if info else None
        page = self.templatepage('das_noresults', query=uinput, time=time,
                                 status=head.get('status', None),
                                 reason=head.get('reason', None),
                                 info=info,
                                 apilist=head.get('apilist', None))
    return page
def index(self, *args, **kwargs):
    """
    Represent DAS web interface entry point: render the search form
    (with cards) pre-filled with the user's input.
    """
    user_input = getarg(kwargs, 'input', '')
    form = self.form(uinput=user_input, cards=True)
    return self.page(form)
def request(self, *args, **kwargs):
    """
    HTTP GET request handler. Retrieve results from DAS cache.

    :return: dict with request metadata, result data and nresults, or
        failure details when no 'query' kwarg is supplied
    """
    data = {'server_method': 'request'}
    if 'query' in kwargs:
        query = kwargs['query']
        self.logdb(query)
        query = self.dascore.mongoparser.parse(query)
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 0)
        skey = getarg(kwargs, 'skey', '')
        order = getarg(kwargs, 'order', 'asc')
        data.update({'status': 'requested', 'idx': idx, 'limit': limit,
                     'query': query, 'skey': skey, 'order': order})
        res = self.dascore.result(query, idx, limit)
        # isinstance is the idiomatic type test (was: type(...) is ...)
        if isinstance(res, types.GeneratorType):
            # de-duplicate generator items, preserving order
            result = []
            for item in res:
                if item not in result:
                    result.append(item)
            data['data'] = result
            tot = len(data['data'])
        else:
            data['data'] = res
            tot = 1
        data['status'] = 'success'
        data['nresults'] = tot
    else:
        data.update({'status': 'fail',
                     'reason': 'Unsupported keys %s' % kwargs.keys()})
    return data
def tableview(self, kwargs):
    """
    Provide DAS table view.
    """
    kwargs['format'] = 'html'
    uinput = getarg(kwargs, 'input', '')
    ajaxreq = getarg(kwargs, 'ajax', 0)
    form = self.form(uinput=uinput)
    time0 = time.time()
    total = self.nresults(kwargs)
    if not total:
        # no results yet: show not-ready page
        return self.page(self.form(uinput) + self.templatepage('not_ready'),
                         ctime=time.time() - time0)
    # find out which selection keys were used
    selkeys = uinput.replace('find ', '').split(' where ')[0].split(',')
    uikeys = []
    for skey in selkeys:
        uikeys += [entry['ui'] for entry in self.dasmapping.presentation(skey)]
    titles = ["id"] + uikeys
    cols = ['{key:"%s",label:"%s",sortable:true,resizeable:true}' % (t, t)
            for t in titles]
    coldefs = "[%s]" % ",".join(cols)
    coldefs = coldefs.replace("},{", "},\n{")
    limit = getarg(kwargs, 'limit', 10)
    names = dict(titlelist=titles, coldefs=coldefs, rowsperpage=limit,
                 total=total, tag='mytag', ajax=ajaxreq,
                 input=urllib.urlencode(dict(input=uinput)))
    page = self.templatepage('das_table', **names)
    return self.page(form + page, ctime=time.time() - time0)
def tableview(self, kwargs):
    """
    Provide DAS table view.
    """
    kwargs["format"] = "html"
    user_input = getarg(kwargs, "input", "")
    ajax_flag = getarg(kwargs, "ajax", 0)
    search_form = self.form(uinput=user_input)
    start = time.time()
    nrows = self.nresults(kwargs)
    if not nrows:
        # nothing in cache yet: render the not-ready page
        body = self.form(user_input) + self.templatepage("not_ready")
        return self.page(body, ctime=time.time() - start)
    # selection keys used in the query determine the UI columns
    sel = user_input.replace("find ", "").split(" where ")[0].split(",")
    ui_cols = []
    for sel_key in sel:
        ui_cols += [p["ui"] for p in self.dasmapping.presentation(sel_key)]
    headers = ["id"] + ui_cols
    defs = ",".join(
        '{key:"%s",label:"%s",sortable:true,resizeable:true}' % (h, h)
        for h in headers)
    defs = ("[%s]" % defs).replace("},{", "},\n{")
    per_page = getarg(kwargs, "limit", 10)
    params = {
        "titlelist": headers,
        "coldefs": defs,
        "rowsperpage": per_page,
        "total": nrows,
        "tag": "mytag",
        "ajax": ajax_flag,
        "input": urllib.urlencode(dict(input=user_input)),
    }
    table = self.templatepage("das_table", **params)
    return self.page(search_form + table, ctime=time.time() - start)
def records(self, *args, **kwargs):
    """
    HTTP GET request handler. Retrieve records from the provided
    collection of the raw cache.

    :return: dict with request metadata and record data, or failure info
    """
    data = {'server_method': 'request'}
    if 'query' not in kwargs:
        data['status'] = 'fail'
        data['reason'] = 'no query is provided'
        return data
    # input query is in JSON format, decode it with json
    query = json.loads(kwargs.get('query'))
    coll = kwargs.get('collection', 'merge')
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)  # getarg performs type conversion
    count = kwargs.get('count', 0)
    data.update({'status': 'requested', 'query': kwargs['query'],
                 'collection': coll, 'count': count})
    if '_id' in query['spec']:
        recid = query['spec']['_id']
        ids = []
        # BUG FIX: json.loads yields str, not bytes, so the previous
        # `type(recid) is bytes` check never matched and single-id
        # lookups produced an empty id list
        if isinstance(recid, str):
            ids = [ObjectId(recid)]
        elif isinstance(recid, list):
            ids = [ObjectId(r) for r in recid]
        spec = {'spec': {'_id': {'$in': ids}}}
    else:  # look-up all records
        spec = {}
    self.logdb(query)
    try:
        gen = self.dascore.rawcache.get_from_cache\
            (spec, idx=idx, limit=limit, collection=coll, adjust=False)
        data['status'] = 'success'
        data['data'] = [r for r in gen]
    except Exception:  # narrowed from bare except
        self.debug(traceback.format_exc())
        data['status'] = 'fail'
        data['reason'] = sys.exc_info()[0]
    return data
def send_request(self, method, kwargs):
    """
    Send POST or GET request to the cache server with provided parameters.

    :param method: 'POST' (maps to /rest/create) or 'GET' (/rest/request)
    :param kwargs: dict with input/idx/limit/sort/dir parameters
    :return: decoded JSON response or a fail record
    :raises Exception: for unsupported methods
    """
    url = self.cachesrv
    uinput = getarg(kwargs, 'input', '')
    # NOTE: removed unused local 'format' which also shadowed the builtin
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)
    skey = getarg(kwargs, 'sort', '')
    sdir = getarg(kwargs, 'dir', 'asc')
    params = {
        'query': uinput,
        'idx': idx,
        'limit': limit,
        'skey': skey,
        'order': sdir
    }
    if method == 'POST':
        path = '/rest/create'
    elif method == 'GET':
        path = '/rest/request'
    else:
        raise Exception('Unsupported method %s' % method)
    headers = {
        'Accept': 'application/json',
        'Content-type': 'application/json'
    }
    try:
        data = urllib2_request(method, url + path, params, headers=headers)
        result = json.loads(data)
    except Exception:  # narrowed from bare except
        self.daslogger.error(traceback.format_exc())
        result = {'status': 'fail', 'reason': traceback.format_exc()}
    return result
def send_request(self, method, kwargs):
    """
    Send POST or GET request to the cache server with provided parameters.

    :param method: 'POST' (maps to /rest/create) or 'GET' (/rest/request)
    :param kwargs: dict with input/idx/limit/sort/dir parameters
    :return: decoded JSON response or a fail record
    :raises Exception: for unsupported methods
    """
    url = self.cachesrv
    uinput = getarg(kwargs, 'input', '')
    # NOTE: removed unused local 'format' which also shadowed the builtin
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)
    skey = getarg(kwargs, 'sort', '')
    sdir = getarg(kwargs, 'dir', 'asc')
    params = {'query':uinput, 'idx':idx, 'limit':limit,
              'skey':skey, 'order':sdir}
    if method == 'POST':
        path = '/rest/create'
    elif method == 'GET':
        path = '/rest/request'
    else:
        raise Exception('Unsupported method %s' % method)
    headers = {'Accept': 'application/json',
               'Content-type': 'application/json'}
    try:
        data = urllib2_request(method, url+path, params, headers=headers)
        result = json.loads(data)
    except Exception:  # narrowed from bare except
        self.daslogger.error(traceback.format_exc())
        result = {'status':'fail', 'reason':traceback.format_exc()}
    return result
def tableview(self, kwargs):
    """
    Provide DAS table view.
    """
    kwargs['format'] = 'html'
    qinput = getarg(kwargs, 'input', '')
    ajax = getarg(kwargs, 'ajax', 0)
    form_html = self.form(uinput=qinput)
    t_start = time.time()
    count = self.nresults(kwargs)
    if not count:
        # results not yet available
        body = self.form(qinput) + self.templatepage('not_ready')
        return self.page(body, ctime=time.time() - t_start)
    # derive UI column titles from the query's selection keys
    keys = qinput.replace('find ', '').split(' where ')[0].split(',')
    columns = []
    for k in keys:
        columns += [d['ui'] for d in self.dasmapping.presentation(k)]
    titles = ["id"] + columns
    pieces = ['{key:"%s",label:"%s",sortable:true,resizeable:true}' % (t, t)
              for t in titles]
    coldefs = "[%s]" % ",".join(pieces)
    coldefs = coldefs.replace("},{", "},\n{")
    rows = getarg(kwargs, 'limit', 10)
    tmpl_args = {'titlelist': titles, 'coldefs': coldefs,
                 'rowsperpage': rows, 'total': count, 'tag': 'mytag',
                 'ajax': ajax,
                 'input': urllib.urlencode(dict(input=qinput))}
    body = self.templatepage('das_table', **tmpl_args)
    return self.page(form_html + body, ctime=time.time() - t_start)
def records(self, *args, **kwargs):
    """
    HTTP GET request handler. Retrieve records from the provided
    collection of the raw cache.

    :return: dict with request metadata and record data, or failure info
    """
    data = {'server_method':'request'}
    if 'query' not in kwargs:  # has_key() was py2-only
        data['status'] = 'fail'
        data['reason'] = 'no query is provided'
        return data
    # input query is in JSON format, decode it with json
    query = json.loads(kwargs.get('query'))
    coll = kwargs.get('collection', 'merge')
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)  # getarg performs type conversion
    count = kwargs.get('count', 0)
    data.update({'status':'requested', 'query':kwargs['query'],
                 'collection':coll, 'count': count})
    if '_id' in query['spec']:
        recid = query['spec']['_id']
        ids = []
        # isinstance checks replace the removed types.StringType/ListType
        if isinstance(recid, str):
            ids = [ObjectId(recid)]
        elif isinstance(recid, list):
            ids = [ObjectId(r) for r in recid]
        spec = {'spec':{'_id':{'$in':ids}}}
    else:  # look-up all records
        spec = {}
    self.logdb(query)
    try:
        gen = self.dascore.rawcache.get_from_cache\
            (spec, idx=idx, limit=limit, collection=coll, adjust=False)
        data['status'] = 'success'
        data['data'] = [r for r in gen]
    except Exception:  # narrowed from bare except
        self.debug(traceback.format_exc())
        # sys.exc_type was removed in py3; exc_info()[0] is the type
        data['reason'] = sys.exc_info()[0]
        data['status'] = 'fail'
    return data
def pagination(self, head):
    """
    Construct pagination part of the page.

    :param head: dict with 'args' (URL kwargs containing idx/limit/query/
        input and other end-user parameters), 'nresults' and server status
        fields used to render either a pagination bar or a no-results page
    :return: rendered HTML snippet
    """
    kwds = head.get('args')
    total = head.get('nresults')
    # NOTE: removed unused local 'apilist'; the value is re-read from
    # head in the no-results branch below
    kwargs = deepcopy(kwds)
    if 'dasquery' in kwargs:
        del kwargs['dasquery']  # we don't need it
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)
    uinput = getarg(kwargs, 'input', '')
    skip_args = ['status', 'error', 'reason']
    page = ''
    if total > 0:
        params = {}  # will keep everything except idx/limit
        # NOTE: iteritems() was py2-only; items() works on both
        for key, val in kwargs.items():
            if key in skip_args:
                continue
            if key != 'idx' and key != 'limit' and key != 'query':
                params[key] = val
        url = "%s/request?%s" \
            % (self.base, urllib.urlencode(params, doseq=True))
        page += self.templatepage('das_pagination', \
            nrows=total, idx=idx, limit=limit, url=url)
    else:
        # distinguish the case when no results vs no API calls
        info = head.get('das_server', None)
        info = pprint.pformat(info) if info else None
        page = self.templatepage('das_noresults', query=uinput,
                                 status=head.get('status', None),
                                 reason=head.get('reason', None),
                                 info=info,
                                 apilist=head.get('apilist', None))
    return page
def request(self, *args, **kwargs):
    """
    HTTP GET request handler. Retrieve results from DAS cache.

    :return: dict with request metadata, result data and nresults, or
        failure details when no 'query' kwarg is supplied
    """
    data = {'server_method':'request'}
    if 'query' in kwargs:  # has_key() was py2-only
        query = kwargs['query']
        self.logdb(query)
        query = self.dascore.mongoparser.parse(query)
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 0)
        skey = getarg(kwargs, 'skey', '')
        order = getarg(kwargs, 'order', 'asc')
        data.update({'status':'requested', 'idx':idx, 'limit':limit,
                     'query':query, 'skey':skey, 'order':order})
        res = self.dascore.result(query, idx, limit)
        if isinstance(res, types.GeneratorType):
            # de-duplicate generator items, preserving order
            result = []
            for item in res:
                if item not in result:
                    result.append(item)
            data['data'] = result
            tot = len(data['data'])
        else:
            data['data'] = res
            tot = 1
        data['status'] = 'success'
        data['nresults'] = tot
    else:
        data.update({'status': 'fail',
                     'reason': 'Unsupported keys %s' % kwargs.keys()})
    return data
def get_data(self, kwargs):
    """
    Invoke DAS workflow and fetch data from the cache.

    Returns a (head, data) pair where head carries status/nresults/ctime
    and data is the list of cached records (empty on failure).
    """
    head = dict(timestamp=time.time())
    head['args'] = kwargs
    uinput = kwargs.get('input', '')
    inst = kwargs.get('instance', self.dbs_global)
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 0)  # zero means: do not impose limit
    coll = kwargs.get('collection', 'merge')
    dasquery = kwargs.get('dasquery', None)
    time0 = time.time()
    if dasquery:
        dasquery = DASQuery(dasquery, instance=inst)
    else:
        check, content = \
            self.generate_dasquery(uinput, inst, html_error=False)
        if check:
            head.update({'status': 'fail', 'reason': content,
                         'ctime': time.time()-time0, 'input': uinput})
            return head, []
        dasquery = content  # returned content is a valid DAS query
    try:
        nres = self.dasmgr.nresults(dasquery, coll)
        data = self.dasmgr.get_from_cache(dasquery, idx, limit)
        head.update({'status': 'ok', 'nresults': nres,
                     'ctime': time.time()-time0, 'dasquery': dasquery})
    except Exception as exc:
        print_exc(exc)
        head.update({'status': 'fail', 'reason': str(exc),
                     'ctime': time.time()-time0, 'dasquery': dasquery})
        data = []
    head.update({'incache': self.dasmgr.incache(dasquery, coll='cache')})
    return head, data
def replace(self, *args, **kwargs):
    """
    HTTP PUT request handler. Requests the server to replace an existing
    resource with the one enclosed in the request body; replaces an
    existing query in DAS cache.

    :return: dict with status/query/expire or failure details
    """
    data = {'server_method': 'replace'}
    if 'query' in kwargs:
        query = kwargs['query']
        self.logdb(query)
        query = self.dascore.mongoparser.parse(query)
        try:
            self.dascore.remove_from_cache(query)
        except Exception:  # narrowed from bare except
            msg = traceback.format_exc()
            data.update({
                'status': 'fail',
                'query': query,
                'exception': msg
            })
            return data
        expire = getarg(kwargs, 'expire', 600)
        try:
            status = self.cachemgr.add(query, expire)
            data.update({
                'status': status,
                'query': query,
                'expire': expire
            })
        except Exception:
            data.update({
                'status': 'fail',
                'query': query,
                'exception': traceback.format_exc()
            })
    else:
        data.update({
            'status': 'fail',
            'reason': 'Unsupported keys %s' % kwargs.keys()
        })
    return data
def nresults(self, kwargs):
    """
    Invoke DAS search call, parse results and return the number of
    results to web methods.

    :return: number of results, or -1 when the cache server call fails
    """
    url = self.cachesrv
    uinput = getarg(kwargs, "input", "")
    params = {"query": uinput}
    path = "/rest/nresults"
    headers = {"Accept": "application/json"}
    try:
        data = urllib2_request("GET", url + path, params, headers=headers)
        record = json.loads(data)
    except Exception:  # narrowed from bare except
        self.daslogger.error(traceback.format_exc())
        record = {"status": "fail", "reason": traceback.format_exc()}
    if record["status"] == "success":
        return record["nresults"]
    msg = "nresults returns status: %s" % str(record)
    self.daslogger.info(msg)
    return -1
def nresults(self, kwargs):
    """
    Invoke DAS search call, parse results and return the number of
    results to web methods.

    :return: number of results, or -1 when the cache server call fails
    """
    url = self.cachesrv
    uinput = getarg(kwargs, 'input', '')
    params = {'query':uinput}
    path = '/rest/nresults'
    headers = {"Accept": "application/json"}
    try:
        data = urllib2_request('GET', url+path, params, headers=headers)
        record = json.loads(data)
    except Exception:  # narrowed from bare except
        self.daslogger.error(traceback.format_exc())
        record = {'status':'fail', 'reason':traceback.format_exc()}
    if record['status'] == 'success':
        return record['nresults']
    msg = "nresults returns status: %s" % str(record)
    self.daslogger.info(msg)
    return -1
def nresults(self, kwargs):
    """
    Invoke DAS search call, parse results and return the number of
    results to web methods.

    :return: number of results, or -1 when the cache server call fails
    """
    url = self.cachesrv
    uinput = getarg(kwargs, 'input', '')
    params = {'query': uinput}
    path = '/rest/nresults'
    headers = {"Accept": "application/json"}
    try:
        data = urllib2_request('GET', url + path, params, headers=headers)
        record = json.loads(data)
    except Exception:  # narrowed from bare except
        self.daslogger.error(traceback.format_exc())
        record = {'status': 'fail', 'reason': traceback.format_exc()}
    if record['status'] == 'success':
        return record['nresults']
    msg = "nresults returns status: %s" % str(record)
    self.daslogger.info(msg)
    return -1
def create(self, *args, **kwargs):
    """
    HTTP POST request handler. Requests the server to create a new
    resource using the data enclosed in the request body; creates a new
    entry in DAS cache for the provided query.

    :return: dict with status/query/expire or failure details
    """
    data = {'server_method':'create'}
    if 'query' in kwargs:  # has_key() was py2-only
        query = kwargs['query']
        self.logdb(query)
        query = self.dascore.mongoparser.parse(query)
        expire = getarg(kwargs, 'expire', 600)
        try:
            status = self.cachemgr.add(query, expire)
            data.update({'status':status, 'query':query, 'expire':expire})
        except Exception:  # narrowed from bare except
            data.update({'exception':traceback.format_exc(),
                         'status':'fail'})
    else:
        data.update({'status': 'fail',
                     'reason': 'Unsupported keys %s' % kwargs.keys()})
    return data
def send_request(self, method, kwargs):
    """
    Send POST or GET request to the cache server with provided parameters.

    :param method: "POST" (maps to /rest/create) or "GET" (/rest/request)
    :param kwargs: dict with input/idx/limit/sort/dir parameters
    :return: decoded JSON response or a fail record
    :raises Exception: for unsupported methods
    """
    url = self.cachesrv
    uinput = getarg(kwargs, "input", "")
    # NOTE: removed unused local 'format' which also shadowed the builtin
    idx = getarg(kwargs, "idx", 0)
    limit = getarg(kwargs, "limit", 10)
    skey = getarg(kwargs, "sort", "")
    sdir = getarg(kwargs, "dir", "asc")
    params = {"query": uinput, "idx": idx, "limit": limit,
              "skey": skey, "order": sdir}
    if method == "POST":
        path = "/rest/create"
    elif method == "GET":
        path = "/rest/request"
    else:
        raise Exception("Unsupported method %s" % method)
    headers = {"Accept": "application/json",
               "Content-type": "application/json"}
    try:
        data = urllib2_request(method, url + path, params, headers=headers)
        result = json.loads(data)
    except Exception:  # narrowed from bare except
        self.daslogger.error(traceback.format_exc())
        result = {"status": "fail", "reason": traceback.format_exc()}
    return result
def yuijson(self, **kwargs):
    """
    Provide JSON in YUI compatible format to be used in DynamicData table
    widget, see
    http://developer.yahoo.com/yui/examples/datatable/dt_dynamicdata.html
    """
    rows = self.result(kwargs)
    rowlist = []
    rowid = 0  # renamed from 'id' to avoid shadowing the builtin
    for row in rows:
        das = row['das']
        if type(das) is dict:
            das = [das]
        resdict = {}
        for jdx in range(0, len(das)):
            entry = das[jdx]
            resdict[rowid] = rowid
            for idx in range(0, len(entry['system'])):
                api = entry['api'][idx]
                system = entry['system'][idx]
                key = entry['selection_keys'][idx]
                data = row[key]
                if type(data) is list:
                    data = data[jdx]
                if type(data) is list:
                    data = data[idx]
                # extract from DAS object the values for UI keys.
                # BUG FIX: use a distinct loop variable here -- the
                # original reused 'item' and clobbered the outer das
                # entry, breaking subsequent iterations over 'system'
                for pitem in self.dasmapping.presentation(key):
                    daskey = pitem['das']
                    uiname = pitem['ui']
                    if uiname not in resdict:
                        resdict[uiname] = ""
                    # look at key attributes, which may be compound as
                    # well, e.g. block.replica.se
                    if type(data) is dict:
                        result = dict(data)
                    elif type(data) is list:
                        result = list(data)
                    else:
                        result = data
                    res = ""
                    try:
                        for elem in daskey.split('.')[1:]:
                            if elem in result:
                                res = result[elem]
                        resdict[uiname] = res
                    except Exception:  # narrowed from bare except
                        pass
        if resdict not in rowlist:
            rowlist.append(resdict)
        rowid += 1
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)
    total = len(rowlist)
    jsondict = {
        'recordsReturned': len(rowlist),
        'totalRecords': total,
        'startIndex': idx,
        'sort': 'true',
        'dir': 'asc',
        'pageSize': limit,
        'records': rowlist
    }
    return jsondict
def tableview(self, head, data):
    """
    Represent data in tabular view.

    :param head: dict with query metadata (args, dasquery, status, ctime)
    :param data: iterable of DAS records to render as table rows
    :return: rendered HTML page
    """
    kwargs = head.get('args')
    # NOTE: removed unused locals 'total' and 'apilist'
    dasquery = head.get('dasquery')
    filters = dasquery.filters
    sdir = getarg(kwargs, 'dir', '')
    titles = []
    page = self.pagination(head)
    fltbar = self.filter_bar(dasquery)
    if filters:
        for flt in filters:
            # skip filter expressions (comparisons); keep plain keys
            if flt.find('=') != -1 or flt.find('>') != -1 or \
                flt.find('<') != -1:
                continue
            titles.append(flt)
    style = 1
    tpage = ""
    pkey = None
    status = head.get('status', None)
    if status == 'fail':
        reason = head.get('reason', '')
        if reason:
            page += '<br/><span class="box_red">%s</span>' % reason
    for row in data:
        if not fltbar:
            fltbar = self.fltpage(dasquery)
        try:  # we don't need to show qhash in table view
            del row['qhash']
        except Exception:  # narrowed from bare except
            pass
        rec = []
        if not pkey and 'das' in row and 'primary_key' in row['das']:
            pkey = row['das']['primary_key'].split('.')[0]
        if filters:
            for flt in filters:
                rec.append(DotDict(row).get(flt))
        else:
            gen = self.convert2ui(row)
            titles = []
            for uikey, val, _link, _desc, _examples in gen:
                skip = 0
                if not filters:
                    if uikey in titles:
                        skip = 1
                    else:
                        titles.append(uikey)
                if not skip:
                    rec.append(val)
        # alternate row styling
        if style:
            style = 0
        else:
            style = 1
        link = '<a href="/das/records/%s?collection=merge">link</a>' \
            % quote(str(row['_id']))  # cgi.escape the id
        tpage += self.templatepage('das_table_row', rec=rec, tag='td', \
            style=style, encode=1, record=link)
    # flip sort direction for the next request
    if sdir == 'asc':
        sdir = 'desc'
    elif sdir == 'desc':
        sdir = 'asc'
    else:  # default sort direction
        sdir = 'asc'
    theads = []
    for title in titles:
        theads.append(title)
    theads.append('Record')
    thead = self.templatepage('das_table_row', rec=theads, tag='th', \
        style=0, encode=0, record=0)
    page += fltbar
    page += '<br />'
    page += '<table class="das_table">' + thead + tpage + '</table>'
    page += '<br />'
    page += '<div align="right">DAS cache server time: %5.3f sec</div>' \
        % head['ctime']
    return page
def get_data(self, kwargs):
    """
    Invoke DAS workflow and get data from the cache.

    Returns a (head, data) pair where head carries status/nresults/ctime
    and data is the list of cached records (empty on failure). When a
    dataset query yields no results, dataset hints from other DBS
    instances are attached to the records.
    """
    head = dict(timestamp=time.time())
    head['args'] = kwargs
    uinput = kwargs.get('input', '')
    inst = kwargs.get('instance', self.dbs_global)
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 0)  # do not impose limit
    coll = kwargs.get('collection', 'merge')
    status = kwargs.get('status')
    error = kwargs.get('error')
    reason = kwargs.get('reason')
    dasquery = kwargs.get('dasquery', None)
    time0 = time.time()
    if dasquery:
        dasquery = DASQuery(dasquery, instance=inst)
        if dasquery.error:
            # BUG FIX: original referenced an undefined 'form' variable
            # here (NameError) and returned an HTML page instead of the
            # (head, data) pair this method promises
            head.update({'status': 'fail', 'reason': dasquery.error,
                         'ctime': time.time()-time0, 'input': uinput})
            return head, []
    else:
        check, content = \
            self.generate_dasquery(uinput, inst, html_mode=False)
        if check:
            head.update({'status': 'fail', 'reason': content,
                         'ctime': time.time()-time0, 'input': uinput})
            data = []
            return head, data
        dasquery = content  # returned content is valid DAS query
    try:
        nres = self.dasmgr.nresults(dasquery, coll)
        data = \
            self.dasmgr.get_from_cache(dasquery, idx, limit)
        # check that we got what we expected
        data = [r for r in data]
        if nres and not len(data):
            # BUG FIX: range(1, 3, 5) yields only [1]; the intended
            # back-off retries are 1, 3 and 5 seconds
            for retry in (1, 3, 5):
                msg = 'retry in %s sec' % retry
                dasprint(dastimestamp('DAS WARNING '), msg, dasquery)
                time.sleep(retry)  # retry one more time
                data = \
                    self.dasmgr.get_from_cache(dasquery, idx, limit)
                data = [r for r in data]
                if len(data):
                    break
        if nres and not len(data):
            msg = 'fail to get all data for %s, nres=%s, len(data)=%s' \
                % (dasquery, nres, len(data))
            dasprint(dastimestamp('DAS WARNING '), msg)
            status = 'fail'
            reason = 'Fail to retrieve data from DAS cache, please retry'
        if dasquery.aggregators:
            # aggregators split DAS record into sub-system and then
            # apply aggregator functions, therefore we need to correctly
            # account for nresults. Resolve generator into list and take
            # its length as nresults value.
            data = [r for r in data]
            nres = len(data)
        if error:  # DAS record contains an error
            status = 'error'
        head.update({'status': status, 'nresults': nres,
                     'ctime': time.time()-time0, 'dasquery': dasquery})
    except Exception as exc:
        status = 'fail'
        reason = str(exc)
        print_exc(exc)
        head.update({'status': status,
                     'ctime': time.time()-time0, 'dasquery': dasquery})
        data = []
    head.update({'incache': self.dasmgr.incache(dasquery, coll='cache'),
                 'apilist': self.dasmgr.apilist(dasquery)})
    if reason:
        head.update({'reason': reason})
    if status != 'ok':
        head.update(self.info())
    # check if query had dataset input and returned no results,
    # then run hint functions to find dataset in other DBS instances
    mquery = dasquery.mongo_query
    empty = False
    for item in data:
        if 'dataset.name' in mquery['spec'] and 'dataset' in mquery['fields'] \
                and 'result' not in item:
            if not item['dataset']:
                empty = True
                break
    if empty:  # if no results found add dataset from other DBS instances
        hints = self.hint_datasets(kwargs)
        for item in data:
            item.update({'hints': hints})
    return head, data
def records(self, *args, **kwargs):
    """
    Retrieve record id's (or a single record) from the DAS cache server.

    Without arguments all record ids are listed; a record id may be
    given as the first positional argument (optionally followed by an
    output format, 'xml' or 'json') or as the _id keyword argument.
    Returns an HTML page, or raw xml/json when a format is requested.
    """
    try:
        recordid = None
        dformat = '' # requested raw output format; renamed from
                     # 'format', which shadowed the builtin
        if args:
            recordid = args[0]
            spec = {'_id': recordid}
            fields = None
            query = dict(fields=fields, spec=spec)
            if len(args) == 2:
                dformat = args[1]
        elif kwargs and '_id' in kwargs:
            spec = {'_id': kwargs['_id']}
            fields = None
            query = dict(fields=fields, spec=spec)
        else: # return all ids
            query = dict(fields=None, spec={})
        nresults = self.nresults(query)
        time0 = time.time()
        url = self.cachesrv
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10)
        show = getarg(kwargs, 'show', 'json')
        coll = getarg(kwargs, 'collection', 'merge')
        params = {
            'query': json.dumps(query),
            'idx': idx,
            'limit': limit,
            'collection': coll
        }
        path = '/rest/records'
        headers = {"Accept": "application/json"}
        try:
            data = urllib2_request('GET', url + path, params, headers=headers)
            result = json.loads(data)
        except Exception:
            self.daslogger.error(traceback.format_exc())
            result = {'status': 'fail', 'reason': traceback.format_exc()}
        res = ""
        if result['status'] == 'success':
            if recordid: # we got id
                for row in result['data']:
                    if show == 'json':
                        jsoncode = {'jsoncode': json2html(row, "")}
                        res += self.templatepage('das_json', **jsoncode)
                    elif show == 'code':
                        code = pformat(row, indent=1, width=100)
                        res += self.templatepage('das_code', code=code)
                    else:
                        code = yaml.dump(row, width=100, indent=4,
                                default_flow_style=False)
                        res += self.templatepage('das_code', code=code)
            else:
                for row in result['data']:
                    rid = row['_id']
                    del row['_id']
                    record = dict(id=rid, daskeys=', '.join(row))
                    res += self.templatepage('das_record', **record)
        else:
            # FIX: the reason lives in the result dict; the old code
            # assigned res = result['status'] (a string) and then
            # indexed it with ['reason'], raising TypeError
            if 'reason' in result:
                return self.error(result['reason'])
            else:
                # FIX: format string lacked %s, kwargs were dropped
                msg = 'Unknown error, kwargs=%s' % kwargs
                return self.error(msg)
        if recordid:
            if dformat:
                if dformat == 'xml':
                    return self.wrap2dasxml(result['data'])
                elif dformat == 'json':
                    return self.wrap2dasjson(result['data'])
                else:
                    return self.error('Unsupported data format %s' % dformat)
            page = res
        else:
            url = '/das/records?'
            idict = dict(nrows=nresults, idx=idx, limit=limit,
                         results=res, url=url)
            page = self.templatepage('das_pagination', **idict)
        form = self.form(uinput="")
        ctime = (time.time() - time0)
        page = self.page(form + page, ctime=ctime)
        return page
    except Exception:
        return self.error(self.gen_error_msg(kwargs))
def apimap(self, dasquery):
    """
    Analyze input query and yield url, api, args, format, expire
    for further processing.

    For every API registered in self.map, check whether all query
    condition keys map onto API input parameters and whether the API
    primary key is among the query selection keys; only APIs passing
    every check are yielded as (url, api, args, format, expire).
    """
    # query condition spec and selection keys
    cond = getarg(dasquery.mongo_query, 'spec', {})
    instance = dasquery.mongo_query.get('instance', self.dbs_global)
    skeys = getarg(dasquery.mongo_query, 'fields', [])
    if not skeys:
        skeys = []
    self.logger.info("\n")
    for api, value in self.map.iteritems():
        expire = value['expire']
        iformat = value['format']
        url = self.adjust_url(value['url'], instance)
        args = dict(value['params']) # make new copy, since we'll adjust
        wild = value.get('wild_card', '*')
        # found counts condition keys successfully mapped to API params;
        # zero means the API is rejected
        found = 0
        for key, val in cond.iteritems():
            # check if key is a special one
            if key in das_special_keys():
                found += 1
            # check if keys from conditions are accepted by API.
            if self.dasmapping.check_dasmap(self.name, api, key, val):
                # need to convert key (which is daskeys.map) into
                # input api parameter
                for apiparam in \
                    self.dasmapping.das2api(self.name, key, val, api):
                    if args.has_key(apiparam):
                        args[apiparam] = val
                        found += 1
            else:
                found = 0
                break # condition key does not map into API params
        self.adjust_params(api, args, instance)
        if not found:
            msg = "--- rejects API %s, parameters don't match" % api
            self.logger.info(msg)
            msg = 'args=%s' % args
            self.logger.debug(msg)
            continue
        # delete args keys whose value is optional
        delete_keys(args, 'optional')
        # check that there is no "required" parameter left in args,
        # since such api will not work
        if 'required' in args.values():
            msg = '--- rejects API %s, parameter is required' % api
            self.logger.info(msg)
            msg = 'args=%s' % args
            self.logger.debug(msg)
            continue
        # adjust pattern symbols in arguments: replace generic '*'
        # wild-card with the API-specific one
        if wild != '*':
            for key, val in args.iteritems():
                if isinstance(val, str) or isinstance(val, unicode):
                    val = val.replace('*', wild)
                    args[key] = val
        # API primary key must be among query selection keys
        prim_key = self.dasmapping.primary_key(self.name, api)
        if prim_key not in skeys:
            msg = "--- rejects API %s, primary_key %s is not selected"\
                    % (api, prim_key)
            self.logger.info(msg)
            continue
        msg = '+++ %s passes API %s' % (self.name, api)
        self.logger.info(msg)
        msg = 'args=%s' % args
        self.logger.debug(msg)
        msg = "yield "
        msg += "system ***%s***, url=%s, api=%s, args=%s, format=%s, " \
                % (self.name, url, api, args, iformat)
        msg += "expire=%s, wild_card=%s" \
                % (expire, wild)
        self.logger.debug(msg)
        yield url, api, args, iformat, expire
def records(self, *args, **kwargs): """ Retieve all records id's. """ try: recordid = None if args: recordid = args[0] spec = {'_id':ObjectId(recordid)} fields = None query = dict(fields=fields, spec=spec) elif kwargs and '_id' in kwargs: spec = {'_id': ObjectId(kwargs['_id'])} fields = None query = dict(fields=fields, spec=spec) else: # return all ids query = dict(fields=None, spec={}) res = '' time0 = time.time() idx = getarg(kwargs, 'idx', 0) limit = getarg(kwargs, 'limit', 50) coll = kwargs.get('collection', 'merge') view = kwargs.get('view', '') if view == 'json': res = [] inst = kwargs.get('instance', self.dbs_global) form = self.form(uinput="") check, content = self.generate_dasquery(query, inst) if check: return self.page(form + content, ctime=time.time()-time0) dasquery = content # returned content is valid DAS query nresults = self.dasmgr.rawcache.nresults(dasquery, coll) gen = self.dasmgr.rawcache.get_from_cache\ (dasquery, idx=idx, limit=limit, collection=coll) if recordid: # we got id for row in gen: if view == 'json': res.append(row) else: res += das_json(dasquery, row) else: for row in gen: rid = row['_id'] del row['_id'] res += self.templatepage('das_record', \ id=rid, collection=coll, daskeys=', '.join(row)) if recordid: page = res else: url = '/das/records?' if nresults: page = self.templatepage('das_pagination', \ nrows=nresults, idx=idx, limit=limit, url=url, \ cgi=cgi, str=str) else: page = 'No results found, nresults=%s' % nresults page += res ctime = (time.time()-time0) if view == 'json': return json.dumps(res) page = self.page(form + page, ctime=ctime) return page except Exception as exc: print_exc(exc) return self.error(gen_error_msg(kwargs))
def records(self, *args, **kwargs):
    """
    Retrieve record id's (or a single record) from the DAS cache server.

    Without arguments all record ids are listed; a record id may be
    given as the first positional argument (optionally followed by an
    output format, 'xml' or 'json') or as the _id keyword argument.
    Returns an HTML page, or raw xml/json when a format is requested.
    """
    try:
        recordid = None
        dformat = '' # requested raw output format; renamed from
                     # 'format', which shadowed the builtin
        if args:
            recordid = args[0]
            spec = {'_id': recordid}
            fields = None
            query = dict(fields=fields, spec=spec)
            if len(args) == 2:
                dformat = args[1]
        elif kwargs and '_id' in kwargs:
            spec = {'_id': kwargs['_id']}
            fields = None
            query = dict(fields=fields, spec=spec)
        else: # return all ids
            query = dict(fields=None, spec={})
        nresults = self.nresults(query)
        time0 = time.time()
        url = self.cachesrv
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10)
        show = getarg(kwargs, 'show', 'json')
        coll = getarg(kwargs, 'collection', 'merge')
        params = {'query':json.dumps(query), 'idx':idx, 'limit':limit,
                  'collection':coll}
        path = '/rest/records'
        headers = {"Accept": "application/json"}
        try:
            data = urllib2_request('GET', url+path, params, headers=headers)
            result = json.loads(data)
        except Exception:
            self.daslogger.error(traceback.format_exc())
            result = {'status':'fail', 'reason':traceback.format_exc()}
        res = ""
        if result['status'] == 'success':
            if recordid: # we got id
                for row in result['data']:
                    if show == 'json':
                        jsoncode = {'jsoncode': json2html(row, "")}
                        res += self.templatepage('das_json', **jsoncode)
                    elif show == 'code':
                        code = pformat(row, indent=1, width=100)
                        res += self.templatepage('das_code', code=code)
                    else:
                        code = yaml.dump(row, width=100, indent=4,
                                default_flow_style=False)
                        res += self.templatepage('das_code', code=code)
            else:
                for row in result['data']:
                    rid = row['_id']
                    del row['_id']
                    record = dict(id=rid, daskeys=', '.join(row))
                    res += self.templatepage('das_record', **record)
        else:
            # FIX: the reason lives in the result dict; the old code
            # assigned res = result['status'] (a string) and then
            # indexed it with ['reason'], raising TypeError
            if 'reason' in result:
                return self.error(result['reason'])
            else:
                # FIX: format string lacked %s, kwargs were dropped
                msg = 'Unknown error, kwargs=%s' % kwargs
                return self.error(msg)
        if recordid:
            if dformat:
                if dformat == 'xml':
                    return self.wrap2dasxml(result['data'])
                elif dformat == 'json':
                    return self.wrap2dasjson(result['data'])
                else:
                    return self.error('Unsupported data format %s' % dformat)
            page = res
        else:
            url = '/das/records?'
            idict = dict(nrows=nresults, idx=idx, limit=limit,
                         results=res, url=url)
            page = self.templatepage('das_pagination', **idict)
        form = self.form(uinput="")
        ctime = (time.time()-time0)
        page = self.page(form + page, ctime=ctime)
        return page
    except Exception:
        return self.error(self.gen_error_msg(kwargs))
def apimap(self, dasquery):
    """
    Analyze input query and yield url, api, args, format, expire
    for further processing.

    Every API in self.map is checked against the query condition
    keys (coverage and per-key mapping into API parameters) and
    against the query selection keys (must equal the API look-up
    keys); APIs passing all checks are yielded as
    (url, api, args, format, expire) tuples.
    """
    srv = self.name # get local copy to avoid threading issues
    cond = getarg(dasquery.mongo_query, 'spec', {})
    instance = dasquery.mongo_query.get('instance', self.dbs_global)
    skeys = getarg(dasquery.mongo_query, 'fields', [])
    if not skeys:
        skeys = []
    self.logger.info("\n")
    for api, value in self.map.items():
        expire = value['expire']
        iformat = value['format']
        url = self.adjust_url(value['url'], instance)
        if not url:
            msg = '--- rejects API %s, no URL' % api
            self.logger.info(msg)
            continue
        args = dict(value['params']) # make new copy, since we'll adjust
        wild = value.get('wild_card', '*')
        # found counts condition keys mapped into API parameters
        found = 0
        # check if input parameters are covered by API
        if not self.dasmapping.check_api_match(srv, api, cond):
            msg = '--- rejects API %s, does not cover input condition keys' \
                    % api
            self.logger.info(msg)
            continue
        # once we now that API covers input set of parameters we check
        # every input parameter for pattern matching
        for key, val in cond.items():
            # check if keys from conditions are accepted by API
            # need to convert key (which is daskeys.map) into
            # input api parameter
            for apiparam in self.dasmapping.das2api(srv, api, key, val):
                if apiparam in args:
                    args[apiparam] = val
                    found += 1
        # VK 20160708, wrong statement, it caused to pass
        # datasets API for query dataset in [path1, path2]
        # I'll leave block here until I test and verify that
        # commented out block will not cause other issues
        #
        # check the case when we only have single condition key
        # and it is the key we look-up
        # if not found and skeys == [k.split('.')[0] for k in cond.keys()]:
        #     found = 1
        # check if number of keys on cond and args are the same
        if len(cond.keys()) != found:
            msg = "--- reject API %s, not all condition keys are covered" \
                    % api
            self.logger.info(msg)
            msg = 'args=%s' % args
            self.logger.debug(msg)
            continue
        if not found:
            msg = "--- rejects API %s, parameters don't match" % api
            self.logger.info(msg)
            msg = 'args=%s' % args
            self.logger.debug(msg)
            continue
        self.adjust_params(api, args, instance)
        # delete args keys whose value is optional
        delete_keys(args, 'optional')
        # check that there is no "required" parameter left in args,
        # since such api will not work
        if 'required' in args.values():
            msg = '--- rejects API %s, parameter is required' % api
            self.logger.info(msg)
            msg = 'args=%s' % args
            self.logger.debug(msg)
            continue
        # adjust pattern symbols in arguments: replace generic '*'
        # wild-card with the API-specific one
        if wild != '*':
            for key, val in args.items():
                if isinstance(val, str) or isinstance(val, unicode):
                    val = val.replace('*', wild)
                    args[key] = val
        # compare query selection keys with API look-up keys
        api_lkeys = self.dasmapping.api_lkeys(srv, api)
        if set(api_lkeys) != set(skeys):
            msg = "--- rejects API %s, api_lkeys(%s)!=skeys(%s)"\
                    % (api, api_lkeys, skeys)
            self.logger.info(msg)
            continue
        msg = '+++ %s passes API %s' % (srv, api)
        self.logger.info(msg)
        msg = 'args=%s' % args
        self.logger.debug(msg)
        msg = "yield "
        msg += "system ***%s***, url=%s, api=%s, args=%s, format=%s, " \
                % (srv, url, api, args, iformat)
        msg += "expire=%s, wild_card=%s" \
                % (expire, wild)
        self.logger.debug(msg)
        yield url, api, args, iformat, expire
def yuijson(self, **kwargs): """ Provide JSON in YUI compatible format to be used in DynamicData table widget, see http://developer.yahoo.com/yui/examples/datatable/dt_dynamicdata.html """ rows = self.result(kwargs) rowlist = [] id = 0 for row in rows: das = row['das'] if type(das) is dict: das = [das] resdict = {} for jdx in range(0, len(das)): item = das[jdx] resdict[id] = id for idx in range(0, len(item['system'])): api = item['api'][idx] system = item['system'][idx] key = item['selection_keys'][idx] data = row[key] if type(data) is list: data = data[jdx] if type(data) is list: data = data[idx] # I need to extract from DAS object the values for UI keys for item in self.dasmapping.presentation(key): daskey = item['das'] uiname = item['ui'] if uiname not in resdict: resdict[uiname] = "" # look at key attributes, which may be compound as well # e.g. block.replica.se if type(data) is dict: result = dict(data) elif type(data) is list: result = list(data) else: result = data res = "" try: for elem in daskey.split('.')[1:]: if elem in result: res = result[elem] resdict[uiname] = res except: pass # pad = "" # jsoncode = {'jsoncode': json2html(data, pad)} # jsonhtml = self.templatepage('das_json', **jsoncode) # jsondict = {'id':id, 'system':system, 'api':api, key:jsonhtml} if resdict not in rowlist: rowlist.append(resdict) id += 1 idx = getarg(kwargs, 'idx', 0) limit = getarg(kwargs, 'limit', 10) total = len(rowlist) jsondict = {'recordsReturned': len(rowlist), 'totalRecords': total, 'startIndex':idx, 'sort':'true', 'dir':'asc', 'pageSize': limit, 'records': rowlist} return jsondict
def decompose(self, query): """Extract selection keys and conditions from input query""" skeys = getarg(query, 'fields', []) cond = getarg(query, 'spec', {}) return skeys, cond
def tableview(self, head, data):
    """
    Represent data in tabular view.

    :param head: result meta-data dict (args, nresults, incache,
        dasquery, ctime)
    :param data: iterable of DAS records to render as table rows
    :return: HTML string with pagination, filter bar and the table
    """
    kwargs = head.get('args')
    total = head.get('nresults', 0)
    incache = head.get('incache')
    dasquery = head.get('dasquery')
    uinput = kwargs.get('input', dasquery.query)
    inst = dasquery.instance
    filters = dasquery.filters
    idx = getarg(kwargs, 'idx', 0)
    limit = getarg(kwargs, 'limit', 10)
    sdir = getarg(kwargs, 'dir', '')
    titles = []
    page = self.pagination(total, incache, kwargs)
    fltbar = self.filter_bar(dasquery)
    if filters:
        # plain filters become column titles; comparison filters
        # (=, >, <) are conditions, not columns
        for flt in filters:
            if flt.find('=') != -1 or flt.find('>') != -1 or \
                flt.find('<') != -1:
                continue
            titles.append(flt)
    style = 1 # alternating row style flag
    tpage = ""
    pkey = None
    for row in data:
        if not fltbar:
            fltbar = self.fltpage(row)
        try: # we don't need to show qhash in table view
            del row['qhash']
        except:
            pass
        rec = []
        # remember the primary key of the first row that carries one
        if not pkey and row.has_key('das') and \
            row['das'].has_key('primary_key'):
            pkey = row['das']['primary_key'].split('.')[0]
        if filters:
            for flt in filters:
                rec.append(DotDict(row).get(flt))
        else:
            # no filters: derive column titles from UI key names
            gen = self.convert2ui(row)
            titles = []
            for uikey, val in gen:
                skip = 0
                if not filters:
                    if uikey in titles:
                        skip = 1
                    else:
                        titles.append(uikey)
                if not skip:
                    rec.append(val)
        # toggle alternating row style
        if style:
            style = 0
        else:
            style = 1
        link = '<a href="/das/records/%s?collection=merge">link</a>' \
            % quote(str(row['_id'])) # cgi.escape the id
        tpage += self.templatepage('das_table_row', rec=rec, tag='td', \
            style=style, encode=1, record=link)
    # NOTE(review): sdict and args below are computed but never used;
    # sort_dict is also invoked a second time further down — looks
    # like leftover code, kept as-is to preserve behavior
    sdict = self.sort_dict(titles, pkey)
    if sdir == 'asc':
        sdir = 'desc'
    elif sdir == 'desc':
        sdir = 'asc'
    else: # default sort direction
        sdir = 'asc'
    args = {'input':uinput, 'idx':idx, 'limit':limit, 'instance':inst, \
        'view':'table'}
    theads = []
    for title in titles:
        theads.append(title)
    theads.append('Record')
    thead = self.templatepage('das_table_row', rec=theads, tag='th', \
        style=0, encode=0, record=0)
    self.sort_dict(titles, pkey)
    page += fltbar
    page += '<br />'
    page += '<table class="das_table">' + thead + tpage + '</table>'
    page += '<br />'
    page += '<div align="right">DAS cache server time: %5.3f sec</div>' \
        % head['ctime']
    return page
def listview(self, kwargs):
    """
    Provide DAS list view.

    Queries the cache server status first; if data are not yet in the
    raw cache a POST request is sent and a status page is returned.
    Otherwise each result row is rendered with its record shown as
    json/code/yaml according to the 'show' parameter.
    """
    # force to load the page all the time
    cherrypy.response.headers['Cache-Control'] = 'no-cache'
    cherrypy.response.headers['Pragma'] = 'no-cache'
    time0 = time.time()
    ajaxreq = getarg(kwargs, 'ajax', 0)
    uinput = getarg(kwargs, 'input', '')
    limit = getarg(kwargs, 'limit', 10)
    show = getarg(kwargs, 'show', 'json')
    form = self.form(uinput=uinput)
    # self.status sends request to Cache Server
    # Cache Server uses das_core to retrieve status
    status = self.status(input=uinput, ajax=0)
    if status == 'no data': # no data in raw cache, send POST request
        self.send_request('POST', kwargs)
        ctime = (time.time()-time0)
        page = self.status(input=uinput)
        page = self.page(form + page, ctime=ctime)
        return page
    elif status == 'fail':
        kwargs['reason'] = 'Unable to get status from data-service'
        return self.error(self.gen_error_msg(kwargs))
    total = self.nresults(kwargs)
    rows = self.result(kwargs)
    nrows = len(rows)
    page = ""
    ndict = {'nrows':total, 'limit':limit}
    page = self.templatepage('das_nrecords', **ndict)
    style = "white"
    for row in rows:
        rid = row['_id'] # renamed from 'id', which shadowed the builtin
        page += '<div class="%s"><hr class="line" />' % style
        gen = self.convert2ui(row)
        for uikey, value in [k for k, g in groupby(gen)]:
            page += "<b>%s</b>: %s<br />" % (uikey, value)
        pad = ""
        if show == 'json':
            jsoncode = {'jsoncode': json2html(row, pad)}
            jsonhtml = self.templatepage('das_json', **jsoncode)
            jsondict = dict(data=jsonhtml, id=rid, rec_id=rid)
            page += self.templatepage('das_row', **jsondict)
        elif show == 'code':
            code = pformat(row, indent=1, width=100)
            data = self.templatepage('das_code', code=code)
            # FIX: was {'data':data, 'id':id, rec_id:id} where rec_id
            # is an undefined bare name (NameError); the json branch
            # above shows the intended 'rec_id' keyword
            datadict = {'data':data, 'id':rid, 'rec_id':rid}
            page += self.templatepage('das_row', **datadict)
        else:
            code = yaml.dump(row, width=100, indent=4,
                    default_flow_style=False)
            data = self.templatepage('das_code', code=code)
            # FIX: same undefined rec_id key as in the 'code' branch
            datadict = {'data':data, 'id':rid, 'rec_id':rid}
            page += self.templatepage('das_row', **datadict)
        page += '</div>'
    ctime = (time.time()-time0)
    return self.page(form + page, ctime=ctime)
def apimap(self, dasquery):
    """
    Analyze input query and yield url, api, args, format, expire
    for further processing.

    Every API in self.map is checked against the query condition
    keys (coverage and per-key mapping into API parameters) and
    against the query selection keys (must equal the API look-up
    keys); APIs passing all checks are yielded as
    (url, api, args, format, expire) tuples.
    """
    srv = self.name # get local copy to avoid threading issues
    cond = getarg(dasquery.mongo_query, "spec", {})
    instance = dasquery.mongo_query.get("instance", self.dbs_global)
    skeys = getarg(dasquery.mongo_query, "fields", [])
    if not skeys:
        skeys = []
    self.logger.info("\n")
    for api, value in self.map.iteritems():
        expire = value["expire"]
        iformat = value["format"]
        url = self.adjust_url(value["url"], instance)
        if not url:
            msg = "--- rejects API %s, no URL" % api
            self.logger.info(msg)
            continue
        args = dict(value["params"]) # make new copy, since we'll adjust
        wild = value.get("wild_card", "*")
        # found counts condition keys mapped into API parameters
        found = 0
        # check if input parameters are covered by API
        if not self.dasmapping.check_api_match(srv, api, cond):
            msg = "--- rejects API %s, does not cover input condition keys" % api
            self.logger.info(msg)
            continue
        # once we now that API covers input set of parameters we check
        # every input parameter for pattern matching
        for key, val in cond.iteritems():
            # check if keys from conditions are accepted by API
            # need to convert key (which is daskeys.map) into
            # input api parameter
            for apiparam in self.dasmapping.das2api(srv, api, key, val):
                if apiparam in args:
                    args[apiparam] = val
                    found += 1
        # check if number of keys on cond and args are the same
        if len(cond.keys()) != found:
            msg = "--- reject API %s, not all condition keys are covered" % api
            self.logger.info(msg)
            msg = "args=%s" % args
            self.logger.debug(msg)
            continue
        if not found:
            msg = "--- rejects API %s, parameters don't match" % api
            self.logger.info(msg)
            msg = "args=%s" % args
            self.logger.debug(msg)
            continue
        self.adjust_params(api, args, instance)
        # delete args keys whose value is optional
        delete_keys(args, "optional")
        # check that there is no "required" parameter left in args,
        # since such api will not work
        if "required" in args.values():
            msg = "--- rejects API %s, parameter is required" % api
            self.logger.info(msg)
            msg = "args=%s" % args
            self.logger.debug(msg)
            continue
        # adjust pattern symbols in arguments: replace generic '*'
        # wild-card with the API-specific one
        if wild != "*":
            for key, val in args.iteritems():
                if isinstance(val, str) or isinstance(val, unicode):
                    val = val.replace("*", wild)
                    args[key] = val
        # compare query selection keys with API look-up keys
        api_lkeys = self.dasmapping.api_lkeys(srv, api)
        if set(api_lkeys) != set(skeys):
            msg = "--- rejects API %s, api_lkeys(%s)!=skeys(%s)" % (api, api_lkeys, skeys)
            self.logger.info(msg)
            continue
        msg = "+++ %s passes API %s" % (srv, api)
        self.logger.info(msg)
        msg = "args=%s" % args
        self.logger.debug(msg)
        msg = "yield "
        msg += "system ***%s***, url=%s, api=%s, args=%s, format=%s, " % (srv, url, api, args, iformat)
        msg += "expire=%s, wild_card=%s" % (expire, wild)
        self.logger.debug(msg)
        yield url, api, args, iformat, expire
def tableview(self, head, data): """ Represent data in tabular view. """ kwargs = head.get('args') total = head.get('nresults', 0) apilist = head.get('apilist') dasquery = head.get('dasquery') filters = dasquery.filters sdir = getarg(kwargs, 'dir', '') titles = [] page = self.pagination(head) fltbar = self.filter_bar(dasquery) if filters: for flt in filters: if flt.find('=') != -1 or flt.find('>') != -1 or \ flt.find('<') != -1: continue titles.append(flt) style = 1 tpage = "" pkey = None status = head.get('status', None) if status == 'fail': reason = head.get('reason', '') if reason: page += '<br/><span class="box_red">%s</span>' % reason for row in data: if not fltbar: fltbar = self.fltpage(row) try: # we don't need to show qhash in table view del row['qhash'] except: pass rec = [] if not pkey and 'das' in row and 'primary_key' in row['das']: pkey = row['das']['primary_key'].split('.')[0] if filters: for flt in filters: rec.append(DotDict(row).get(flt)) else: gen = self.convert2ui(row) titles = [] for uikey, val, _link, _desc, _examples in gen: skip = 0 if not filters: if uikey in titles: skip = 1 else: titles.append(uikey) if not skip: rec.append(val) if style: style = 0 else: style = 1 link = '<a href="/das/records/%s?collection=merge">link</a>' \ % quote(str(row['_id'])) # cgi.escape the id tpage += self.templatepage('das_table_row', rec=rec, tag='td', \ style=style, encode=1, record=link) if sdir == 'asc': sdir = 'desc' elif sdir == 'desc': sdir = 'asc' else: # default sort direction sdir = 'asc' theads = [] for title in titles: theads.append(title) theads.append('Record') thead = self.templatepage('das_table_row', rec=theads, tag='th', \ style=0, encode=0, record=0) page += fltbar page += '<br />' page += '<table class="das_table">' + thead + tpage + '</table>' page += '<br />' page += '<div align="right">DAS cache server time: %5.3f sec</div>' \ % head['ctime'] return page
def listview(self, kwargs):
    """
    Provide DAS list view.

    Queries the cache server status first; if data are not yet in the
    raw cache a POST request is sent and a status page is returned.
    Otherwise each result row is rendered with its record shown as
    json/code/yaml according to the 'show' parameter.
    """
    # force to load the page all the time
    cherrypy.response.headers['Cache-Control'] = 'no-cache'
    cherrypy.response.headers['Pragma'] = 'no-cache'
    time0 = time.time()
    ajaxreq = getarg(kwargs, 'ajax', 0)
    uinput = getarg(kwargs, 'input', '')
    limit = getarg(kwargs, 'limit', 10)
    show = getarg(kwargs, 'show', 'json')
    form = self.form(uinput=uinput)
    # self.status sends request to Cache Server
    # Cache Server uses das_core to retrieve status
    status = self.status(input=uinput, ajax=0)
    if status == 'no data': # no data in raw cache, send POST request
        self.send_request('POST', kwargs)
        ctime = (time.time() - time0)
        page = self.status(input=uinput)
        page = self.page(form + page, ctime=ctime)
        return page
    elif status == 'fail':
        kwargs['reason'] = 'Unable to get status from data-service'
        return self.error(self.gen_error_msg(kwargs))
    total = self.nresults(kwargs)
    rows = self.result(kwargs)
    nrows = len(rows)
    page = ""
    ndict = {'nrows': total, 'limit': limit}
    page = self.templatepage('das_nrecords', **ndict)
    style = "white"
    for row in rows:
        rid = row['_id'] # renamed from 'id', which shadowed the builtin
        page += '<div class="%s"><hr class="line" />' % style
        gen = self.convert2ui(row)
        for uikey, value in [k for k, g in groupby(gen)]:
            page += "<b>%s</b>: %s<br />" % (uikey, value)
        pad = ""
        if show == 'json':
            jsoncode = {'jsoncode': json2html(row, pad)}
            jsonhtml = self.templatepage('das_json', **jsoncode)
            jsondict = dict(data=jsonhtml, id=rid, rec_id=rid)
            page += self.templatepage('das_row', **jsondict)
        elif show == 'code':
            code = pformat(row, indent=1, width=100)
            data = self.templatepage('das_code', code=code)
            # FIX: was {'data': data, 'id': id, rec_id: id} where rec_id
            # is an undefined bare name (NameError); the json branch
            # above shows the intended 'rec_id' keyword
            datadict = {'data': data, 'id': rid, 'rec_id': rid}
            page += self.templatepage('das_row', **datadict)
        else:
            code = yaml.dump(row, width=100, indent=4,
                    default_flow_style=False)
            data = self.templatepage('das_code', code=code)
            # FIX: same undefined rec_id key as in the 'code' branch
            datadict = {'data': data, 'id': rid, 'rec_id': rid}
            page += self.templatepage('das_row', **datadict)
        page += '</div>'
    ctime = (time.time() - time0)
    return self.page(form + page, ctime=ctime)