def run(out, host, query, idx, limit, debug, thr, ckey, cert):
    """
    Worker function which performs a query look-up in DAS and prints
    results to stdout. It should be spawned as a separate process to
    test the DAS server.

    :param out: queue used to report a (nresults, status, qhash) tuple
    :param host: DAS server host
    :param query: DAS query string
    :param idx: index of the first record to fetch
    :param limit: maximum number of records to fetch
    :param debug: verbosity flag
    :param thr: threshold value passed through to get_data (presumably a
        timeout — confirm against get_data's definition)
    :param ckey: SSL key, passed through to get_data
    :param cert: SSL certificate, passed through to get_data
    """
    time0 = time.time()
    # get_data may return either an already-decoded dict or a raw JSON string
    data = get_data(host, query, idx, limit, debug, thr, ckey, cert)
    if isinstance(data, dict):
        jsondict = data
    else:
        jsondict = json.loads(data)
    status = jsondict.get('status', None)
    reason = jsondict.get('reason', None)
    nres = jsondict.get('nresults', None)
    tstm = jsondict.get('timestamp', 0)
    data = jsondict.get('data')
    # take qhash from the first data record when present, otherwise
    # recompute it from the query itself against the global DBS instance
    if data and isinstance(data, list) and len(data):
        qhash = data[0].get('qhash')
    else:
        qhash = DASQuery(query + ' instance=%s' % DBS_GLOBAL).qhash
    msg = 'status: %s client: %s server: %s nresults: %s query: %s qhash: %s' \
        % (status, etime(time0), etime(tstm), nres, query, qhash)
    if nres == 0:
        # dump the full server response when the query matched nothing
        print(jsondict)
    if reason:
        msg += ' reason: %s' % reason
    out.put((nres, status, qhash))
    print(msg)
    if debug:
        if nres > 0:
            if len(data):
                print(data[0])
            else:
                # NOTE(review): nesting reconstructed from collapsed source;
                # presumably fires when nres > 0 but data is empty — confirm
                print("### NO DATA:", jsondict)
def run(out, host, query, idx, limit, debug, thr, ckey, cert):
    """
    Worker function which performs a query look-up in DAS and prints
    results to stdout. It should be spawned as a separate process to
    test the DAS server.

    :param out: queue used to report a (nresults, status, qhash) tuple
    :param host: DAS server host
    :param query: DAS query string
    :param idx: index of the first record to fetch
    :param limit: maximum number of records to fetch
    :param debug: verbosity flag
    :param thr: threshold value passed through to get_data
    :param ckey: SSL key, passed through to get_data
    :param cert: SSL certificate, passed through to get_data
    """
    time0 = time.time()
    # get_data may return either an already-decoded dict or a raw JSON string
    data = get_data(host, query, idx, limit, debug, thr, ckey, cert)
    if isinstance(data, dict):
        jsondict = data
    else:
        jsondict = json.loads(data)
    status = jsondict.get("status", None)
    reason = jsondict.get("reason", None)
    nres = jsondict.get("nresults", None)
    tstm = jsondict.get("timestamp", 0)
    data = jsondict.get("data")
    # take qhash from the first data record when present, otherwise
    # recompute it from the query itself against the global DBS instance
    if data and isinstance(data, list) and len(data):
        qhash = data[0].get("qhash")
    else:
        qhash = DASQuery(query + " instance=%s" % DBS_GLOBAL).qhash
    msg = "status: %s client: %s server: %s nresults: %s query: %s qhash: %s" % (
        status,
        etime(time0),
        etime(tstm),
        nres,
        query,
        qhash,
    )
    if reason:
        msg += " reason: %s" % reason
    out.put((nres, status, qhash))
    # FIX: converted Python 2 print statements to print() calls; the py2
    # form is a syntax error under Python 3 and inconsistent with the
    # other definition of run() in this file
    print(msg)
    if debug:
        if nres > 0:
            if len(data):
                print(data[0])
            else:
                print("### NO DATA:", jsondict)
def main():
    """
    Main function of the DAS stress-test client.

    Fetches a seed list of datasets (or reads queries from a file when
    --qfile is given), spawns one `run` worker process per test query,
    waits for all workers to finish, then prints a summary of results.
    """
    mgr = TestOptionParser()
    opts, _ = mgr.get_opt()
    ntests = opts.ntests
    if not ntests:
        print("Please specify number of tests to run, see options via --help")
        sys.exit(0)
    host = opts.host
    ckey = opts.ckey
    cert = opts.cert
    thr = 600  # threshold passed through to get_data
    debug = opts.debug
    query = opts.query
    qfile = opts.qfile
    limit = opts.limit
    # look-up keys, e.g. "file_run" becomes "file,run"
    lkeys = [k.strip().replace('_', ',') for k in opts.lkeys.split(',')]
    uinput = query.replace('dataset=', '')
    if query.find('dataset=') == -1:
        print('Improper query="%s", please provide dataset query' % query)
        sys.exit(1)
    # check/start monitoring
    monitor_proc = None
    if opts.mon:
        if PSUTIL:
            monitor_proc = Process(target=monitor, args=(opts.mpid, opts.mout))
            monitor_proc.daemon = True
            monitor_proc.start()
        else:
            print("Unable to load psutil package, turn off monitoring")
    # setup initial parameters
    time0 = time.time()
    if qfile:  # read queries from query file
        status = 'qfile'
    else:
        idx = 0
        limit = 0  # fetch all datasets
        data = get_data(host, query, idx, limit, debug, thr, ckey, cert)
        if isinstance(data, dict):
            jsondict = data
        else:
            jsondict = json.loads(data)
        status = jsondict.get('status', None)
    pool = {}
    out = Queue()
    if status == 'ok':
        nres = jsondict.get('nresults', None)
        sec = "(all reported times are in seconds)"
        print("Seed query results: status %s, nrecords %s, time %s %s"
              % (status, nres, etime(time0), sec))
        if nres:
            idx = 0
            limit = opts.limit  # control how many records to get
            datasets = [r['dataset'][0]['name'] for r in jsondict['data']
                        if r['dataset'][0]['name'] != uinput]
            if ntests > len(datasets):
                # FIX: the original concatenated these fragments without a
                # separator, producing "...ntestsndatasets=..."
                msg = 'Number of tests exceed number of found datasets. '
                msg += 'Please use another value for ntests, '
                msg += 'ndatasets=%s, ntests=%s' % (len(datasets), ntests)
                print('\nERROR:', msg)
                sys.exit(1)
            datasets.sort()
            if debug:
                print("Found %s datasets" % len(datasets))
                print("First %s datasets:" % ntests)
                for dataset in datasets[:ntests]:
                    print(dataset)
            for dataset in datasets[:ntests]:
                jdx = random.randint(0, len(lkeys) - 1)
                skey = lkeys[jdx]  # get random select key
                query = '%s dataset=%s' % (skey, dataset)
                idx = 0  # always start from first record
                args = (out, host, query, idx, limit, debug, thr, ckey, cert)
                proc = Process(target=run, args=args)
                proc.start()
                pool[proc.name] = proc
    elif status == 'qfile':
        # FIX: close the query file deterministically via a context manager
        with open(qfile, 'r') as istream:
            flist = [f.replace('\n', '') for f in istream.readlines()]
        for query in random.sample(flist, ntests):
            idx = 0  # always start from first record
            args = (out, host, query, idx, limit, debug, thr, ckey, cert)
            proc = Process(target=run, args=args)
            proc.start()
            pool[proc.name] = proc
    else:
        print('DAS cli fails status=%s, query=%s' % (status, query))
        print(jsondict)
    # wait for all processes to finish their tasks
    while 1:
        # FIX: iterate over a snapshot of the keys; deleting from a dict
        # while iterating its keys() view raises RuntimeError in Python 3
        for pname in list(pool.keys()):
            if not pool[pname].is_alive():
                del pool[pname]
        if not pool:
            break
        time.sleep(1)
    if opts.mon and PSUTIL:
        monitor_proc.terminate()
    # retrieve results reported by the workers
    tot_ok = 0
    tot_fail = []
    tot_zero = []
    while True:
        try:
            res, status, qhash = out.get_nowait()
        except Exception:  # FIX: was a bare except; queue is drained, stop
            break
        if status == 'ok':
            if res:
                tot_ok += 1
            else:
                tot_zero.append(qhash)
        else:
            tot_fail.append(qhash)
    print("+++ SUMMARY:")
    print("# queries :", ntests)
    print("status ok :", tot_ok)
    print("status fail:", len(tot_fail), tot_fail)
    print("nresults 0 :", len(tot_zero), tot_zero)
def main():
    """
    Main function of the DAS stress-test client.

    Fetches a seed list of datasets, spawns one `run` worker process per
    test dataset, waits for all workers to finish, then prints a summary
    of results.
    """
    mgr = TestOptionParser()
    opts, _ = mgr.get_opt()
    ntests = opts.ntests
    if not ntests:
        # FIX: converted Python 2 print statements to print() calls
        # throughout; the py2 form is a syntax error under Python 3 and
        # inconsistent with the other blocks in this file
        print("Please specify number of tests to run, see options via --help")
        sys.exit(0)
    host = opts.host
    ckey = opts.ckey
    cert = opts.cert
    thr = 600  # threshold passed through to get_data
    debug = opts.debug
    query = opts.query
    # look-up keys, e.g. "file_run" becomes "file,run"
    lkeys = [k.strip().replace("_", ",") for k in opts.lkeys.split(",")]
    uinput = query.replace("dataset=", "")
    if query.find("dataset=") == -1:
        print('Improper query="%s", please provide dataset query' % query)
        sys.exit(1)
    # check/start monitoring
    monitor_proc = None
    if opts.mon:
        if PSUTIL:
            monitor_proc = Process(target=monitor, args=(opts.mpid, opts.mout))
            monitor_proc.daemon = True
            monitor_proc.start()
        else:
            print("Unable to load psutil package, turn off monitoring")
    # setup initial parameters
    time0 = time.time()
    idx = 0
    limit = 0  # fetch all datasets
    data = get_data(host, query, idx, limit, debug, thr, ckey, cert)
    if isinstance(data, dict):
        jsondict = data
    else:
        jsondict = json.loads(data)
    status = jsondict.get("status", None)
    pool = {}
    out = Queue()
    if status == "ok":
        nres = jsondict.get("nresults", None)
        sec = "(all reported times are in seconds)"
        print("Seed query results: status %s, nrecords %s, time %s %s"
              % (status, nres, etime(time0), sec))
        if nres:
            idx = 0
            limit = opts.limit  # control how many records to get
            datasets = [r["dataset"][0]["name"] for r in jsondict["data"]
                        if r["dataset"][0]["name"] != uinput]
            if ntests > len(datasets):
                # FIX: the original concatenated these fragments without a
                # separator, producing "...ntestsndatasets=..."
                msg = "Number of tests exceed number of found datasets. "
                msg += "Please use another value for ntests, "
                msg += "ndatasets=%s, ntests=%s" % (len(datasets), opts.ntests)
                print("\nERROR:", msg)
                sys.exit(1)
            datasets.sort()
            if debug:
                print("Found %s datasets" % len(datasets))
                print("First %s datasets:" % opts.ntests)
                for dataset in datasets[:opts.ntests]:
                    print(dataset)
            for dataset in datasets[:opts.ntests]:
                jdx = random.randint(0, len(lkeys) - 1)
                skey = lkeys[jdx]  # get random select key
                query = "%s dataset=%s" % (skey, dataset)
                idx = 0  # always start from first record
                args = (out, host, query, idx, limit, debug, thr, ckey, cert)
                proc = Process(target=run, args=args)
                proc.start()
                pool[proc.name] = proc
    else:
        print("DAS cli fails status=%s, query=%s" % (status, query))
        print(jsondict)
    # wait for all processes to finish their tasks
    while 1:
        # FIX: iterate over a snapshot of the keys; deleting from a dict
        # while iterating its keys() view raises RuntimeError in Python 3
        for pname in list(pool.keys()):
            if not pool[pname].is_alive():
                del pool[pname]
        if not pool:
            break
        time.sleep(1)
    if opts.mon and PSUTIL:
        monitor_proc.terminate()
    # retrieve results reported by the workers
    tot_ok = 0
    tot_fail = []
    tot_zero = []
    while True:
        try:
            res, status, qhash = out.get_nowait()
        except Exception:  # FIX: was a bare except; queue is drained, stop
            break
        if status == "ok":
            if res:
                tot_ok += 1
            else:
                tot_zero.append(qhash)
        else:
            tot_fail.append(qhash)
    print("+++ SUMMARY:")
    print("# queries :", opts.ntests)
    print("status ok :", tot_ok)
    print("status fail:", len(tot_fail), tot_fail)
    print("nresults 0 :", len(tot_zero), tot_zero)
def main():
    """
    Main function of the DAS stress-test client.

    Builds the list of test queries — either from a seed dataset query or
    from a query file (--qfile) — runs each query in its own `run` worker
    process, waits for completion, and prints a pass/fail summary.
    """
    mgr = TestOptionParser()
    opts, _ = mgr.get_opt()
    ntests = opts.ntests
    if not ntests:
        print("Please specify number of tests to run, see options via --help")
        sys.exit(0)
    host = opts.host
    ckey = opts.ckey
    cert = opts.cert
    thr = 600  # threshold passed through to get_data
    debug = opts.debug
    query = opts.query
    qfile = opts.qfile
    limit = opts.limit
    # look-up keys, e.g. "file_run" becomes "file,run"
    lkeys = [k.strip().replace('_', ',') for k in opts.lkeys.split(',')]
    uinput = query.replace('dataset=', '')
    if query.find('dataset=') == -1:
        print('Improper query="%s", please provide dataset query' % query)
        sys.exit(1)
    # check/start monitoring
    monitor_proc = None
    if opts.mon:
        if PSUTIL:
            monitor_proc = Process(target=monitor, args=(opts.mpid, opts.mout))
            monitor_proc.daemon = True
            monitor_proc.start()
        else:
            print("Unable to load psutil package, turn off monitoring")
    # setup initial parameters
    time0 = time.time()
    if qfile:  # read queries from query file
        status = 'qfile'
    else:
        idx = 0
        limit = 0  # fetch all datasets
        data = get_data(host, query, idx, limit, debug, thr, ckey, cert)
        if isinstance(data, dict):
            jsondict = data
        else:
            jsondict = json.loads(data)
        status = jsondict.get('status', None)
    pool = {}
    out = Queue()
    if status == 'ok':
        nres = jsondict.get('nresults', None)
        sec = "(all reported times are in seconds)"
        print("Seed query results: status %s, nrecords %s, time %s %s"
              % (status, nres, etime(time0), sec))
        if nres:
            idx = 0
            limit = opts.limit  # control how many records to get
            datasets = [r['dataset'][0]['name'] for r in jsondict['data']
                        if r['dataset'][0]['name'] != uinput]
            if ntests > len(datasets):
                # FIX: the original concatenated these fragments without a
                # separator, producing "...ntestsndatasets=..."
                msg = 'Number of tests exceed number of found datasets. '
                msg += 'Please use another value for ntests, '
                msg += 'ndatasets=%s, ntests=%s' % (len(datasets), ntests)
                print('\nERROR:', msg)
                sys.exit(1)
            datasets.sort()
            if debug:
                print("Found %s datasets" % len(datasets))
                print("First %s datasets:" % ntests)
                for dataset in datasets[:ntests]:
                    print(dataset)
            for dataset in datasets[:ntests]:
                jdx = random.randint(0, len(lkeys) - 1)
                skey = lkeys[jdx]  # get random select key
                query = '%s dataset=%s' % (skey, dataset)
                idx = 0  # always start from first record
                args = (out, host, query, idx, limit, debug, thr, ckey, cert)
                proc = Process(target=run, args=args)
                proc.start()
                pool[proc.name] = proc
    elif status == 'qfile':
        # FIX: close the query file deterministically via a context manager
        with open(qfile, 'r') as istream:
            flist = [f.replace('\n', '') for f in istream.readlines()]
        for query in random.sample(flist, ntests):
            idx = 0  # always start from first record
            args = (out, host, query, idx, limit, debug, thr, ckey, cert)
            proc = Process(target=run, args=args)
            proc.start()
            pool[proc.name] = proc
    else:
        print('DAS cli fails status=%s, query=%s' % (status, query))
        print(jsondict)
    # wait for all processes to finish their tasks
    while 1:
        # FIX: iterate over a snapshot of the keys; deleting from a dict
        # while iterating its keys() view raises RuntimeError in Python 3
        for pname in list(pool.keys()):
            if not pool[pname].is_alive():
                del pool[pname]
        if not pool:
            break
        time.sleep(1)
    if opts.mon and PSUTIL:
        monitor_proc.terminate()
    # retrieve results reported by the workers
    tot_ok = 0
    tot_fail = []
    tot_zero = []
    while True:
        try:
            res, status, qhash = out.get_nowait()
        except Exception:  # FIX: was a bare except; queue is drained, stop
            break
        if status == 'ok':
            if res:
                tot_ok += 1
            else:
                tot_zero.append(qhash)
        else:
            tot_fail.append(qhash)
    print("+++ SUMMARY:")
    print("# queries :", ntests)
    print("status ok :", tot_ok)
    print("status fail:", len(tot_fail), tot_fail)
    print("nresults 0 :", len(tot_zero), tot_zero)