def runQuery(cls, qparams=None):
    """Run a query against the Solr collection1 select handler.

    Fix: the original used a mutable default argument (``qparams={}``),
    which is shared across calls; use the ``None`` sentinel instead,
    matching the Elasticsearch variant of this method.

    :param qparams: optional dict of query-string parameters; keys and
        values must already be strings (they are concatenated verbatim,
        no URL-encoding is applied).
    :return: raw response body from Solr.
    :raises AssertionError: if the HTTP status is not 200.
    """
    if qparams is None:
        qparams = {}
    searchurl = cls._solr_http_url + "/collection1/select"
    if qparams:
        # Build the query string by straight concatenation — assumes
        # callers pass pre-encoded string values.
        searchurl += "?" + "&".join(
            [key + "=" + value for key, value in qparams.items()])
    code, data, headers = util.httpRequest(searchurl)
    assert code == 200
    return data
def runQuery(cls, qparams=None):
    """Query the Elasticsearch ``_search`` endpoint and return parsed JSON.

    :param qparams: optional dict of query-string parameters; keys and
        values are concatenated as-is (callers must pre-encode strings).
    :return: decoded JSON response body.
    :raises AssertionError: if the HTTP status is not 200.
    """
    params = qparams if qparams else {}
    url = cls._es_http_url + "/_search"
    if params:
        # Assemble the query string from pre-encoded key/value strings.
        pairs = [key + "=" + value for key, value in params.items()]
        url = url + "?" + "&".join(pairs)
    status, body, _unused_headers = util.httpRequest(url)
    assert status == 200
    return util.getJSON(body)
def waitForSuccessfulEvents(self, metrics, timeout=120, interval=10):
    """Poll the agent's JSON metrics endpoint until all metric conditions hold.

    Each entry in *metrics* is a 4-token string:
    ``"<SOURCE|CHANNEL|SINK> <component-name> <operator> <value>"``
    e.g. ``"SINK k1 >= 100"``. The component type selects which counter is
    checked (EventAcceptedCount / EventTakeSuccessCount /
    EventDrainSuccessCount — these names match Apache Flume's JSON metrics
    format; presumably this targets a Flume agent — TODO confirm).

    NOTE(review): returns silently in all cases — no indication is given to
    the caller whether the conditions were satisfied or the timeout expired.

    :param metrics: list of 4-token metric condition strings (trusted input —
        they are passed to ``eval`` below).
    :param timeout: overall wall-clock budget in seconds.
    :param interval: sleep between polls in seconds.
    """
    modmetrics = []
    # No metrics port configured: nothing to poll.
    if not self._mport:
        return
    for metric in metrics:
        items = metric.split()
        assert len(items) == 4, "Invalid metric definition: " + metric
        # NOTE(review): `type` shadows the builtin of the same name.
        type = items[0].upper()
        assert type in (
            'SOURCE', 'CHANNEL', 'SINK'), "Invalid metric type '%s' in definition: %s" % (
            items[0], metric)
        if type == 'SOURCE':
            metricType = 'EventAcceptedCount'
        elif type == 'CHANNEL':
            metricType = 'EventTakeSuccessCount'
        else:
            metricType = 'EventDrainSuccessCount'
        # Build a Python expression string, e.g.
        # "int(jsoncontent['SINK.k1']['EventDrainSuccessCount']) >= 100",
        # evaluated against the fetched JSON below. Safe only because the
        # metric definitions come from callers, not untrusted input.
        modmetric = "int(jsoncontent['%s.%s']['%s']) %s %s" % (
            type, items[1], metricType, items[2], items[3])
        modmetrics.append(modmetric)
    starttime = time.time()
    url = "http://%s:%d/metrics" % (Machine.getfqdn(), self._mport)
    while time.time() - starttime < timeout:
        retcode, retdata, retheaders = util.httpRequest(url)
        # Non-200 responses are ignored and simply retried next iteration.
        if retcode == 200:
            # `jsoncontent` is referenced by the eval'd expressions above.
            jsoncontent = util.getJSON(retdata)
            satisfy = True
            for metric in modmetrics:
                try:
                    if not eval(metric):
                        satisfy = False
                        break
                except KeyError:
                    # Metric key not present (yet); mark unsatisfied.
                    # NOTE(review): no `break` here — remaining metrics are
                    # still evaluated even though the outcome is decided.
                    satisfy = False
            if satisfy:
                return
        time.sleep(interval)
def _update(cls, data, commit=False):
    """POST an update document (XML) to the Solr update handler.

    :param data: request body to send, expected to be XML text.
    :param commit: when True, ask Solr to commit immediately.
    :return: the (code, data, headers) tuple from util.httpRequest.
    """
    url = cls._solr_http_url + "/update"
    if commit:
        url += "/?commit=true"
    xml_headers = {'Content-Type': 'text/xml', 'charset': 'utf-8'}
    return util.httpRequest(url, headers=xml_headers, data=data)
def clearAllIndexes(cls):
    """Delete every Elasticsearch index via ``DELETE /_all``.

    Best-effort: the HTTP response is not checked.
    """
    target = cls._es_http_url + "/_all"
    util.httpRequest(target, method='DELETE')