def request(self, path, fields=None, headers=None, timeout=None):
    """Issue an HTTP request to this remote host and return the raw
    urllib3 response (opened with preload_content=False, so the caller
    must read and release it).

    path    -- URL path appended to self.url
    fields  -- query/form parameters (may be None)
    headers -- optional HTTP headers dict
    timeout -- request timeout in seconds

    Raises Exception on transport errors or non-200 responses; in both
    cases self.fail() is called first to record the failure.
    """
    url = "%s%s" % (self.url, path)
    # BUG FIX: urlencode(None) raises TypeError, so calling this method
    # with the declared default fields=None crashed before the request
    # was even attempted -- normalize to an empty mapping first.
    url_full = "%s?%s" % (url, urlencode(fields or {}))

    try:
        result = http.request(
            'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
            url,
            fields=fields,
            headers=headers,
            timeout=timeout,
            preload_content=False)
    except BaseException as err:
        # mark the remote as failed, then surface the error to the caller
        self.fail()
        log.exception(
            "RemoteFinder[%s] Error requesting %s: %s" %
            (self.host, url_full, err))
        raise Exception("Error requesting %s: %s" % (url_full, err))

    if result.status != 200:
        # release the pooled connection before bailing out
        result.release_conn()
        self.fail()
        log.exception(
            "RemoteFinder[%s] Error response %d from %s" %
            (self.host, result.status, url_full))
        raise Exception(
            "Error response %d from %s" % (result.status, url_full))

    result.url_full = url_full

    # reset last failure time so that retried fetches can re-enable a remote
    self.last_failure = 0

    log.debug("RemoteFinder[%s] Fetched %s" % (self.host, url_full))
    return result
def request(self, path, fields=None, headers=None, timeout=None):
    """Perform a GET/POST against this remote host.

    Returns the streaming urllib3 response (preload_content=False); the
    caller is responsible for consuming and releasing it.  On transport
    failure or a non-200 status, self.fail() is recorded and a generic
    Exception is raised.
    """
    url = "%s%s" % (self.url, path)
    # BUG FIX: guard against fields=None (the default) -- urlencode(None)
    # raises TypeError and would mask the real request with a bogus crash.
    url_full = "%s?%s" % (url, urlencode(fields or {}))

    try:
        result = http.request(
            'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
            url,
            fields=fields,
            headers=headers,
            timeout=timeout,
            preload_content=False)
    except BaseException as err:
        self.fail()
        log.exception(
            "RemoteFinder[%s] Error requesting %s: %s" %
            (self.host, url_full, err))
        raise Exception("Error requesting %s: %s" % (url_full, err))

    if result.status != 200:
        # hand the pooled connection back before raising
        result.release_conn()
        self.fail()
        log.exception(
            "RemoteFinder[%s] Error response %d from %s" %
            (self.host, result.status, url_full))
        raise Exception(
            "Error response %d from %s" % (result.status, url_full))

    result.url_full = url_full

    # reset last failure time so that retried fetches can re-enable a remote
    self.last_failure = 0

    log.debug("RemoteFinder[%s] Fetched %s" % (self.host, url_full))
    return result
def _fetch(self, url, query_string, query_params, headers):
    """Fetch remote render data and return the unpickled payload.

    Returns [] on any error; failures are recorded via self.store.fail()
    and logged, never raised to the caller (best-effort semantics).
    query_string is used only to build the URL shown in log messages.
    """
    url_full = "%s?%s" % (url, query_string)

    log.debug("RemoteReader:: Starting to execute _fetch %s" % url_full)
    try:
        log.debug("ReadResult:: Requesting %s" % url_full)
        result = http.request(
            'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
            url,
            fields=query_params,
            headers=headers,
            timeout=settings.REMOTE_FETCH_TIMEOUT,
        )

        if result.status != 200:
            self.store.fail()
            # BUG FIX: the format string takes two arguments; the original
            # passed url_full alone ("... %d from %s" % url_full), which
            # raised TypeError here instead of logging the error.
            self.log_error(
                "ReadResult:: Error response %d from %s" %
                (result.status, url_full))
            data = []
        else:
            # NOTE(review): unpickling remote data -- safe only because
            # the peer is a trusted cluster member.
            data = unpickle.loads(result.data)
    except Exception as err:
        self.store.fail()
        self.log_error(
            "ReadResult:: Error requesting %s: %s" % (url_full, err))
        data = []

    log.debug("RemoteReader:: Completed _fetch %s" % url_full)
    return data
def _fetch(self, url, query_string, query_params, headers):
    """Execute one remote data fetch and unpickle the response.

    Never raises: any transport or HTTP error marks the store as failed,
    is logged, and results in an empty list being returned.
    """
    url_full = "%s?%s" % (url, query_string)

    log.debug("RemoteReader:: Starting to execute _fetch %s" % url_full)
    try:
        log.debug("ReadResult:: Requesting %s" % url_full)
        result = http.request(
            'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
            url,
            fields=query_params,
            headers=headers,
            timeout=settings.REMOTE_FETCH_TIMEOUT,
        )

        if result.status != 200:
            self.store.fail()
            # BUG FIX: supply both format arguments -- the original
            # "%d from %s" % url_full raised TypeError at log time.
            self.log_error(
                "ReadResult:: Error response %d from %s" %
                (result.status, url_full))
            data = []
        else:
            data = unpickle.loads(result.data)
    except Exception as err:
        self.store.fail()
        self.log_error(
            "ReadResult:: Error requesting %s: %s" % (url_full, err))
        data = []

    log.debug("RemoteReader:: Completed _fetch %s" % url_full)
    return data
def request(self, method, url, fields, requestContext=None):
    """Send a request to the remote tag database and return the decoded
    JSON response body.

    method         -- HTTP method ('GET', 'POST', ...)
    url            -- path appended to self.base_url
    fields         -- dict of parameters; None values are dropped and
                      list/tuple values are expanded into repeated fields
    requestContext -- optional context whose 'forwardHeaders' are passed on

    Raises ValueError with the remote error message on HTTP 400, and a
    generic Exception on any other non-200 status.
    """
    from base64 import b64encode  # local import: module header not visible here

    # requestContext.get('forwardHeaders') may itself be None; normalize
    # to a dict so the 'Authorization' membership test below is safe.
    headers = (requestContext.get('forwardHeaders')
               if requestContext else None) or {}
    if 'Authorization' not in headers and self.username and self.password:
        # BUG FIX: str.encode('base64') exists only on Python 2 (and embeds
        # a trailing newline in the header value); base64.b64encode works on
        # both 2 and 3 and yields a clean token.
        user_pw = '%s:%s' % (self.username, self.password)
        headers['Authorization'] = \
            'Basic ' + b64encode(user_pw.encode('utf-8')).decode('ascii')

    req_fields = []
    for (field, value) in fields.items():
        if value is None:
            continue
        if isinstance(value, (list, tuple)):
            # expand sequences into repeated query fields
            req_fields.extend([(field, v) for v in value if v is not None])
        else:
            req_fields.append((field, value))

    result = http.request(
        method,
        self.base_url + url,
        fields=req_fields,
        headers=headers,
        timeout=self.settings.REMOTE_FIND_TIMEOUT,
    )

    if result.status == 400:
        # the tagdb reports validation problems as JSON {'error': ...}
        raise ValueError(
            json.loads(result.data.decode('utf-8')).get('error'))
    if result.status != 200:
        raise Exception('HTTP Error from remote tagdb: %s %s' %
                        (result.status, result.data))

    return json.loads(result.data.decode('utf-8'))
def request(self, method, url, fields, requestContext=None):
    """Query the remote tag database and return the parsed JSON body.

    Builds Basic auth from self.username/self.password when the caller
    did not forward an Authorization header.  Raises ValueError on HTTP
    400 (remote validation error) and Exception on any other non-200.
    """
    # forwardHeaders may be present-but-None; normalize so the
    # membership test below cannot hit a None.
    headers = (requestContext.get('forwardHeaders')
               if requestContext else None) or {}
    if 'Authorization' not in headers and self.username and self.password:
        user_pw = '%s:%s' % (self.username, self.password)
        if sys.version_info[0] >= 3:
            # BUG FIX: b2a_base64 appends '\n'; a newline inside a header
            # value is invalid and can corrupt the request -- strip it.
            user_pw_b64 = b2a_base64(
                user_pw.encode('utf-8')).decode('ascii').strip()
        else:
            # the Python 2 'base64' codec also appends a trailing newline
            user_pw_b64 = user_pw.encode('base64').strip()
        headers['Authorization'] = 'Basic ' + user_pw_b64

    req_fields = []
    for (field, value) in fields.items():
        if value is None:
            continue
        if isinstance(value, (list, tuple)):
            # sequences become repeated query fields
            req_fields.extend([(field, v) for v in value if v is not None])
        else:
            req_fields.append((field, value))

    result = http.request(
        method,
        self.base_url + url,
        fields=req_fields,
        headers=headers,
        timeout=self.settings.FIND_TIMEOUT,
    )

    if result.status == 400:
        raise ValueError(
            json.loads(result.data.decode('utf-8')).get('error'))
    if result.status != 200:
        raise Exception('HTTP Error from remote tagdb: %s %s' %
                        (result.status, result.data))

    return json.loads(result.data.decode('utf-8'))
def request(self, method, url, fields=None, requestContext=None):
    """Issue a request to the remote tag database and return the decoded
    JSON response.

    fields with None values are filtered out before sending.  Raises
    Exception on any non-200 response.
    """
    from base64 import b64encode  # local import: module header not visible here

    if not fields:
        fields = {}

    # forwardHeaders may be None even when requestContext is given
    headers = (requestContext.get('forwardHeaders')
               if requestContext else None) or {}
    if 'Authorization' not in headers and self.username and self.password:
        # BUG FIX: str.encode('base64') is Python 2 only (and embeds a
        # trailing newline); base64.b64encode is portable and clean.
        user_pw = '%s:%s' % (self.username, self.password)
        headers['Authorization'] = \
            'Basic ' + b64encode(user_pw.encode('utf-8')).decode('ascii')

    result = http.request(
        method,
        self.base_url + url,
        fields={field: value
                for (field, value) in fields.items()
                if value is not None},
        headers=headers,
        timeout=settings.REMOTE_FIND_TIMEOUT,
    )

    if result.status != 200:
        raise Exception('HTTP Error from remote tagdb: %s' % result.status)

    return json.loads(result.data.decode('utf-8'))
def send(self, headers=None, msg_setter=None):
    """Execute this find request against the remote store and yield the
    matching nodes (LeafNode or BranchNode, each marked node.local = False).

    Results are served from the find cache when available; otherwise a
    /metrics/find/ request is made to the remote host and the result is
    cached.  On any transport, HTTP, or unpickling error the store is
    marked failed and the generator returns without yielding anything.

    headers    -- extra HTTP headers to forward (None means {})
    msg_setter -- callable used to report progress state
                  # NOTE(review): assumes msg_setter is always supplied;
                  # the None default would crash below -- confirm callers.
    """
    log.debug(
        "FindRequest.send(host=%s, query=%s) called" %
        (self.store.host, self.query))

    if headers is None:
        headers = {}

    # consult the find cache first
    results = cache.get(self.cacheKey)
    if results is not None:
        log.debug(
            "FindRequest.send(host=%s, query=%s) using cached result" %
            (self.store.host, self.query))
    else:
        url = "%s://%s/metrics/find/" % (
            'https' if settings.INTRACLUSTER_HTTPS else 'http',
            self.store.host)

        # 'local=1' prevents the remote from fanning the query out again
        query_params = [
            ('local', '1'),
            ('format', 'pickle'),
            ('query', self.query.pattern),
        ]
        if self.query.startTime:
            query_params.append(('from', self.query.startTime))
        if self.query.endTime:
            query_params.append(('until', self.query.endTime))

        try:
            result = http.request(
                'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
                url,
                fields=query_params,
                headers=headers,
                timeout=settings.REMOTE_FIND_TIMEOUT)
        # NOTE(review): BaseException also swallows KeyboardInterrupt here
        except BaseException:
            log.exception(
                "FindRequest.send(host=%s, query=%s) exception during request" %
                (self.store.host, self.query))
            self.store.fail()
            return

        if result.status != 200:
            log.exception(
                "FindRequest.send(host=%s, query=%s) error response %d from %s?%s" %
                (self.store.host, self.query, result.status, url,
                 urlencode(query_params)))
            self.store.fail()
            return

        try:
            # trusted-peer pickle payload
            results = unpickle.loads(result.data)
        except BaseException:
            log.exception(
                "FindRequest.send(host=%s, query=%s) exception processing response" %
                (self.store.host, self.query))
            self.store.fail()
            return

        # only successful fetches are cached
        cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

    msg_setter('host: {host}, query: {query}'.format(
        host=self.store.host, query=self.query))

    for node_info in results:
        # handle both 1.x and 0.9.x output
        path = node_info.get('path') or node_info.get('metric_path')
        is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
        intervals = node_info.get('intervals') or []
        if not isinstance(intervals, IntervalSet):
            intervals = IntervalSet([
                Interval(interval[0], interval[1])
                for interval in intervals
            ])

        # rebind node_info to the normalized 1.x-shaped dict
        node_info = {
            'is_leaf': is_leaf,
            'path': path,
            'intervals': intervals,
        }

        if is_leaf:
            reader = RemoteReader(
                self.store, node_info, bulk_query=[self.query.pattern])
            node = LeafNode(path, reader)
        else:
            node = BranchNode(path)

        node.local = False
        yield node
def send(self, headers=None, msg_setter=None):
    """Run the remote find query and yield the resulting nodes.

    Cached results are reused when present; otherwise /metrics/find/ is
    queried on the remote host with pickle output and the parsed result
    is stored in the find cache.  Any request, HTTP-status, or unpickle
    failure marks the store failed and ends the generator early (nothing
    is yielded).  Every yielded node has node.local set to False.

    headers    -- extra HTTP headers to forward (None means {})
    msg_setter -- progress callback
                  # NOTE(review): called unconditionally below, so the
                  # None default looks unusable -- verify against callers.
    """
    log.debug(
        "FindRequest.send(host=%s, query=%s) called" %
        (self.store.host, self.query))

    if headers is None:
        headers = {}

    # fast path: a previous identical find may still be cached
    results = cache.get(self.cacheKey)
    if results is not None:
        log.debug(
            "FindRequest.send(host=%s, query=%s) using cached result" %
            (self.store.host, self.query))
    else:
        url = "%s://%s/metrics/find/" % (
            'https' if settings.INTRACLUSTER_HTTPS else 'http',
            self.store.host)

        # local=1 stops the remote node from re-forwarding the query
        query_params = [
            ('local', '1'),
            ('format', 'pickle'),
            ('query', self.query.pattern),
        ]
        if self.query.startTime:
            query_params.append(('from', self.query.startTime))
        if self.query.endTime:
            query_params.append(('until', self.query.endTime))

        try:
            result = http.request(
                'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
                url,
                fields=query_params,
                headers=headers,
                timeout=settings.REMOTE_FIND_TIMEOUT)
        # NOTE(review): BaseException here also catches KeyboardInterrupt
        except BaseException:
            log.exception(
                "FindRequest.send(host=%s, query=%s) exception during request" %
                (self.store.host, self.query))
            self.store.fail()
            return

        if result.status != 200:
            log.exception(
                "FindRequest.send(host=%s, query=%s) error response %d from %s?%s" %
                (self.store.host, self.query, result.status, url,
                 urlencode(query_params)))
            self.store.fail()
            return

        try:
            # pickle payload from a trusted cluster peer
            results = unpickle.loads(result.data)
        except BaseException:
            log.exception(
                "FindRequest.send(host=%s, query=%s) exception processing response" %
                (self.store.host, self.query))
            self.store.fail()
            return

        # cache only after a fully successful fetch+parse
        cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

    msg_setter(
        'host: {host}, query: {query}'.format(
            host=self.store.host, query=self.query))

    for node_info in results:
        # handle both 1.x and 0.9.x output
        path = node_info.get('path') or node_info.get('metric_path')
        is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
        intervals = node_info.get('intervals') or []
        if not isinstance(intervals, IntervalSet):
            intervals = IntervalSet(
                [Interval(interval[0], interval[1])
                 for interval in intervals])

        # normalize to the 1.x dict shape before handing to RemoteReader
        node_info = {
            'is_leaf': is_leaf,
            'path': path,
            'intervals': intervals,
        }

        if is_leaf:
            reader = RemoteReader(
                self.store,
                node_info,
                bulk_query=[self.query.pattern])
            node = LeafNode(path, reader)
        else:
            node = BranchNode(path)

        node.local = False
        yield node