def test_RemoteReader_get_intervals(self):
    """A reader created from a bulk query exposes no intervals."""
    store = RemoteFinder().remote_stores[0]
    bulk_reader = RemoteReader(store, {'intervals': []}, bulk_query='a.b.c.d')
    self.assertEqual(bulk_reader.get_intervals(), [])
def fetch(self, nodes_or_patterns, start_time, end_time, now=None, requestContext=None):
    """Fetch data for ``nodes_or_patterns`` from every remote store.

    Accepts a mix of plain pattern strings and node objects (anything
    that is not a string is assumed to carry a ``.path`` attribute).
    Returns a ``FetchInProgress`` wrapping a lazy generator, so the
    actual waiting/decoding happens only when results are consumed.
    """
    # Go through all of the remote nodes, and launch a fetch for each one.
    # Each fetch will take place in its own thread, since it's naturally
    # parallel work.
    patterns = []
    for v in nodes_or_patterns:
        if isinstance(v, basestring):
            patterns.append(v)
        else:
            patterns.append(v.path)
    results = []
    for store in self.remote_stores:
        # One bulk reader per store; each carries the full pattern list.
        reader = RemoteReader(
            store, {'intervals': []}, bulk_query=patterns)
        result = reader.fetch_list(start_time, end_time, now, requestContext)
        results.append(result)

    def _extract():
        # Lazily block on each in-flight fetch and normalize every series
        # dict into the shape the render pipeline expects.
        for result in results:
            result = wait_for_result(result)
            for series in result:
                yield {
                    'pathExpression': series.get('pathExpression', series['name']),
                    'name': series['name'],
                    'time_info': (series['start'], series['end'], series['step']),
                    'values': series['values'],
                }

    return FetchInProgress(_extract)
def test_RemoteReader_fetch_list_empty_bulk_query(self):
    """fetch_list() short-circuits to [] when there is nothing to query."""
    store = RemoteFinder().remote_stores[0]
    empty_reader = RemoteReader(store, {'intervals': []}, bulk_query='')
    self.assertEqual(empty_reader.fetch_list(1496262000, 1496262060), [])
def test_RemoteReader_fetch(self, http_request):
    """fetch(): None with no query, else the (time_info, values) tuple for
    the reader's own path.

    ``http_request`` is presumably a mock of the finder's HTTP request
    function (patched in by a decorator outside this block) -- its
    call_args are asserted below.
    """
    test_finders = RemoteFinder.factory()
    finder = test_finders[0]
    startTime = 1496262000
    endTime = 1496262060

    # no path or bulk_query
    reader = RemoteReader(finder, {})
    self.assertEqual(reader.bulk_query, [])
    result = reader.fetch(startTime, endTime)
    self.assertEqual(result, None)
    # With nothing to query there must be no HTTP round trip at all.
    self.assertEqual(http_request.call_count, 0)

    # path & bulk_query
    reader = RemoteReader(finder,
                          {'intervals': [], 'path': 'a.b.c.d'},
                          bulk_query=['a.b.c.*'])
    data = [
        {'start': startTime,
         'step': 60,
         'end': endTime,
         'values': [1.0, 0.0, 1.0, 0.0, 1.0],
         'name': 'a.b.c.c'
         },
        {'start': startTime,
         'step': 60,
         'end': endTime,
         'values': [1.0, 0.0, 1.0, 0.0, 1.0],
         'name': 'a.b.c.d'
         }
    ]
    responseObject = HTTPResponse(body=BytesIO(pickle.dumps(data)),
                                  status=200,
                                  preload_content=False)
    http_request.return_value = responseObject
    result = reader.fetch(startTime, endTime)
    # The bulk query matched two series, but fetch() returns only the one
    # matching the reader's own path ('a.b.c.d').
    expected_response = ((1496262000, 1496262060, 60),
                         [1.0, 0.0, 1.0, 0.0, 1.0])
    self.assertEqual(result, expected_response)
    self.assertEqual(http_request.call_args[0], (
        'GET',
        'http://127.0.0.1/render/',
    ))
    self.assertEqual(http_request.call_args[1], {
        'fields': [
            ('format', 'pickle'),
            ('local', '1'),
            ('noCache', '1'),
            ('from', startTime),
            ('until', endTime),
            ('target', 'a.b.c.*'),
        ],
        'headers': None,
        'preload_content': False,
        'timeout': 10,
    })
def test_RemoteReader_init_repr_get_intervals(self):
    """RemoteFinder.factory() wiring, RemoteReader repr() and intervals."""
    finders = RemoteFinder.factory()
    self.assertEqual(len(finders), 2)
    self.assertEqual(finders[0].host, '127.0.0.1')
    self.assertEqual(finders[1].host, '8.8.8.8')

    finder = finders[0]
    reader = RemoteReader(finder, {'intervals': []}, bulk_query=['a.b.c.d'])
    self.assertIsNotNone(reader)
    # Raw string: "\[" inside a normal string literal is an invalid escape
    # sequence (DeprecationWarning today, SyntaxError in future Pythons).
    self.assertRegexpMatches(str(reader),
                             r"<RemoteReader\[.*\]: 127.0.0.1 a.b.c.d>")
    self.assertEqual(reader.get_intervals(), [])
def fetch(self, nodes_or_patterns, start_time, end_time, now=None, requestContext=None):
    """Fan a fetch for ``nodes_or_patterns`` out to all remote stores.

    Strings are used as patterns directly; any other object is assumed
    to expose a ``.path`` attribute. Returns a ``FetchInProgress`` whose
    generator defers waiting on the per-store results until consumed.
    """
    # Go through all of the remote nodes, and launch a fetch for each one.
    # Each fetch will take place in its own thread, since it's naturally
    # parallel work.
    patterns = []
    for v in nodes_or_patterns:
        if isinstance(v, basestring):
            patterns.append(v)
        else:
            patterns.append(v.path)
    results = []
    for store in self.remote_stores:
        # One bulk reader per store covering every requested pattern.
        reader = RemoteReader(store, {'intervals': []}, bulk_query=patterns)
        result = reader.fetch_list(start_time, end_time, now, requestContext)
        results.append(result)

    def _extract():
        # Block on each store's in-flight result only when iterated, then
        # normalize each series into the standard result dict shape.
        for result in results:
            result = wait_for_result(result)
            for series in result:
                yield {
                    'pathExpression': series.get('pathExpression', series['name']),
                    'name': series['name'],
                    'time_info': (series['start'], series['end'], series['step']),
                    'values': series['values'],
                }

    return FetchInProgress(_extract)
def test_RemoteReader_fetch(self, http_request):
    """fetch(): None without queries, else only the series for the
    reader's own path.

    Variant using StringIO bodies and no preload_content (older urllib3
    usage). ``http_request`` is presumably a mock of the HTTP request
    function patched in outside this block.
    """
    test_finders = RemoteFinder.factory()
    finder = test_finders[0]
    startTime = 1496262000
    endTime = 1496262060

    # no path or bulk_query
    reader = RemoteReader(finder, {})
    self.assertEqual(reader.bulk_query, [])
    result = reader.fetch(startTime, endTime)
    self.assertEqual(result, None)
    # Nothing to query -> no HTTP request made.
    self.assertEqual(http_request.call_count, 0)

    # path & bulk_query
    reader = RemoteReader(finder, {
        'intervals': [],
        'path': 'a.b.c.d'
    }, bulk_query=['a.b.c.*'])
    data = [{
        'start': startTime,
        'step': 60,
        'end': endTime,
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        'name': 'a.b.c.c'
    }, {
        'start': startTime,
        'step': 60,
        'end': endTime,
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        'name': 'a.b.c.d'
    }]
    responseObject = HTTPResponse(body=StringIO(pickle.dumps(data)), status=200)
    http_request.return_value = responseObject
    result = reader.fetch(startTime, endTime)
    # Two series matched the pattern, but only 'a.b.c.d' (the reader's
    # path) is returned.
    expected_response = ((1496262000, 1496262060, 60),
                         [1.0, 0.0, 1.0, 0.0, 1.0])
    self.assertEqual(result, expected_response)
    self.assertEqual(http_request.call_args[0], (
        'GET',
        'http://127.0.0.1/render/',
    ))
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('format', 'pickle'),
                ('local', '1'),
                ('noCache', '1'),
                ('from', startTime),
                ('until', endTime),
                ('target', 'a.b.c.*'),
            ],
            'headers': None,
            'timeout': 10,
        })
def test_RemoteReader_fetch(self, http_request):
    """A single-path fetch decodes the pickled response into a
    (time_info, values) tuple once awaited."""
    store = RemoteFinder().remote_stores[0]
    reader = RemoteReader(store,
                          {'intervals': [], 'path': 'a.b.c.d'},
                          bulk_query='a.b.c.d')
    start, end = 1496262000, 1496262060
    series = [{
        'start': start,
        'step': 60,
        'end': end,
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        'name': 'a.b.c.d',
    }]
    http_request.return_value = HTTPResponse(
        body=StringIO(pickle.dumps(series)), status=200)

    pending = reader.fetch(start, end)

    expected = ((1496262000, 1496262060, 60), [1.0, 0.0, 1.0, 0.0, 1.0])
    self.assertEqual(pending.waitForResults(), expected)
def test_RemoteReader__fetch(self, http_request):
    """_fetch() returns [] on any error and the unpickled body on success."""
    start, end = 1496262000, 1496262060
    url = 'http://127.0.0.1/render/'
    query_string = 'a.b.c.d'
    query_params = [
        ('format', 'pickle'),
        ('local', '1'),
        ('noCache', '1'),
        ('from', str(int(start))),
        ('until', str(int(end))),
    ]
    headers = ''
    store = RemoteFinder().remote_stores[0]
    reader = RemoteReader(store,
                          {'intervals': [], 'path': 'a.b.c.d'},
                          bulk_query='a.b.c.d')

    # An HTTP error status yields an empty result.
    http_request.return_value = HTTPResponse(status=400)
    self.assertEqual(
        reader._fetch(url, query_string, query_params, headers), [])

    # A 200 whose body cannot be unpickled also yields an empty result.
    http_request.return_value = HTTPResponse(status=200)
    self.assertEqual(
        reader._fetch(url, query_string, query_params, headers), [])

    # A 200 with a valid pickled payload is returned decoded.
    http_request.return_value = HTTPResponse(
        body=StringIO(pickle.dumps(['a'])), status=200)
    self.assertEqual(
        reader._fetch(url, query_string, query_params, headers), ['a'])
def test_RemoteReader_fetch_list_no_worker_pool(self, http_request):
    """fetch_list() without a worker pool still resolves, annotating each
    series dict with its 'path'."""
    store = RemoteFinder().remote_stores[0]
    reader = RemoteReader(store, {'intervals': []}, bulk_query='a.b.c.d')
    start, end = 1496262000, 1496262060
    series = [{
        'start': start,
        'step': 60,
        'end': end,
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        'name': 'a.b.c.d',
    }]

    # 200 response with good result.data
    http_request.return_value = HTTPResponse(
        body=StringIO(pickle.dumps(series)), status=200)
    pending = reader.fetch_list(start, end)

    # The reader is expected to add 'path' to every returned series.
    series[0]['path'] = 'a.b.c.d'
    self.assertEqual(pending.waitForResults(), series)
def test_RemoteFinder_fetch(self, http_request):
    """RemoteFinder.fetch() fans out to every remote store; since the
    mocked request returns the same payload for each store, the same
    series appears twice in the merged result.
    """
    finder = test_finder = RemoteFinder()
    store = test_finder.remote_stores[0]
    reader = RemoteReader(store,
                          {'intervals': [], 'path': 'a.b.c.d'},
                          bulk_query='a.b.c.d')
    # Smoke-check that a LeafNode can be built around the reader.
    node = LeafNode('a.b.c.d', reader)
    startTime = 1496262000
    endTime = 1496262060
    data = [
        {'start': startTime,
         'step': 60,
         'end': endTime,
         'values': [1.0, 0.0, 1.0, 0.0, 1.0],
         'name': 'a.b.c.d'
         }
    ]
    responseObject = HTTPResponse(body=StringIO(pickle.dumps(data)),
                                  status=200)
    http_request.return_value = responseObject
    ret = finder.fetch(['a.b.c.d'], startTime, endTime)
    # One dict per store (two stores -> two identical entries).
    # NOTE: a dead tuple assignment to expected_response that was
    # immediately overwritten has been removed.
    expected_response = [
        {
            'name': 'a.b.c.d',
            'values': [1.0, 0.0, 1.0, 0.0, 1.0],
            'pathExpression': 'a.b.c.d',
            'time_info': (1496262000, 1496262060, 60)
        },
        {
            'name': 'a.b.c.d',
            'values': [1.0, 0.0, 1.0, 0.0, 1.0],
            'pathExpression': 'a.b.c.d',
            'time_info': (1496262000, 1496262060, 60)
        }
    ]
    result = list(ret.waitForResults())
    self.assertEqual(result, expected_response)
def test_RemoteReader_fetch_multi(self, http_request):
    """fetch_multi() happy paths and error paths (StringIO variant).

    Covers: no query (no HTTP call), a single-path pickle fetch, a bulk
    msgpack fetch with forwarded headers and 'now', and three failure
    modes (undecodable body, HTTP 500, transport exception).
    ``http_request`` is presumably a mock of the HTTP request function
    patched in outside this block.
    """
    test_finders = RemoteFinder.factory()
    finder = test_finders[0]
    startTime = 1496262000
    endTime = 1496262060

    # no path or bulk_query
    reader = RemoteReader(finder, {})
    self.assertEqual(reader.bulk_query, [])
    result = reader.fetch_multi(startTime, endTime)
    self.assertEqual(result, [])
    # Nothing to query -> no HTTP round trip.
    self.assertEqual(http_request.call_count, 0)

    # path
    reader = RemoteReader(finder, {'intervals': [], 'path': 'a.b.c.d'})
    data = [{
        'start': startTime,
        'step': 60,
        'end': endTime,
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        'name': 'a.b.c.d'
    }]
    responseObject = HTTPResponse(body=StringIO(pickle.dumps(data)),
                                  status=200,
                                  preload_content=False)
    http_request.return_value = responseObject
    result = reader.fetch_multi(startTime, endTime)
    expected_response = [{
        'pathExpression': 'a.b.c.d',
        'name': 'a.b.c.d',
        'time_info': (1496262000, 1496262060, 60),
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
    }]
    self.assertEqual(result, expected_response)
    self.assertEqual(http_request.call_args[0], (
        'GET',
        'http://127.0.0.1/render/',
    ))
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('format', 'pickle'),
                ('local', '1'),
                ('noCache', '1'),
                ('from', startTime),
                ('until', endTime),
                ('target', 'a.b.c.d'),
            ],
            'headers': None,
            'preload_content': False,
            'timeout': 10,
        })

    # bulk_query & now -- second finder uses msgpack, local=0 and a
    # url_prefix ('/graphite'), and forwards the Authorization header.
    finder = test_finders[1]
    reader = RemoteReader(finder, {
        'intervals': [],
        'path': 'a.b.c.d'
    }, bulk_query=['a.b.c.d'])
    data = [{
        'start': startTime,
        'step': 60,
        'end': endTime,
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        'name': 'a.b.c.d'
    }]
    responseObject = HTTPResponse(
        body=StringIO(msgpack.dumps(data)),
        status=200,
        preload_content=False,
        headers={'Content-Type': 'application/x-msgpack'})
    http_request.return_value = responseObject
    result = reader.fetch_multi(
        startTime,
        endTime,
        now=endTime,
        requestContext={'forwardHeaders': {
            'Authorization': 'Basic xxxx'
        }})
    expected_response = [{
        'pathExpression': 'a.b.c.d',
        'name': 'a.b.c.d',
        'time_info': (1496262000, 1496262060, 60),
        'values': [1.0, 0.0, 1.0, 0.0, 1.0],
    }]
    self.assertEqual(result, expected_response)
    self.assertEqual(http_request.call_args[0], (
        'GET',
        'http://8.8.8.8/graphite/render/',
    ))
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('format', 'msgpack'),
                ('local', '0'),
                ('noCache', '1'),
                ('from', startTime),
                ('until', endTime),
                ('target', 'a.b.c.d'),
                ('now', endTime),
            ],
            'headers': {
                'Authorization': 'Basic xxxx'
            },
            'preload_content': False,
            'timeout': 10,
        })

    # non-pickle response
    responseObject = HTTPResponse(body=StringIO('error'),
                                  status=200,
                                  preload_content=False)
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(
            Exception,
            'Error decoding render response from http://[^ ]+: .+'):
        reader.fetch(startTime, endTime)

    # non-200 response
    responseObject = HTTPResponse(body=StringIO('error'),
                                  status=500,
                                  preload_content=False)
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(Exception,
                                 'Error response 500 from http://[^ ]+'):
        reader.fetch(startTime, endTime)

    # exception raised by request()
    http_request.side_effect = Exception('error')
    with self.assertRaisesRegexp(Exception,
                                 'Error requesting http://[^ ]+: error'):
        reader.fetch(startTime, endTime)
def send(self, headers=None, msg_setter=None):
    """Execute the find request against the remote store and yield nodes.

    Results are served from cache when available; otherwise a single
    /metrics/find/ HTTP request is made, unpickled and cached. On any
    request/response failure the store is marked failed and the
    generator yields nothing. Yields LeafNode/BranchNode objects with
    ``local`` set False.
    """
    log.debug("FindRequest.send(host=%s, query=%s) called" %
              (self.store.host, self.query))

    if headers is None:
        headers = {}

    results = cache.get(self.cacheKey)
    if results is not None:
        log.debug(
            "FindRequest.send(host=%s, query=%s) using cached result" %
            (self.store.host, self.query))
    else:
        url = "%s://%s/metrics/find/" % (
            'https' if settings.INTRACLUSTER_HTTPS else 'http',
            self.store.host)

        query_params = [
            ('local', '1'),
            ('format', 'pickle'),
            ('query', self.query.pattern),
        ]
        if self.query.startTime:
            query_params.append(('from', self.query.startTime))
        if self.query.endTime:
            query_params.append(('until', self.query.endTime))

        try:
            result = http.request(
                'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
                url,
                fields=query_params,
                headers=headers,
                timeout=settings.REMOTE_FIND_TIMEOUT)
        except BaseException:
            # Transport-level failure: mark the store failed, yield nothing.
            log.exception(
                "FindRequest.send(host=%s, query=%s) exception during request"
                % (self.store.host, self.query))
            self.store.fail()
            return

        if result.status != 200:
            log.exception(
                "FindRequest.send(host=%s, query=%s) error response %d from %s?%s"
                % (self.store.host, self.query, result.status, url,
                   urlencode(query_params)))
            self.store.fail()
            return

        try:
            results = unpickle.loads(result.data)
        except BaseException:
            log.exception(
                "FindRequest.send(host=%s, query=%s) exception processing response"
                % (self.store.host, self.query))
            self.store.fail()
            return

        # Cache only freshly-fetched, successfully-decoded results.
        cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

    msg_setter('host: {host}, query: {query}'.format(host=self.store.host,
                                                     query=self.query))

    for node_info in results:
        # handle both 1.x and 0.9.x output
        path = node_info.get('path') or node_info.get('metric_path')
        is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
        intervals = node_info.get('intervals') or []
        if not isinstance(intervals, IntervalSet):
            # Raw [start, end] pairs from the wire -> IntervalSet.
            intervals = IntervalSet([
                Interval(interval[0], interval[1]) for interval in intervals
            ])

        node_info = {
            'is_leaf': is_leaf,
            'path': path,
            'intervals': intervals,
        }

        if is_leaf:
            reader = RemoteReader(self.store,
                                  node_info,
                                  bulk_query=[self.query.pattern])
            node = LeafNode(path, reader)
        else:
            node = BranchNode(path)

        node.local = False
        yield node
def find_nodes(self, query, timer=None):
    """Find metric nodes matching ``query`` on this remote host.

    Uses a cache key quantized to FIND_CACHE_DURATION so repeated
    queries within the TTL share one cached result. Decoding errors
    mark this finder failed and re-raise. Yields LeafNode/BranchNode
    objects with ``local`` set False (generator variant).
    """
    timer.set_msg('host: {host}, query: {query}'.format(host=self.host,
                                                        query=query))

    log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" %
              (self.host, query))

    # prevent divide by 0
    cacheTTL = settings.FIND_CACHE_DURATION or 1
    if query.startTime:
        # Quantize to the TTL so nearby windows hit the same cache entry.
        start = query.startTime - (query.startTime % cacheTTL)
    else:
        start = ""

    if query.endTime:
        end = query.endTime - (query.endTime % cacheTTL)
    else:
        end = ""

    cacheKey = "find:%s:%s:%s:%s" % (self.host,
                                     compactHash(query.pattern), start, end)

    results = cache.get(cacheKey)
    if results is not None:
        log.debug(
            "RemoteFinder.find_nodes(host=%s, query=%s) using cached result"
            % (self.host, query))
    else:
        url = '/metrics/find/'

        query_params = [
            ('local', self.params.get('local', '1')),
            ('format', self.params.get('format', 'pickle')),
            ('query', query.pattern),
        ]
        if query.startTime:
            query_params.append(('from', int(query.startTime)))
        if query.endTime:
            query_params.append(('until', int(query.endTime)))

        result = self.request(url,
                              fields=query_params,
                              headers=query.headers,
                              timeout=settings.REMOTE_FIND_TIMEOUT)

        try:
            # Remote may answer in msgpack or pickle; pick the decoder
            # from the Content-Type header.
            if result.getheader('content-type') == 'application/x-msgpack':
                results = msgpack.load(BufferedHTTPReader(
                    result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                       encoding='utf-8')
            else:
                results = unpickle.load(
                    BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE))
        except Exception as err:
            self.fail()
            log.exception(
                "RemoteFinder[%s] Error decoding find response from %s: %s" %
                (self.host, result.url_full, err))
            raise Exception("Error decoding find response from %s: %s" %
                            (result.url_full, err))
        finally:
            # Always return the connection to the pool.
            result.release_conn()

        cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

    for node_info in results:
        # handle both 1.x and 0.9.x output
        path = node_info.get('path') or node_info.get('metric_path')
        is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
        intervals = node_info.get('intervals') or []
        if not isinstance(intervals, IntervalSet):
            # Raw [start, end] pairs from the wire -> IntervalSet.
            intervals = IntervalSet([
                Interval(interval[0], interval[1]) for interval in intervals
            ])

        node_info = {
            'is_leaf': is_leaf,
            'path': path,
            'intervals': intervals,
        }

        if is_leaf:
            reader = RemoteReader(self, node_info)
            node = LeafNode(path, reader)
        else:
            node = BranchNode(path)

        node.local = False
        yield node
def find_nodes(self, query, timer=None):
    """Find metric nodes matching ``query`` on this remote host.

    Same as the generator variant but eagerly builds and returns a list
    of nodes (see the inline rationale below about thread pools). The
    cache key is quantized to FIND_CACHE_DURATION; decoding errors mark
    this finder failed and re-raise.
    """
    timer.set_msg('host: {host}, query: {query}'.format(host=self.host,
                                                        query=query))

    log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" %
              (self.host, query))

    # prevent divide by 0
    cacheTTL = settings.FIND_CACHE_DURATION or 1
    if query.startTime:
        # Quantize to the TTL so nearby windows hit the same cache entry.
        start = query.startTime - (query.startTime % cacheTTL)
    else:
        start = ""

    if query.endTime:
        end = query.endTime - (query.endTime % cacheTTL)
    else:
        end = ""

    cacheKey = "find:%s:%s:%s:%s" % (self.host,
                                     compactHash(query.pattern), start, end)

    results = cache.get(cacheKey)
    if results is not None:
        log.debug(
            "RemoteFinder.find_nodes(host=%s, query=%s) using cached result"
            % (self.host, query))
    else:
        url = '/metrics/find/'

        query_params = [
            ('local', self.params.get('local', '1')),
            ('format', self.params.get('format', 'pickle')),
            ('query', query.pattern),
        ]
        if query.startTime:
            query_params.append(('from', int(query.startTime)))
        if query.endTime:
            query_params.append(('until', int(query.endTime)))

        result = self.request(url,
                              fields=query_params,
                              headers=query.headers,
                              timeout=settings.FIND_TIMEOUT)

        try:
            # Remote may answer in msgpack or pickle; choose the decoder
            # from the Content-Type header.
            if result.getheader('content-type') == 'application/x-msgpack':
                results = msgpack.load(BufferedHTTPReader(
                    result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                       encoding='utf-8')
            else:
                results = unpickle.load(
                    BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE))
        except Exception as err:
            self.fail()
            log.exception(
                "RemoteFinder[%s] Error decoding find response from %s: %s" %
                (self.host, result.url_full, err))
            raise Exception("Error decoding find response from %s: %s" %
                            (result.url_full, err))
        finally:
            # Always return the connection to the pool.
            result.release_conn()

        cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

    # We don't use generator here, this function may be run as a job in a thread pool, using a generator has the following risks:
    # 1. Generators are lazy, if we don't iterator the returned generator in the job, the real execution(network operations,
    # time-consuming) are very likely be triggered in the calling thread, losing the effect of thread pool;
    # 2. As function execution is delayed, the job manager can not catch job runtime exception as expected/designed;
    nodes = []
    for node_info in results:
        # handle both 1.x and 0.9.x output
        path = node_info.get('path') or node_info.get('metric_path')
        is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
        intervals = node_info.get('intervals') or []
        if not isinstance(intervals, IntervalSet):
            # Raw [start, end] pairs from the wire -> IntervalSet.
            intervals = IntervalSet([
                Interval(interval[0], interval[1]) for interval in intervals
            ])

        node_info = {
            'is_leaf': is_leaf,
            'path': path,
            'intervals': intervals,
        }

        if is_leaf:
            reader = RemoteReader(self, node_info)
            node = LeafNode(path, reader)
        else:
            node = BranchNode(path)

        node.local = False
        nodes.append(node)

    return nodes
def fetch(self, patterns, start_time, end_time, now=None, requestContext=None):
    """Delegate a multi-pattern fetch to a single bulk RemoteReader."""
    bulk_reader = RemoteReader(self, {}, bulk_query=patterns)
    return bulk_reader.fetch_multi(start_time, end_time, now, requestContext)
def test_RemoteReader_fetch_multi(self, http_request):
    """fetch_multi() happy paths and error paths (BytesIO variant).

    Covers: no query (no HTTP call), a single-path pickle fetch, a bulk
    msgpack fetch with forwarded headers and 'now', and four failure
    modes (undecodable body, payload missing 'name', HTTP 500, transport
    exception). ``http_request`` is presumably a mock of the HTTP
    request function patched in outside this block.
    """
    test_finders = RemoteFinder.factory()
    finder = test_finders[0]
    startTime = 1496262000
    endTime = 1496262060

    # no path or bulk_query
    reader = RemoteReader(finder, {})
    self.assertEqual(reader.bulk_query, [])
    result = reader.fetch_multi(startTime, endTime)
    self.assertEqual(result, [])
    # Nothing to query -> no HTTP round trip.
    self.assertEqual(http_request.call_count, 0)

    # path
    reader = RemoteReader(finder, {'intervals': [], 'path': 'a.b.c.d'})
    data = [
        {'start': startTime,
         'step': 60,
         'end': endTime,
         'values': [1.0, 0.0, 1.0, 0.0, 1.0],
         'name': 'a.b.c.d'
         }
    ]
    responseObject = HTTPResponse(body=BytesIO(pickle.dumps(data)),
                                  status=200,
                                  preload_content=False)
    http_request.return_value = responseObject
    result = reader.fetch_multi(startTime, endTime)
    expected_response = [
        {
            'pathExpression': 'a.b.c.d',
            'name': 'a.b.c.d',
            'time_info': (1496262000, 1496262060, 60),
            'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        }
    ]
    self.assertEqual(result, expected_response)
    self.assertEqual(http_request.call_args[0], (
        'GET',
        'http://127.0.0.1/render/',
    ))
    self.assertEqual(http_request.call_args[1], {
        'fields': [
            ('format', 'pickle'),
            ('local', '1'),
            ('noCache', '1'),
            ('from', startTime),
            ('until', endTime),
            ('target', 'a.b.c.d'),
        ],
        'headers': None,
        'preload_content': False,
        'timeout': 10,
    })

    # bulk_query & now -- second finder uses msgpack, local=0 and a
    # url_prefix ('/graphite'), and forwards the Authorization header.
    finder = test_finders[1]
    reader = RemoteReader(finder,
                          {'intervals': [], 'path': 'a.b.c.d'},
                          bulk_query=['a.b.c.d'])
    data = [
        {'start': startTime,
         'step': 60,
         'end': endTime,
         'values': [1.0, 0.0, 1.0, 0.0, 1.0],
         'name': 'a.b.c.d'
         }
    ]
    responseObject = HTTPResponse(
        body=BytesIO(msgpack.dumps(data, use_bin_type=True)),
        status=200,
        preload_content=False,
        headers={'Content-Type': 'application/x-msgpack'}
    )
    http_request.return_value = responseObject
    result = reader.fetch_multi(startTime,
                                endTime,
                                now=endTime,
                                requestContext={'forwardHeaders': {'Authorization': 'Basic xxxx'}})
    expected_response = [
        {
            'pathExpression': 'a.b.c.d',
            'name': 'a.b.c.d',
            'time_info': (1496262000, 1496262060, 60),
            'values': [1.0, 0.0, 1.0, 0.0, 1.0],
        }
    ]
    self.assertEqual(result, expected_response)
    self.assertEqual(http_request.call_args[0], (
        'GET',
        'http://8.8.8.8/graphite/render/',
    ))
    self.assertEqual(http_request.call_args[1], {
        'fields': [
            ('format', 'msgpack'),
            ('local', '0'),
            ('noCache', '1'),
            ('from', startTime),
            ('until', endTime),
            ('target', 'a.b.c.d'),
            ('now', endTime),
        ],
        'headers': {'Authorization': 'Basic xxxx'},
        'preload_content': False,
        'timeout': 10,
    })

    # non-pickle response
    responseObject = HTTPResponse(body=BytesIO(b'error'),
                                  status=200,
                                  preload_content=False)
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(Exception,
                                 'Error decoding response from http://[^ ]+: .+'):
        reader.fetch(startTime, endTime)

    # invalid response data -- decodes fine but lacks the 'name' key.
    data = [
        {},
    ]
    responseObject = HTTPResponse(
        body=BytesIO(msgpack.dumps(data, use_bin_type=True)),
        status=200,
        preload_content=False,
        headers={'Content-Type': 'application/x-msgpack'}
    )
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(Exception,
                                 'Invalid render response from http://[^ ]+: KeyError\(\'name\',?\)'):
        reader.fetch(startTime, endTime)

    # non-200 response
    responseObject = HTTPResponse(body=BytesIO(b'error'),
                                  status=500,
                                  preload_content=False)
    http_request.return_value = responseObject
    with self.assertRaisesRegexp(Exception,
                                 'Error response 500 from http://[^ ]+'):
        reader.fetch(startTime, endTime)

    # exception raised by request()
    http_request.side_effect = Exception('error')
    with self.assertRaisesRegexp(Exception,
                                 'Error requesting http://[^ ]+: error'):
        reader.fetch(startTime, endTime)