def iterable_func(self):
    """
    An internal function which has generator semantics. Defined using the
    `yield` syntax.
    Used to grab the first element during class initialization, and
    subsequently on calls to `next()` to get the remaining elements.
    We rely on the implicit StopIteration built into this type of function
    to propagate through the final `next()` call.
    """
    # BFS is not done until the queue is empty
    while self.queue:
        logger.debug(("recursive_operation_ls BFS queue not empty, "
                      "getting next path now."))

        # rate limit based on number of ls calls we have made
        self.ls_count += 1
        if self.ls_count % SLEEP_FREQUENCY == 0:
            logger.debug(("recursive_operation_ls sleeping {} seconds to "
                          "rate limit itself.".format(SLEEP_LEN)))
            time.sleep(SLEEP_LEN)

        # get path and current depth from the queue
        abs_path, rel_path, depth = self.queue.pop()

        # set the target path to the popped absolute path if it exists
        if abs_path:
            self.ls_params["path"] = abs_path

        # if filter_after_first is False, stop filtering after the first
        # ls call has been made
        if not self.filter_after_first:
            if self.filtering:
                self.filtering = False
            else:
                try:
                    self.ls_params.pop("filter")
                except KeyError:
                    pass

        # do the operation_ls with the updated params
        res = self.client.operation_ls(self.endpoint_id, **self.ls_params)
        res_data = res["DATA"]

        # if we aren't at the depth limit, add dir entries to the queue,
        # including the dir's name in the absolute and relative paths,
        # and increase the depth by one.
        # data is reversed to maintain any "orderby" ordering
        if depth < self.max_depth:
            self.queue.extend(
                [(res["path"] + item["name"],
                  (rel_path + "/" if rel_path else "") + item["name"],
                  depth + 1)
                 for item in reversed(res_data) if item["type"] == "dir"])

        # for each item in the response data, update the item's name with
        # the relative path popped from the queue, and yield the item
        for item in res_data:
            item["name"] = (
                rel_path + "/" if rel_path else "") + item["name"]
            yield GlobusResponse(item)
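# A minimal, self-contained sketch of the queue-driven generator pattern used
# above, run against an in-memory "directory tree" instead of the real
# operation_ls API. walk_tree, the tree layout, and max_depth are hypothetical
# names used only for illustration; they are not part of the SDK.
from collections import deque


def walk_tree(tree, max_depth=2):
    # queue holds (relative path, node, depth) tuples, analogous to the
    # (abs_path, rel_path, depth) tuples queued by the real generator
    queue = deque([("", tree, 0)])
    while queue:
        rel_path, node, depth = queue.popleft()
        for name, child in node.items():
            full_name = (rel_path + "/" if rel_path else "") + name
            # descend into directories until we hit the depth limit
            if isinstance(child, dict) and depth < max_depth:
                queue.append((full_name, child, depth + 1))
            yield full_name


if __name__ == "__main__":
    tree = {"a": {"b": {"c.txt": None}}, "d.txt": None}
    for entry in walk_tree(tree):
        print(entry)   # a, d.txt, a/b, a/b/c.txt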
def iterable_func(self):
    """
    An internal function which has generator semantics. Defined using the
    `yield` syntax.
    Used to grab the first element during class initialization, and
    subsequently on calls to `next()` to get the remaining elements.
    We rely on the implicit StopIteration built into this type of function
    to propagate through the final `next()` call.
    This method is the real workhorse of this entire module.
    """
    # now, cap the limit per request to the max per request size
    limit = min(self.num_results, self.max_results_per_call)

    has_next_page = True
    while has_next_page:
        logger.debug(("PaginatedResource should have more results, "
                      "requesting them now"))

        # if we're about to request more results than the user asked
        # for, limit ourselves on the last paginated call to the API
        if self.offset + limit > self.num_results:
            limit = self.num_results - self.offset

        if not self.client_kwargs['params']:
            self.client_kwargs['params'] = {}
        self.client_kwargs['params']['offset'] = self.offset
        self.client_kwargs['params']['limit'] = limit

        # fetch a page of results and walk them, yielding them as the
        # iterated elements wrapped in GlobusResponse objects
        # nicely, the __getitem__ for GlobusResponse will work on raw
        # dicts, so these handle well
        res = self.client_method(self.client_path, **self.client_kwargs)
        for item in res:
            yield GlobusResponse(item)

        self.offset += self.max_results_per_call

        # do we have another page of results to fetch?
        if self.paging_style == self.PAGING_STYLE_HAS_NEXT:
            has_next_page = res['has_next_page']
        elif self.paging_style == self.PAGING_STYLE_TOTAL:
            has_next_page = self.offset < res['total']
        else:
            logger.error(
                "PaginatedResource.paging_style={} is invalid".format(
                    self.paging_style))
            raise ValueError(
                'Invalid Paging Style Given to PaginatedResource')

        # set to False if we've reached the given limit
        has_next_page = has_next_page and self.offset < self.num_results
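# A small, self-contained sketch of the offset/limit pagination loop above,
# exercised against a fake in-memory "API". fake_api_page, PAGE_SIZE, and
# NUM_RESULTS are hypothetical names used only for illustration; the real
# code drives self.client_method with self.client_kwargs instead.
ALL_ITEMS = list(range(23))      # pretend server-side dataset
PAGE_SIZE = 10                   # max_results_per_call
NUM_RESULTS = 15                 # num_results the caller asked for


def fake_api_page(offset, limit):
    # mimics a "total"-style paginated API response
    chunk = ALL_ITEMS[offset:offset + limit]
    return {"DATA": chunk, "total": len(ALL_ITEMS)}


def paginate():
    offset = 0
    limit = min(NUM_RESULTS, PAGE_SIZE)
    has_next_page = True
    while has_next_page:
        # shrink the final request so we never ask for more than NUM_RESULTS
        if offset + limit > NUM_RESULTS:
            limit = NUM_RESULTS - offset
        res = fake_api_page(offset, limit)
        for item in res["DATA"]:
            yield item
        offset += PAGE_SIZE
        # PAGING_STYLE_TOTAL-style check, capped by the requested count
        has_next_page = offset < res["total"] and offset < NUM_RESULTS


if __name__ == "__main__":
    print(list(paginate()))      # 15 items, 0 through 14, in two calls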
def iterable_func(self):
    """
    An internal function which has generator semantics. Defined using the
    `yield` syntax.
    Used to grab the first element during class initialization, and
    subsequently on calls to `next()` to get the remaining elements.
    We rely on the implicit StopIteration built into this type of function
    to propagate through the final `next()` call.
    This method is the real workhorse of this entire module.
    """
    if not self.client_kwargs['params']:
        self.client_kwargs['params'] = {}

    # to start with, cap the limit per request to the max per request size
    self.limit = self.max_results_per_call
    if self.num_results is not None:
        self.limit = min(self.num_results, self.limit)

    def _set_params_for_next_call():
        # if we're about to request more results than the user asked
        # for, limit ourselves on the last paginated call to the API
        if (self.num_results is not None and
                self.offset + self.limit > self.num_results):
            self.limit = self.num_results - self.offset

        # all paging styles support limit
        # MARKER doesn't have it documented, but it is in fact supported
        self.client_kwargs['params']['limit'] = self.limit

        # if the paging is done by marker, just carry over the marker
        if self.paging_style == self.PAGING_STYLE_MARKER:
            if self.next_marker:
                self.client_kwargs['params']['marker'] = self.next_marker
        elif self.paging_style == self.PAGING_STYLE_LAST_KEY:
            if self.next_marker:
                self.client_kwargs['params']['last_key'] = self.next_marker
        # these params work for all paging styles *except* MARKER
        # and LAST_KEY
        else:
            self.client_kwargs['params']['offset'] = self.offset

    def _check_has_next_page(res):
        """
        Check that the API says there are more results available.
        Additionally, update the PaginatedResource.next_marker or
        PaginatedResource.offset based on the response
        """
        # if the paging style is LAST_KEY, check has_next_page
        if self.paging_style == self.PAGING_STYLE_LAST_KEY:
            self.next_marker = res.get('last_key')
            return res['has_next_page']

        # if the paging style is MARKER, look at the marker
        if self.paging_style == self.PAGING_STYLE_MARKER:
            # marker may be 0, null, or absent if no more results
            # API docs aren't 100% clear -- looks like 0 is what we should
            # expect, but we'll also accept null or absent to be safe
            self.next_marker = res.get('next_marker')
            return bool(self.next_marker)

        # start doing the offset maths and see if we have another page to
        # fetch
        # step size is the number of results per call -- we'll catch this
        # "walking off the end" of the requested results afterwards
        self.offset += self.max_results_per_call

        # if it's HAS_NEXT, the check is easy, as it's explicitly part of
        # the response
        if self.paging_style == self.PAGING_STYLE_HAS_NEXT:
            # just return the has_next_page value
            return res['has_next_page']

        # if paging is TOTAL oriented, check if we've reached the total
        if self.paging_style == self.PAGING_STYLE_TOTAL:
            return self.offset < res['total']

        logger.error("PaginatedResource.paging_style={} is invalid".format(
            self.paging_style))
        raise ValueError('Invalid Paging Style Given to PaginatedResource')

    has_next_page = True
    while has_next_page:
        logger.debug(("PaginatedResource should have more results, "
                      "requesting them now"))
        _set_params_for_next_call()

        # fetch a page of results and walk them, yielding them as the
        # iterated elements wrapped in GlobusResponse objects
        # nicely, the __getitem__ for GlobusResponse will work on raw
        # dicts, so these handle well
        res = self.client_method(self.client_path, **self.client_kwargs)
        for item in res:
            yield GlobusResponse(item, client=self.client_object)

            # increment the "num results" counter
            self.num_results_fetched += 1

            # ensure that even if the paging style requires that we fetch
            # more results than were requested, we still only yield the
            # number that were requested -- returning here will result in a
            # StopIteration because this is a generator function
            # CAREFUL! make sure we catch num_results_fetched==num_results
            # otherwise, we could end up making one-too-many API calls
            if (self.num_results is not None and
                    self.num_results_fetched >= self.num_results):
                return

        has_next_page = _check_has_next_page(res)
def list_response():
    data = ["value1", "value2", "value3"]
    return _TestResponse(data, GlobusResponse(data))
def dict_response():
    data = {"label1": "value1", "label2": "value2"}
    return _TestResponse(data, GlobusResponse(data))
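# A self-contained sketch of how tests might consume helpers like the two
# above, pairing the raw data with the wrapped response so the two can be
# compared. _TestResponse is not defined in this excerpt, so a namedtuple
# stand-in (and a plain dict in place of GlobusResponse) is assumed here
# purely for illustration; the real definitions may differ.
import collections

_TestResponse = collections.namedtuple("_TestResponse", ("data", "response"))


def dict_response_example():
    data = {"label1": "value1", "label2": "value2"}
    return _TestResponse(data, dict(data))   # GlobusResponse stand-in


def test_getitem_matches_raw_data():
    fixture = dict_response_example()
    for key in fixture.data:
        assert fixture.response[key] == fixture.data[key]


if __name__ == "__main__":
    test_getitem_matches_raw_data()
    print("ok")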
class GlobusResponseTests(CapturedIOTestCase):

    def setUp(self):
        """
        Makes GlobusResponses wrapped around known data for testing
        """
        super(GlobusResponseTests, self).setUp()

        self.dict_data = {"label1": "value1", "label2": "value2"}
        self.dict_response = GlobusResponse(self.dict_data)

        self.list_data = ["value1", "value2", "value3"]
        self.list_response = GlobusResponse(self.list_data)

    def test_data(self):
        """
        Gets the data from the GlobusResponses, confirms results
        """
        self.assertEqual(self.dict_response.data, self.dict_data)
        self.assertEqual(self.list_response.data, self.list_data)

    def test_str(self):
        """
        Confirms that individual values are seen in the string form of the
        GlobusResponse
        """
        for item in self.dict_data:
            self.assertTrue(item in str(self.dict_response))
        self.assertFalse("nonexistent" in str(self.dict_response))

        for item in self.list_data:
            self.assertTrue(item in str(self.list_response))
        self.assertFalse("nonexistent" in str(self.list_response))

    def test_getitem(self):
        """
        Confirms that values can be accessed from the GlobusResponse
        """
        for key in self.dict_data:
            self.assertEqual(self.dict_response[key], self.dict_data[key])

        for i in range(len(self.list_data)):
            self.assertEqual(self.list_response[i], self.list_data[i])

    def test_contains(self):
        """
        Confirms that individual values are seen in the GlobusResponse
        """
        for item in self.dict_data:
            self.assertTrue(item in self.dict_response)
        self.assertFalse("nonexistent" in self.dict_response)

        for item in self.list_data:
            self.assertTrue(item in self.list_response)
        self.assertFalse("nonexistent" in self.list_response)

    def test_get(self):
        """
        Gets individual values from the dict response, confirms results
        Confirms the list response correctly fails, since lists have no
        get method
        """
        for item in self.dict_data:
            self.assertEqual(self.dict_response.get(item),
                             self.dict_data.get(item))

        with self.assertRaises(AttributeError):
            self.list_response.get("value1")