def test_storage_session(self):
    """Simulate one client sync session against the storage endpoint.

    Walks the typical client flow: GET info/collections, poll
    meta/global (re-creating it on 404), GET a weighted-random number
    of collections, POST small batches of WBOs, then issue DELETEs --
    either per-collection or (rarely) a full reset, never both.
    Request counts come from the module-level weighted distributions.
    Returns nothing; failures surface via the assertion helpers.
    """
    self._generate_token_credentials()
    self.logi("using endpoint url %s" % (self.endpoint_url,))

    # Always GET info/collections.
    self.setOkCodes([200, 404])
    url = self.endpoint_url + "/info/collections"
    response = self.get(url)

    # GET requests to meta/global.
    num_requests = self._pick_weighted_count(metaglobal_count_distribution)
    for x in range(num_requests):
        # Re-arm the acceptable codes on every iteration.  Previously
        # the [200, 201] set for the recovery PUT below leaked into the
        # next iteration's GET, so a later 404 was wrongly reported as
        # a failure.
        self.setOkCodes([200, 404])
        url = self.endpoint_url + "/storage/meta/global"
        response = self.get(url)
        if response.code == 404:
            # meta/global is missing; write a plausible-looking record.
            metapayload = "This is the metaglobal payload which contains"\
                          " some client data that doesnt look much"\
                          " like this"
            data = json.dumps({"id": "global", "payload": metapayload})
            data = Data('application/json', data)
            self.setOkCodes([200, 201])
            self.put(url, params=data)

    # GET requests to individual collections.
    num_requests = self._pick_weighted_count(get_count_distribution)
    cols = random.sample(collections, num_requests)
    self.setOkCodes([200, 404])
    for x in range(num_requests):
        url = self.endpoint_url + "/storage/" + cols[x]
        # Only ask for items modified within the last 1--100 hours.
        newer = int(time.time() - random.randint(3600, 360000))
        params = {"full": "1", "newer": str(newer)}
        self.logi("about to GET (x=%d) %s" % (x, url))
        response = self.get(url, params)

    # POST requests with a batch of WBOs in each.
    # (The old comment claimed "PUT ... 100 WBOs"; the actual batch
    # size is items_per_batch below and the verb is POST.)
    num_requests = self._pick_weighted_count(post_count_distribution)
    cols = random.sample(collections, num_requests)
    self.setOkCodes([200])
    for x in range(num_requests):
        url = self.endpoint_url + "/storage/" + cols[x]
        data = []
        items_per_batch = 10
        for i in range(items_per_batch):
            # Random, roughly-unique id: url-safe base64 of urandom,
            # suffixed with a time-derived counter to avoid collisions
            # within a run.  (Renamed from `id`, which shadowed the
            # builtin.)
            wbo_id = base64.urlsafe_b64encode(os.urandom(10)).rstrip("=")
            wbo_id += str(int((time.time() % 100) * 100000))
            payload = self.auth_token * random.randint(50, 200)
            wbo = {'id': wbo_id, 'payload': payload}
            data.append(wbo)
        data = json.dumps(data)
        data = Data('application/json', data)
        self.logi("about to POST (x=%d) %s" % (x, url))
        response = self.post(url, params=data)
        body = response.body
        self.assertTrue(body != '')
        result = json.loads(body)
        self.assertEquals(len(result["success"]), items_per_batch)
        self.assertEquals(len(result["failed"]), 0)

    # DELETE requests.
    # We might choose to delete some individual collections, or to do
    # a full reset and delete all the data.  Never both in the same run.
    num_requests = self._pick_weighted_count(delete_count_distribution)
    self.setOkCodes([204])
    if num_requests:
        cols = random.sample(collections, num_requests)
        for x in range(num_requests):
            url = self.endpoint_url + "/storage/" + cols[x]
            self.delete(url)
    else:
        if random.random() <= deleteall_probability:
            url = self.endpoint_url + "/storage"
            self.delete(url)
def test_storage_session(self):
    """Simulate one client sync session against the storage endpoint.

    Flow: GET info/collections, poll meta/global (re-creating it on
    404), GET a weighted-random number of collections, POST small
    batches of WBOs, then DELETE -- either individual collections or
    (rarely) everything, never both in one run.  Request counts come
    from the module-level weighted distributions.  Returns nothing;
    failures surface via the assertion helpers.
    """
    self._generate_token_credentials()
    self.logi("using endpoint url %s" % (self.endpoint_url,))

    # Always GET info/collections.
    self.setOkCodes([200, 404])
    url = self.endpoint_url + "/info/collections"
    response = self.get(url)

    # GET requests to meta/global.
    num_requests = self._pick_weighted_count(metaglobal_count_distribution)
    for x in range(num_requests):
        # Re-arm the acceptable codes each time through the loop.
        # Previously the [200, 201] armed for the recovery PUT below
        # persisted into the next iteration's GET, so a subsequent 404
        # was wrongly counted as a failure.
        self.setOkCodes([200, 404])
        url = self.endpoint_url + "/storage/meta/global"
        response = self.get(url)
        if response.code == 404:
            # meta/global is missing; write a plausible-looking record.
            metapayload = "This is the metaglobal payload which contains"\
                          " some client data that doesnt look much"\
                          " like this"
            data = json.dumps({"id": "global", "payload": metapayload})
            data = Data('application/json', data)
            self.setOkCodes([200, 201])
            self.put(url, params=data)

    # GET requests to individual collections.
    num_requests = self._pick_weighted_count(get_count_distribution)
    cols = random.sample(collections, num_requests)
    self.setOkCodes([200, 404])
    for x in range(num_requests):
        url = self.endpoint_url + "/storage/" + cols[x]
        # Only ask for items modified within the last 1--100 hours.
        newer = int(time.time() - random.randint(3600, 360000))
        params = {"full": "1", "newer": str(newer)}
        self.logi("about to GET (x=%d) %s" % (x, url))
        response = self.get(url, params)

    # POST requests with a batch of WBOs in each.
    # (The old comment said "PUT ... 100 WBOs"; the batch size is
    # items_per_batch below and the verb is POST.)
    num_requests = self._pick_weighted_count(post_count_distribution)
    cols = random.sample(collections, num_requests)
    self.setOkCodes([200])
    for x in range(num_requests):
        url = self.endpoint_url + "/storage/" + cols[x]
        data = []
        items_per_batch = 10
        for i in range(items_per_batch):
            # Random, roughly-unique id: url-safe base64 of urandom
            # plus a time-derived suffix to avoid collisions within a
            # run.  (Renamed from `id`, which shadowed the builtin.)
            wbo_id = base64.urlsafe_b64encode(os.urandom(10)).rstrip("=")
            wbo_id += str(int((time.time() % 100) * 100000))
            payload = self.auth_token * random.randint(50, 200)
            wbo = {'id': wbo_id, 'payload': payload}
            data.append(wbo)
        data = json.dumps(data)
        data = Data('application/json', data)
        self.logi("about to POST (x=%d) %s" % (x, url))
        response = self.post(url, params=data)
        body = response.body
        self.assertTrue(body != '')
        result = json.loads(body)
        self.assertEquals(len(result["success"]), items_per_batch)
        self.assertEquals(len(result["failed"]), 0)

    # DELETE requests.
    # We might choose to delete some individual collections, or to do
    # a full reset and delete all the data.  Never both in the same run.
    num_requests = self._pick_weighted_count(delete_count_distribution)
    self.setOkCodes([204])
    if num_requests:
        cols = random.sample(collections, num_requests)
        for x in range(num_requests):
            url = self.endpoint_url + "/storage/" + cols[x]
            self.delete(url)
    else:
        if random.random() <= deleteall_probability:
            url = self.endpoint_url + "/storage"
            self.delete(url)
def test_storage_session(self):
    """Simulate one client sync session (basic-auth, versioned-URL API).

    Older variant of the session driver: picks a user and node, then
    walks the client flow -- GET info/collections, poll meta/global
    (re-creating it on 404), GET a weighted-random number of
    collections, POST small batches of WBOs, then DELETE individual
    collections or (rarely) everything via X-Confirm-Delete.  Request
    counts come from the module-level weighted distributions.  Returns
    nothing; failures surface via the assertion helpers.
    """
    username = self._pick_user()
    password = "******"
    node = self._pick_node()
    self.logi("choosing node %s" % (node))
    self.setBasicAuth(username, password)

    # Always GET /username/info/collections.
    self.setOkCodes([200, 404])
    url = node + "/%s/%s/info/collections" % (VERSION, username)
    response = self.get(url)

    # GET requests to meta/global.
    num_requests = self._pick_weighted_count(metaglobal_count_distribution)
    self.setOkCodes([200, 404])
    for x in range(num_requests):
        url = node + "/%s/%s/storage/meta/global" % (VERSION, username)
        response = self.get(url)
        if response.code == 404:
            # meta/global is missing; write a plausible-looking record.
            metapayload = "This is the metaglobal payload which contains"\
                          " some client data that doesnt look much"\
                          " like this"
            data = json.dumps({"id": "global", "payload": metapayload})
            data = Data('application/json', data)
            # NOTE(review): this PUT runs under ok codes [200, 404];
            # a 201 Created would be flagged as a failure.  The newer
            # token-auth variant arms [200, 201] here -- confirm which
            # status this server version actually returns.
            self.put(url, params=data)

    # GET requests to individual collections.
    num_requests = self._pick_weighted_count(get_count_distribution)
    cols = random.sample(collections, num_requests)
    self.setOkCodes([200, 404])
    for x in range(num_requests):
        url = node + "/%s/%s/storage/%s" % (VERSION, username, cols[x])
        # Only ask for items modified within the last 1--100 hours.
        newer = int(time.time() - random.randint(3600, 360000))
        params = {"full": "1", "newer": str(newer)}
        self.logi("about to GET (x=%d) %s" % (x, url))
        response = self.get(url, params)

    # POST requests with a batch of WBOs in each.
    # (The old comment said "PUT ... 100 WBOs"; the batch size is
    # items_per_batch below and the verb is POST.)
    num_requests = self._pick_weighted_count(post_count_distribution)
    cols = random.sample(collections, num_requests)
    self.setOkCodes([200])
    for x in range(num_requests):
        url = node + "/%s/%s/storage/%s" % (VERSION, username, cols[x])
        data = []
        items_per_batch = 10
        for i in range(items_per_batch):
            # Random id from urandom plus a time-derived suffix.
            # (Renamed from `id`, which shadowed the builtin.)
            wbo_id = base64.b64encode(os.urandom(10))
            wbo_id += str(time.time() % 100)
            payload = username * random.randint(50, 200)
            wbo = {'id': wbo_id, 'payload': payload}
            data.append(wbo)
        data = json.dumps(data)
        data = Data('application/json', data)
        self.logi("about to POST (x=%d) %s" % (x, url))
        response = self.post(url, params=data)
        body = response.body
        self.assertTrue(body != '')
        result = json.loads(body)
        self.assertEquals(len(result["success"]), items_per_batch)
        self.assertEquals(len(result["failed"]), 0)

    # DELETE requests.
    # We might choose to delete some individual collections, or to do
    # a full reset and delete all the data.  Never both in the same run.
    num_requests = self._pick_weighted_count(delete_count_distribution)
    self.setOkCodes([200])
    if num_requests:
        cols = random.sample(collections, num_requests)
        for x in range(num_requests):
            url = node + "/%s/%s/storage/%s" % (VERSION, username, cols[x])
            self.delete(url)
    else:
        if random.random() <= deleteall_probability:
            url = node + "/%s/%s/storage" % (VERSION, username)
            self.delete(url)