def testTaskQueue(self):
    writer = RecordIOWriter("test")
    writer.create(compressed=False)
    test_value = test_helper.uncompressableString(MAX_ENTRY_SIZE - 1)
    entries_to_write = MAX_BLOB_SIZE / MAX_ENTRY_SIZE + 1
    for i in range(entries_to_write):
      writer.insert(str(i), test_value)
    writer.commit_async()
    # Replay the enqueued writer tasks against the write handler.
    taskq = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
    tasks = taskq.GetTasks("recordio-writer")
    for task in tasks:
      url = task["url"]
      args = urlparse.parse_qs(base64.b64decode(task["body"]))
      for x in args:
        args[x] = args[x][0]
      test_helper.requestGet(WriteHandler(), url, args)
    self.assertTrue(len([x for x in RecordIOShard.all()]) > 1)
    # Read everything back and verify all entries survived the async write.
    reader = RecordIOReader("test")
    result = {}
    for key, value in reader:
      result[key] = value
    self.assertEqual(len(result), entries_to_write)
    for i in range(entries_to_write):
      self.assertEqual(result[str(i)], test_value, "Not equal")

def write2MBAndReplace(self, compressed):
    test_string = test_helper.uncompressableString(2**21)
    updater = RecordIOWriter("test")
    updater.create(compressed=compressed)
    updater.insert("test", test_string)
    updater.commit_sync()
    output = []
    entries = 0
    shards_count = 0
    for recordio in RecordIOShard.all():
      self.assertTrue(len(recordio.data) >= 1000)
      shards_count += 1
      for entry in recordio:
        output += [entry[-1]]
        entries += 1
    self.assertTrue(shards_count > 1)
    self.assertTrue(entries > 3)
    self.assertEqual("".join(output), STRING + test_string, "read != write")
    # Replace the 2MB value with a short one; the shards should shrink.
    updater.insert("test", "short")
    updater.commit_sync(retries=0)
    replaced_shards_count = 0
    for recordio in RecordIOShard.all():
      if replaced_shards_count == 0:
        self.assertEqual(1, len(recordio))
        for entry in recordio:
          self.assertEqual(STRING + "short", entry[-1])
      else:
        self.assertEqual(0, len(recordio))
        for entry in recordio:
          self.fail("shouldn't be iterable")
      replaced_shards_count += 1
      self.assertTrue(len(recordio.data) < 1000)
    self.assertTrue(replaced_shards_count > 0)
    self.assertTrue(replaced_shards_count <= shards_count)

def post(self):
    name = self.request.get("name")
    compressed = bool(self.request.get("compressed"))
    key = self.request.get("key", None)
    value = self.request.get("value", None)
    if name:
      writer = RecordIOWriter(name)
      if key is None and value is None:
        writer.create(compressed)
      elif value is None:
        writer.remove(key)
        writer.commit_sync()
      else:
        writer.insert(str(key), eval(value))
        writer.commit_sync()
      # Redirect back to the browse view, positioned at the touched key.
      start = ""
      if key:
        start = str(key)
      self.redirect("?name=" + urllib.quote(name) +
                    "&start=" + urllib.quote(start))
    delete = self.request.get("delete")
    if delete:
      writer = RecordIOWriter(delete)
      writer.delete()
      self.redirect("/recordio/")

def do_write(self, single, compressed, entries):
    start = time.time()
    writer = RecordIOWriter(
        "loadtest_" + single + "_" +
        {True: "compressed", False: "uncompressed"}[compressed])
    writer.create(compressed=compressed)
    for entry in entries:
      writer.insert(entry[0], entry[1])
    writer.commit_sync(retries=10)
    # Return the elapsed wall-clock time and the writer's datastore stats.
    return time.time() - start, writer.db_stats()

def writeOneShard(self, compressed):
    updater = RecordIOWriter("test")
    updater.create(compressed=compressed)
    updater.insert("1", "foo")
    updater.insert("2", "bar")
    updater.commit_sync()
    # A second writer updates the existing RecordIO in place.
    updater = RecordIOWriter("test")
    updater.insert("3", "win")
    updater.remove("2")
    updater.commit_sync()
    recordio = RecordIOShard.all().get()
    self.assertEqual(recordio.compressed, compressed)
    self.assertEqual([x for x in recordio],
                     [("1", STRING + "foo"), ("3", STRING + "win")])

def testCommitToQueueAndScheduleWrite(self):
    updater = RecordIOWriter("test")
    updater.create()
    updater.insert("a", "")
    updater.commit_async()
    # commit_async must schedule exactly one write task for this RecordIO.
    taskq = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
    tasks = taskq.GetTasks("recordio-writer")
    self.assertEqual(len(tasks), 1)
    self.assertEqual(tasks[0]["url"], "/recordio/write")
    self.assertEqual(base64.b64decode(tasks[0]["body"]),
                     "taskqueue=" + urllib.quote(RecordIOShard.key_name("test")))

def testCommitToQueueSplitEntries(self):
    chunk_size = MAX_ENTRY_SIZE + 1
    test_string = test_helper.uncompressableString(chunk_size)
    updater = RecordIOWriter("test")
    updater.create()
    updater.insert("test", test_string)
    list(updater.commit_to_queue_())
    # The oversized value must be split into two entries within one pull task.
    pull = taskqueue.Queue('recordio-queue')
    tasks = list(pull.lease_tasks(60, 100))
    self.assertEqual(len(tasks), 1)
    self.assertEqual(tasks[0].tag, RecordIOShard.key_name("test"))
    updates = marshal.loads(tasks[0].payload)
    self.assertEqual([('test', 0, 2), ('test', 1, 2)],
                     [x[:-2] for x in updates])
    self.assertEqual(STRING + test_string,
                     "".join([x[-1] for x in updates]))

def testWriteStringMarshalPickle(self):
    updater = RecordIOWriter("test")
    updater.create()
    updater.insert("string", "string")
    marshalable = {"a": [1, 2, 3]}
    updater.insert("marshal", marshalable)

    class AnyClass():
      pass

    pickleable = AnyClass()
    updater.insert("cpickle", pickleable)
    updater.commit_sync()
    # Entries come back sorted by key, each value prefixed with its encoding tag.
    recordio = RecordIOShard.all().get()
    self.assertEqual([x for x in recordio],
                     [("cpickle", CPICKLE + cPickle.dumps(pickleable)),
                      ("marshal", MARSHAL + marshal.dumps(marshalable)),
                      ("string", STRING + "string")])

def testCommitToQueue(self):
    updater = RecordIOWriter("test")
    updater.create()
    chunk_size = MAX_ENTRY_SIZE - 1
    entries_to_write = MAX_TASKQUEUE_BATCH_SIZE / MAX_ENTRY_SIZE + 1
    for i in xrange(entries_to_write):
      updater.insert(str("%09d" % i),
                     test_helper.uncompressableString(chunk_size))
    list(updater.commit_to_queue_())
    # The batch exceeds one taskqueue payload, so it must be split into two tasks.
    pull = taskqueue.Queue('recordio-queue')
    tasks = list(pull.lease_tasks(60, 100))
    self.assertEqual(len(tasks), 2)
    self.assertEqual(tasks[0].tag, RecordIOShard.key_name("test"))
    self.assertEqual(tasks[1].tag, RecordIOShard.key_name("test"))
    updates_0 = marshal.loads(tasks[0].payload)
    updates_1 = marshal.loads(tasks[1].payload)
    self.assertEqual([str("%09d" % x) for x in xrange(entries_to_write)],
                     [x[0] for x in updates_0] + [x[0] for x in updates_1])
    self.assertEqual(updates_0[0][1],
                     STRING + test_helper.uncompressableString(chunk_size))