def test_two(self):
    """Creating a key that already exists must raise KeyError."""
    store = Store("./test.json")
    entry_key, payload = "1", {"a": 1}
    store.create(entry_key, payload)
    # A second create with the same key is rejected.
    with pytest.raises(KeyError):
        store.create(entry_key, payload)
    store.delete(entry_key)
def test_four(self):
    """A key read just before its TTL expires is still readable."""
    store = Store("./test.json")
    entry_key = "k"
    payload = {"a": 1}
    ttl_seconds = 5
    store.create(entry_key, payload, ttl_seconds)
    time.sleep(ttl_seconds - 1)  # still inside the TTL window
    round_tripped = json.loads(store.read(entry_key))
    assert round_tripped == payload
    store.delete(entry_key)
def test_two(self):
    """Once the TTL has elapsed the key is gone: delete() raises KeyError."""
    store = Store("./test.json")
    entry_key, payload, ttl_seconds = "k", {"a": 1}, 5
    store.create(entry_key, payload, ttl_seconds)
    time.sleep(ttl_seconds + 1)  # wait past expiry
    with pytest.raises(KeyError):
        store.delete(entry_key)
class TestTracker(unittest.TestCase):
    """Tests for Tracker persistence against a CSV-backed Store."""

    def setUp(self):
        self.task = Task("test_task")
        self.store = Store(file="task_store_test.csv")
        self.tracker = Tracker(self.task, store=self.store)
        # Each test starts from an empty store file.
        self.store.delete()

    def test_starting_a_new_task_persists_it_to_the_store(self):
        self.tracker.start()
        with open(self.store.file) as f:
            persisted_lines = f.readlines()
        self.assertEqual(1, len(persisted_lines))

    def test_starting_a_new_task_returns_the_started_task(self):
        started = self.tracker.start()
        self.assertEqual(started, self.task)
        # Starting stamps a start date on the task.
        self.assertTrue(self.task.start_date)
def test_record_life_cycle():
    """Exercise the full Store record cycle: absent -> created -> updated -> deleted.

    Fix: the original named a local ``id``, shadowing the builtin.
    """
    record_id = '01234567890-c01'
    initial_record = dict(id=record_id, title='hello world', description='etc.')
    updated_record = dict(id=record_id, title='another title')
    store = Store()  # empty store
    # A record that was never put is absent.
    assert store.get(id=record_id) is None
    store.put(**initial_record)
    record = store.get(id=record_id)
    assert record['title'] == initial_record['title']
    assert record['description'] == initial_record['description']
    # put() with the same id replaces the record wholesale: the old
    # 'description' must not survive the update.
    store.put(**updated_record)
    record = store.get(id=record_id)
    assert record['title'] == updated_record['title']
    assert record.get('description', None) is None
    # Deleting makes the record unreachable again.
    store.delete(id=record_id)
    assert store.get(id=record_id) is None
def test_one(self):
    """A created value round-trips: read() returns JSON that decodes to it."""
    store = Store("./test.json")
    entry_key = "k"
    payload = {"a": 1}
    store.create(entry_key, payload)
    round_tripped = json.loads(store.read(entry_key))
    assert round_tripped == payload
    store.delete(entry_key)
def test_one(self):
    """create() succeeds for a fresh key and returns None."""
    store = Store("./test.json")
    key, value = "1", {"a": 1}
    # PEP 8: comparisons to None use identity, not equality (was `== None`).
    assert store.create(key, value) is None
    store.delete(key)
def test_nine(self):
    """create() with a TTL succeeds for a fresh key and returns None."""
    store = Store("./test.json")
    key, value, ttl = "1", {"a": 1}, 10
    # PEP 8: identity comparison for None (was `== None`).
    assert store.create(key, value, ttl) is None
    # Delete via the key variable instead of repeating the literal "1".
    store.delete(key)
def test_four(self):
    """delete() on a key that was never created raises KeyError."""
    store = Store("./test.json")
    missing_key = "k"
    with pytest.raises(KeyError):
        store.delete(missing_key)
class KvPaxosServer:
    """Replicated key/value server: an HTTP front end whose operations are
    sequenced through Paxos before being applied to the local Store.

    NOTE(review): Python 2 code (``dict.has_key``, ``BaseHTTPServer``).
    NOTE(review): reconstructed from collapsed source; the exact branch
    boundaries inside execute() should be confirmed against history.
    """

    def __init__(self):
        # Truncate this replica's on-disk operation log at startup.
        with open(str(ME) + 'log.txt', 'w') as file:
            file.write('log:\n')
        self.px = Paxos.make(HOSTS_LIST, ME)
        self.http_server = BaseHTTPServer.HTTPServer((HOST, int(PORT)), MyHTTPRequestHandler)
        self.kvstore = Store()
        self.keep_running = True
        self.lock = Lock()
        # do not modify these 2 vars outside the execute() function
        # lock required to access these values.
        # maybe unnecessary
        self.executed_paxos_no = 0
        # contains all executed operations and their return values
        self.operation_log = []
        # requestid -> paxos seq at which that request was executed;
        # used to deduplicate retried client requests.
        self.processed_requestid = dict()

    def start(self):
        """Serve HTTP requests until shutdown, with maintainance in a thread."""
        log("HTTP Server Starts - %s:%s" % (HOST, PORT))
        maintainance_thread = Thread(target=self.maintainance, name='maintainance')
        maintainance_thread.start()
        try:
            while self.keep_running:
                # handle_request (not serve_forever) so keep_running is
                # re-checked between requests.
                # self.http_server.serve_forever()
                self.http_server.handle_request()
        except KeyboardInterrupt:
            sys.exit(0)
        maintainance_thread.join()
        os._exit(0)

    def maintainance(self):
        """Background catch-up loop: executes decided Paxos instances that
        this replica has not yet applied locally."""
        while self.keep_running:
            while self.px.max() < self.executed_paxos_no and self.keep_running:
                # print 'maintainance sleep'
                time.sleep(MAX_SLEEP_TIME)
            if self.keep_running:
                # print 'maintainance execute', self.px.max(), self.executed_paxos_no
                self.execute(self.executed_paxos_no, None)

    ''' this function is only called by the handler class, & the maintainance thread'''

    def execute(self, seq, requestid):
        # catch up. this ensures that operations are executed in ascending order
        while self.executed_paxos_no < seq:
            time.sleep(MAX_SLEEP_TIME)
        with self.lock:
            # print 'lock acquired ============================================================'
            # if seq < self.executed_paxos_no:
            #     # the operations is done by other threads already, check the log directly
            #     return operation_log[seq].return_value
            if self.processed_requestid.has_key(requestid):
                # Duplicate client retry: serve the cached return value.
                assert self.processed_requestid[requestid] < self.executed_paxos_no
                return self.operation_log[self.processed_requestid[requestid]].return_value
            # this request has not been executed yet, or is being executed by maintainance thread.
            while True:
                # since not executed, it cannot be forgotten
                decided, op_jstr = self.px.status(seq)
                if decided:
                    break
                else:
                    # print 'waiting for decided value', seq, op_jstr
                    time.sleep(MAX_SLEEP_TIME)
            op = Operation.decode(op_jstr)
            assert decided
            if self.processed_requestid.has_key(op.requestid):
                # The decided op is itself a duplicate of an earlier request:
                # reuse its recorded result rather than re-applying it.
                success, value = self.operation_log[self.processed_requestid[op.requestid]].return_value
            else:
                # First execution of this operation: apply it to the store.
                if op.op_type == 'GET':
                    success, value = self.kvstore.get(op.key)
                elif op.op_type == 'INSERT':
                    success, value = self.kvstore.insert(op.key, op.value)
                elif op.op_type == 'DELETE':
                    success, value = self.kvstore.delete(op.key)
                elif op.op_type == 'UPDATE':
                    success, value = self.kvstore.update(op.key, op.value)
            # One log entry per executed seq, unconditionally: operation_log
            # is indexed by the seq numbers stored in processed_requestid.
            self.executed_paxos_no += 1
            # self.px.done(seq)
            op.done((success, value))
            self.operation_log += [op]
            assert (not self.processed_requestid.has_key(op.requestid)) or requestid is None
            if not self.processed_requestid.has_key(op.requestid):
                self.processed_requestid[op.requestid] = seq
            # print self.processed_requestid
            with open(str(ME) + 'log.txt', 'a') as file:
                file.write(op.encode() + '\n')  # + str(self.processed_requestid) + '\n')
            return success, value

    def handle_shutdown(self):
        # Flag is polled by both the HTTP loop and the maintainance thread.
        self.keep_running = False
# Benchmark section: timings for direct-store reads/deletes, then the
# API-service client. (Relies on `store`, `keys`, and accumulated
# `avg_time` from the preceding allocation loop.)
# Fix: "Allocaiton" typo in the printed label.
print("Time Per Allocation (s): " + str(avg_time / store.count))

# Measure time to read
avg_time = 0
for k in keys:
    start = time.time()
    store.get(k)  # result intentionally discarded; we only time the call
    end = time.time()
    avg_time += (end - start)
print("Time Per Read (s): " + str(avg_time / store.count))

# Measure time to remove
avg_time = 0
for k in keys:
    start = time.time()
    store.delete(k)
    end = time.time()
    avg_time += (end - start)
print("Time Per Delete (s): " + str(avg_time / len(keys)))

#
# Testing API service
#
print("Testing API service, persistent connection, no concurrency")
client = storeClient.StoreClient()
client.connect()

# Service/API Put time to test
avg_time = 0
times = []
def test_one(self):
    """delete() on a live (unexpired) key succeeds and returns None."""
    store = Store("./test.json")
    key, value, ttl = "k", {"a": 1}, 5
    store.create(key, value, ttl)
    # PEP 8: comparisons to None use identity, not equality (was `== None`).
    assert store.delete(key) is None
class KvPaxosServer:
    """Replicated key/value server: an HTTP front end whose operations are
    sequenced through Paxos before being applied to the local Store.

    NOTE(review): Python 2 code (``dict.has_key``, ``BaseHTTPServer``).
    NOTE(review): reconstructed from collapsed source; the exact branch
    boundaries inside execute() should be confirmed against history.
    """

    def __init__(self):
        # Truncate this replica's on-disk operation log at startup.
        with open(str(ME) + 'log.txt', 'w') as file:
            file.write('log:\n')
        self.px = Paxos.make(HOSTS_LIST, ME)
        self.http_server = BaseHTTPServer.HTTPServer((HOST, int(PORT)), MyHTTPRequestHandler)
        self.kvstore = Store()
        self.keep_running = True
        self.lock = Lock()
        # do not modify these 2 vars outside the execute() function
        # lock required to access these values.
        # maybe unnecessary
        self.executed_paxos_no = 0
        # contains all executed operations and their return values
        self.operation_log = []
        # requestid -> paxos seq at which that request was executed;
        # used to deduplicate retried client requests.
        self.processed_requestid = dict()

    def start(self):
        """Serve HTTP requests until shutdown, with maintainance in a thread."""
        log("HTTP Server Starts - %s:%s" % (HOST, PORT))
        maintainance_thread = Thread(target=self.maintainance, name='maintainance')
        maintainance_thread.start()
        try:
            while self.keep_running:
                # handle_request (not serve_forever) so keep_running is
                # re-checked between requests.
                # self.http_server.serve_forever()
                self.http_server.handle_request()
        except KeyboardInterrupt:
            sys.exit(0)
        maintainance_thread.join()
        os._exit(0)

    def maintainance(self):
        """Background catch-up loop: executes decided Paxos instances that
        this replica has not yet applied locally."""
        while self.keep_running:
            while self.px.max() < self.executed_paxos_no and self.keep_running:
                # print 'maintainance sleep'
                time.sleep(MAX_SLEEP_TIME)
            if self.keep_running:
                # print 'maintainance execute', self.px.max(), self.executed_paxos_no
                self.execute(self.executed_paxos_no, None)

    ''' this function is only called by the handler class, & the maintainance thread'''

    def execute(self, seq, requestid):
        # catch up. this ensures that operations are executed in ascending order
        while self.executed_paxos_no < seq:
            time.sleep(MAX_SLEEP_TIME)
        with self.lock:
            # print 'lock acquired ============================================================'
            # if seq < self.executed_paxos_no:
            #     # the operations is done by other threads already, check the log directly
            #     return operation_log[seq].return_value
            if self.processed_requestid.has_key(requestid):
                # Duplicate client retry: serve the cached return value.
                assert self.processed_requestid[requestid] < self.executed_paxos_no
                return self.operation_log[self.processed_requestid[requestid]].return_value
            # this request has not been executed yet, or is being executed by maintainance thread.
            while True:
                # since not executed, it cannot be forgotten
                decided, op_jstr = self.px.status(seq)
                if decided:
                    break
                else:
                    # print 'waiting for decided value', seq, op_jstr
                    time.sleep(MAX_SLEEP_TIME)
            op = Operation.decode(op_jstr)
            assert decided
            if self.processed_requestid.has_key(op.requestid):
                # The decided op is itself a duplicate of an earlier request:
                # reuse its recorded result rather than re-applying it.
                success, value = self.operation_log[self.processed_requestid[op.requestid]].return_value
            else:
                # First execution of this operation: apply it to the store.
                if op.op_type == 'GET':
                    success, value = self.kvstore.get(op.key)
                elif op.op_type == 'INSERT':
                    success, value = self.kvstore.insert(op.key, op.value)
                elif op.op_type == 'DELETE':
                    success, value = self.kvstore.delete(op.key)
                elif op.op_type == 'UPDATE':
                    success, value = self.kvstore.update(op.key, op.value)
            # One log entry per executed seq, unconditionally: operation_log
            # is indexed by the seq numbers stored in processed_requestid.
            self.executed_paxos_no += 1
            # self.px.done(seq)
            op.done((success, value))
            self.operation_log += [op]
            assert (not self.processed_requestid.has_key(op.requestid)) or requestid is None
            if not self.processed_requestid.has_key(op.requestid):
                self.processed_requestid[op.requestid] = seq
            # print self.processed_requestid
            with open(str(ME) + 'log.txt', 'a') as file:
                file.write(op.encode() + '\n')  # + str(self.processed_requestid) + '\n')
            return success, value

    def handle_shutdown(self):
        # Flag is polled by both the HTTP loop and the maintainance thread.
        self.keep_running = False
class Records:
    """CRUD facade over a Store with opaque, salted pagination tokens.

    Fixes: locals in chunk() shadowed the builtins ``slice`` and ``next``;
    list-copy comprehensions replaced with ``list(...)``; pointless ``f``
    prefix dropped from a placeholder-free string.
    """

    def __init__(self, salt='azerty'):
        self.records = Store()
        self.salt = salt  # for pagination tokens
        # Attribute names that write() silently refuses to persist.
        self.forbidden_attributes = [
            'bearer', 'secret', 'record_has_been_loaded'
        ]

    def _encode_token(self, token):
        """Turn an integer offset into an opaque base64 pagination token."""
        return base64.b64encode(bytes(self.salt + str(token),
                                      encoding='utf-8')).decode()

    def _decode_token(self, token):
        """Reverse _encode_token; None / 'None' means the first page.

        Raises ValueError on the terminal 'EOF' token.
        """
        if token == 'EOF':
            raise ValueError("End of file")
        if token is None or token == 'None':
            return 0
        decoded = base64.b64decode(token.encode())
        offset = int(decoded[len(self.salt):])
        if offset < 1:
            offset = 0
        return offset

    def chunk(self, token=None, count=10):
        """Return one page of records ordered by 'stamp'.

        The result has .records, .count, and .token — the token for the
        next page, or 'EOF' when this page is the last.
        """
        offset = self._decode_token(token)
        page_ids = list(islice(self.records.ids_by('stamp'),
                               offset, offset + count))
        actual = len(page_ids)
        page = [self.records.get(record_id) for record_id in page_ids]
        next_token = (self._encode_token(offset + count)
                      if actual == count else 'EOF')

        class Chunk:
            def __init__(self, **kwargs):
                self.__dict__.update(kwargs)

        return Chunk(records=page, count=actual, token=next_token)

    def read(self, id):
        """Return the record for *id*, or None when absent."""
        return self.records.get(id, None)

    def write(self, id=None, **kwargs):
        """Create or update a record; return its id.

        Forbidden attributes are dropped; 'stamp' is refreshed on every
        write, so chunk() ordering reflects last-modified time.
        """
        id = id or uuid()
        record = self.records.get(id, {})
        for (key, value) in kwargs.items():
            if key not in self.forbidden_attributes:
                record[key] = value
        record['stamp'] = str(time())
        # The id lives only as the Store key, never inside the record body.
        record.pop('id', None)
        self.records.put(id, **record)
        return id

    def delete(self, id):
        """Remove the record for *id*."""
        self.records.delete(id)

    def count(self):
        """Number of stored records."""
        return len(self.records.ids())

    def scan(self):
        """Yield every record, in the order Store.ids() returns them."""
        for record_id in self.records.ids():
            yield self.records.get(record_id)

    def dump(self):
        """Materialize scan() as a list."""
        return list(self.scan())

    def load(self, iterator, append=True):
        """Bulk-write records from *iterator*; return how many were written.

        With append=False the existing store is replaced first.  The
        'record_has_been_loaded' marker set here is itself a forbidden
        attribute, so write() drops it before persisting.
        """
        if not append:
            self.records = Store()
        written = 0
        for record in iterator:
            record['record_has_been_loaded'] = True
            self.write(**record)
            written += 1
        return written
def delete_domain(domain):
    """Remove *domain* from the store, notify the user, and return to the list view."""
    backing_store = Store()
    backing_store.delete(domain)
    flash("%s deleted" % domain)
    # Propagate the deletion to downstream consumers.
    fanout.update()
    return redirect(url("domains"))
def test_three(self):
    """delete() before the TTL expires succeeds and returns None."""
    store = Store("./test.json")
    key, value, ttl = "k", {"a": 1}, 5
    store.create(key, value, ttl)
    time.sleep(ttl - 1)  # still inside the TTL window
    # PEP 8: comparisons to None use identity, not equality (was `== None`).
    assert store.delete(key) is None
def test_delete(self):
    """delete() removes the key from the backing _store mapping."""
    Store.set('name', 'Mike')
    self.assertEqual('Mike', Store.get('name'))
    Store.delete('name')
    # Membership tests work on the dict directly; `.keys()` was redundant.
    self.assertNotIn('name', Store._store)
def POST(self, id):
    """Delete the record for *id* and return the result JSON-encoded.

    Fix: the store is now closed in a finally block, so the connection
    is not leaked when delete() (or the JSON encoding) raises.
    """
    from store import Store
    store = Store()
    try:
        data = json.dumps(store.delete(id))
    finally:
        store.close()
    return data