def access_oram(self, path_to_root, data_id_of_interest=None):
    # TODO: lock the function to be used only in one thread at the time
    # Read every node on the path, move real blocks into the stash, then write
    # the path back (refilling it from the stash or with dummy data).
    downloaded_data_items = self.read_path(path_to_root)
    data_item = self.write_stash(downloaded_data_items, data_id_of_interest)
    self.write_path(path_to_root)
    logger.info('STASH SIZE - %d' % Stash().get_stash_size())
    return data_item
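# Sketch, not part of the original module: the TODO above asks for access_oram to be
# used by only one thread at a time. One minimal way to do that is a module-level lock;
# the names ORAM_LOCK and access_oram_serialised are illustrative only.
import threading

ORAM_LOCK = threading.Lock()

def access_oram_serialised(self, path_to_root, data_id_of_interest=None):
    # Serialise callers so path reads, stash updates and path writes never interleave.
    with ORAM_LOCK:
        return self.access_oram(path_to_root, data_id_of_interest)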
def write_path(self, path_to_root):
    for node in path_to_root:
        # Look for a stashed block whose assigned leaf path still passes through this node.
        potential_data_properties = self.get_potential_data_ids_with_leaf()
        has_potential_item = False
        for potential_data_property in potential_data_properties:
            potential_leaf_id = abs(potential_data_property[1])
            potential_path = self.path_to_root(potential_leaf_id)
            if node in potential_path:
                has_potential_item = True
                data_id = potential_data_property[0]
                data_item = Stash().get_data_item(data_id)
                self.write_node(node, data_item[1])
                logger.info('WRITE PATH - upload to node %d data item with id %d'
                            % (node, data_id))
                PositionMap().update_leaf_id(data_id, True)
                Stash().delete_data_item(data_id)
                break
        if not has_potential_item:
            # No stashed block fits this node, so upload dummy data instead.
            self.write_node(node)
            logger.info('WRITE PATH - upload to node %d dummy data' % node)
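# Sketch, not part of the original module: write_path and download_data_items rely on
# self.path_to_root(leaf_id), which is not shown in this section. Assuming the server
# tree is stored in heap order (root at index 0, children of node i at 2*i+1 and 2*i+2)
# and that leaf_id is the node index of a leaf, it could look like this; both
# assumptions are illustrative only.
def path_to_root_sketch(leaf_node_index):
    path = [leaf_node_index]
    node = leaf_node_index
    while node > 0:
        node = (node - 1) // 2  # parent index in heap layout
        path.append(node)
    return path  # e.g. path_to_root_sketch(4) == [4, 1, 0]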
def download_data_items(self, data_ids):
    data_items = []
    for data_id in data_ids:
        leaf_id = PositionMap().get_leaf_id(data_id)
        if leaf_id < 0:
            # A negative leaf id marks a block that currently lives in the stash.
            data_item = Stash().get_data_item(data_id)
            logger.info('PATH ORAM - access stash')
            # decrypt data item
            data_item = self.decrypt_data_item(data_item[1])
            data_items.append(data_item)
        else:
            path_to_root = self.path_to_root(leaf_id)
            data_item = self.access_oram(path_to_root, data_id)
            data_items.append(data_item)
    return data_items
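# Sketch, not part of the original module: download_data_items and write_stash both call
# self.decrypt_data_item, which is not shown here. Judging from how its result is used,
# it returns a (data_id, plaintext) pair and raises DummyFileFound for dummy blocks.
# The aes_crypto.decrypt call and the DUMMY_ID comparison below are assumptions made
# for this illustration.
def decrypt_data_item_sketch(self, token):
    data_id, plaintext = self.aes_crypto.decrypt(token)
    if data_id == config.DUMMY_ID:
        # Dummy blocks exist only to hide the access pattern and carry no file data.
        raise DummyFileFound()
    return data_id, plaintext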
def split(self, file_name, file_input):
    logger.info('length of the selected file %d' % len(file_input))
    data_ids = []
    for x in range(0, len(file_input), config.BLOCK_SIZE):
        # Never hand out the reserved dummy id as a real data id.
        if self.data_id_counter == config.DUMMY_ID:
            self.data_id_counter += 1
        data_id = self.data_id_counter
        self.data_id_counter += 1
        data_ids.append(data_id)
        chunk = file_input[x:config.BLOCK_SIZE + x]
        logger.info('chunk size is %d after splitting' % len(chunk))
        if len(chunk) != config.BLOCK_SIZE:
            logger.info('chunk is smaller than the block size, add padding here')
            chunk = chunk.rjust(config.BLOCK_SIZE, PADDING)
            logger.info('chunk size %d after padding' % len(chunk))
        token = self.aes_crypto.encrypt(chunk, data_id)
        logger.info('chunk size is %d after encryption' % len(token))
        Stash().add_file(data_id, token)
        PositionMap().add_data(data_id)
    FileMap().add_file(file_name, len(file_input), data_ids, self.data_id_counter)
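# Sketch, not part of the original module: the inverse of the padding step in split.
# Because the last chunk is left-padded with rjust, only its trailing bytes belong to
# the file; the original length stored in FileMap by split tells how many. The helper
# name and the assumption that file content is handled as bytes are illustrative only.
def join_chunks_sketch(plaintext_chunks, file_length):
    last_real_len = file_length - (len(plaintext_chunks) - 1) * config.BLOCK_SIZE
    body = b''.join(plaintext_chunks[:-1])
    # Keep only the real tail of the final, possibly padded, chunk.
    return body + plaintext_chunks[-1][-last_real_len:]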
def write_stash(self, downloaded_data_items, data_id_of_interest=None):
    data_item_of_interest = None
    for downloaded_data_item in downloaded_data_items:
        try:
            data_id, plaintext = self.decrypt_data_item(downloaded_data_item)
            if PositionMap().data_id_exist(data_id):
                logger.info('WRITE STASH - downloaded data item with id %d' % data_id)
                # Re-encrypt the block before putting it back into the stash.
                token = self.aes_crypto.encrypt(plaintext, data_id)
                Stash().add_file(data_id, token)
                if data_id_of_interest is not None and data_id_of_interest == data_id:
                    # The block of interest is remapped to a new leaf.
                    PositionMap().choose_new_leaf_id(data_id)
                    data_item_of_interest = data_id, plaintext
                else:
                    PositionMap().update_leaf_id(data_id, False)
        except DummyFileFound:
            logger.info('WRITE STASH - downloaded dummy file')
    return data_item_of_interest
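# Sketch, not part of the original module: write_stash consumes the list produced by
# self.read_path (called in access_oram), which is not shown in this section. It
# presumably downloads the ciphertext stored in every node on the given path; the
# download_node helper used below is an assumption for illustration.
def read_path_sketch(self, path_to_root):
    downloaded_data_items = []
    for node in path_to_root:
        downloaded_data_items.append(self.download_node(node))
    return downloaded_data_items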
def delete_selected_node(filename):
    # Remove the file's blocks from the client-side state: position map, stash and file map.
    data_ids = FileMap().get_data_ids_of_file(filename)
    PositionMap().delete_data_ids(data_ids)
    Stash().delete_data_items(data_ids)
    FileMap().delete_file(filename)
def setup_stash():
    Stash()
def get_potential_data_ids_with_leaf(self):
    # Return (data_id, leaf_id) pairs for every block currently held in the stash.
    potential_data_ids = Stash().get_potential_data_id()
    potential_data_properties = PositionMap().get_leaf_ids(potential_data_ids)
    return potential_data_properties