def add(self, file_path_list: list, tag_list: list, ip: str, port: int):
    """Upload each file in ``file_path_list`` into the DHT and tag it.

    For every file, the node responsible for the file's hash pulls the data
    from this client (reachable at ``ip:port``) and records the file->tags
    mapping; then every tag's responsible node records the tag->files mapping.

    Args:
        file_path_list: local paths of the files to upload.
        tag_list: tag names to associate with every uploaded file.
        ip: address the storage nodes use to pull the file data back.
        port: port the storage nodes use to pull the file data back.
    """
    import os

    # os.path.basename handles the platform separator as well as '/',
    # unlike the previous split('/')[-1].
    file_list = [os.path.basename(p) for p in file_path_list]
    file_id_list = [hash_str(name) for name in file_list]
    tag_id_list = [hash_str(tag) for tag in tag_list]

    def _storage(key_id):
        # Resolve the remote storage proxy of the node owning key_id.
        node = self.dht.find_successor(key_id)
        node_ro = get_remote(node)
        return get_remote(node_ro.get_remote_storage())

    # Upload every file to its owner node and record its tags there.
    for file_path, file_name, file_id in zip(file_path_list, file_list, file_id_list):
        storage = _storage(file_id)
        storage.upload(file_name, file_path, ip, port)
        storage.add_file_to_tags(file_name, tag_list)

    # Record, on each tag's owner node, the ids of all uploaded files.
    for tag_id in tag_id_list:
        _storage(tag_id).add_tag_to_files(tag_id, file_id_list)
def delete_tags(self, tag_list, tag_list2):
    """Remove the tags in ``tag_list2`` from every file carrying *all* tags
    in ``tag_list``.

    The matching file set is the intersection of each query tag's file set;
    for each matched file its owner node drops the ``tag_list2`` tags, and
    each dropped tag's owner node drops the matched files.

    Args:
        tag_list: query tags selecting the files (AND semantics).
        tag_list2: tags to remove from the selected files.
    """
    tag_id_list = [hash_str(t) for t in tag_list]
    tag_id_list2 = [hash_str(t) for t in tag_list2]

    def _storage(key_id):
        # Resolve the remote storage proxy of the node owning key_id.
        node = self.dht.find_successor(key_id)
        node_ro = get_remote(node)
        return get_remote(node_ro.get_remote_storage())

    # Intersect the file sets of all query tags.
    file_list = None
    for tag_id in tag_id_list:
        files = _storage(tag_id).get_files_of_tag(tag_id)
        if file_list is None:
            file_list = files
        else:
            file_list.intersection_update(files)

    # Guard: with an empty tag_list the original iterated None and raised
    # TypeError; an empty intersection also means there is nothing to do.
    if not file_list:
        return

    for file_id in file_list:
        _storage(file_id).remove_tags_of_file(file_id, tag_list2)

    for tag_id2 in tag_id_list2:
        _storage(tag_id2).remove_files_of_tag(tag_id2, file_list)
def listx(self, tag_list):
    """Return ``{real_name: tags}`` for every file carrying *all* tags in
    ``tag_list``.

    The matching file set is the intersection of each query tag's file set.
    Files whose owner node no longer knows a real name are skipped.

    Args:
        tag_list: query tags (AND semantics).

    Returns:
        dict mapping each file's real name to its tag collection.
    """
    tag_id_list = [hash_str(t) for t in tag_list]

    def _storage(key_id):
        # Resolve the remote storage proxy of the node owning key_id.
        node = self.dht.find_successor(key_id)
        node_ro = get_remote(node)
        return get_remote(node_ro.get_remote_storage())

    # Intersect the file sets of all query tags.
    file_list = None
    for tag_id in tag_id_list:
        files = _storage(tag_id).get_files_of_tag(tag_id)
        if file_list is None:
            file_list = files
        else:
            file_list.intersection_update(files)

    # Guard: with an empty tag_list the original iterated None and raised
    # TypeError; treat it (and an empty intersection) as "no matches".
    if not file_list:
        return {}

    file_names = {}
    for file_id in file_list:
        storage = _storage(file_id)
        file = storage.get_real_name(file_id)
        if file is not None:
            file_names[file] = storage.get_tags_of_file(file)
    return file_names
def get_contents(self):
    """Return the parsed contents previously pickled for this URL.

    Raises:
        Exception: if the object has not reached the PARSED (or PROCESSED)
            state yet.
    """
    if self.state not in (self.State.PARSED, self.State.PROCESSED):
        raise Exception('Not parsed yet')
    store_path = './datastore/object_store/parsed_' + hash_str(self.url)
    with open(store_path, 'rb') as store:
        return pickle.load(store)
def get_raw_contents(self):
    """Return the raw (unparsed) text previously saved for this URL.

    Raises:
        Exception: if the page has not been fetched yet.
    """
    if self.state == self.State.UNFETCHED:
        raise Exception('Not fetched yet')
    store_path = './datastore/object_store/raw_' + hash_str(self.url)
    # NOTE(review): text mode with the platform-default encoding — confirm
    # it matches whatever save_raw_contents wrote with.
    with open(store_path, 'r') as store:
        return store.read()
def delete(self, tag_list):
    """Delete every file carrying *all* tags in ``tag_list``.

    The matching file set is the intersection of each query tag's file set;
    each matched file is deleted on its owner node, then each query tag's
    owner node drops the deleted files from the tag.

    Args:
        tag_list: query tags selecting the files to delete (AND semantics).
    """
    tag_id_list = [hash_str(t) for t in tag_list]

    def _storage(key_id):
        # Resolve the remote storage proxy of the node owning key_id.
        node = self.dht.find_successor(key_id)
        node_ro = get_remote(node)
        return get_remote(node_ro.get_remote_storage())

    # Intersect the file sets of all query tags.
    file_list = None
    for tag_id in tag_id_list:
        files = _storage(tag_id).get_files_of_tag(tag_id)
        if file_list is None:
            file_list = files
        else:
            file_list.intersection_update(files)

    # Guard: with an empty tag_list the original iterated None and raised
    # TypeError; an empty intersection also means there is nothing to do.
    if not file_list:
        return

    for file_id in file_list:
        _storage(file_id).del_files([file_id])

    for tag_id in tag_id_list:
        _storage(tag_id).remove_files_of_tag(tag_id, file_list)
def load_from_file_tags_backup(self, other_id: int, p_id: int):
    """Restore backed-up file->tags entries whose key hash falls in
    ``(other_id, p_id]`` into the live ``file_tags`` map."""
    load_keys = [
        key for key in self.file_tags_backups.keys()
        if inrange(hash_str(key), other_id, p_id, (False, True))
    ]
    print(f'Loading files {load_keys} from backup')
    # Move each entry out of the backup map into the live map.
    for key in load_keys:
        self.file_tags[key] = self.file_tags_backups.pop(key)
def download(self, file_name):
    """Yield the data chunks of ``file_name`` from the DHT node storing it."""
    file_id = hash_str(file_name)
    owner = get_remote(self.dht.find_successor(file_id))
    storage = get_remote(owner.get_remote_storage())
    # Delegate straight to the remote generator, chunk by chunk.
    yield from storage.download(file_id)
def release_file_tags(self, p_id: int, remote_storage):
    """Hand over file->tags entries whose key hash falls in
    ``(p_id, remote_storage.id]`` to another storage node.

    Fix: the original popped each entry *before* the remote call, so a
    failure mid-transfer silently lost the entries already popped. Entries
    are now removed locally only after the remote has accepted them.

    Args:
        p_id: lower (exclusive) bound of the released hash range.
        remote_storage: descriptor of the storage taking over the range.
    """
    release_keys = [key for key in self.file_tags.keys()
                    if inrange(hash_str(key), p_id, remote_storage.id, (False, True))]
    print(f'Releasing files {release_keys} to {remote_storage.id}')
    try:
        remote_storage_ro = get_remote(remote_storage)
        for file in release_keys:
            tag_list = self.file_tags[file]
            remote_storage_ro.add_file_to_tags(file, tag_list)
            # Remove locally only once the remote holds the entry.
            del self.file_tags[file]
    except Exception as ex:
        # Best-effort handover: log and keep the untransferred entries local.
        print(ex)
def do_login():
    """Handle a login POST: validate credentials, set a token cookie, and
    return the literal string "true" or "false"."""
    postdata = request.body.read()
    credentials = translate(postdata)
    result = check_login(credentials['user'], credentials['pass'])
    if result == False:
        return "false"
    # NOTE(review): the token is a deterministic hash of the username, so it
    # is predictable; consider a random token (secrets) if this guards
    # anything sensitive.
    token = hash_str(credentials['user'])
    response.set_cookie("token", token)
    save_token(credentials['user'], token)
    return "true"
def upload_to_files(self, realname, ip, port):
    """Replicate ``realname`` from a peer storage node into local storage.

    No-ops if the file is already registered. On failure the registration is
    rolled back (a partially written file, if any, is left on disk).
    """
    name = hash_str(realname)
    if name in self.files:
        # Already present locally — nothing to replicate.
        return
    self.files[name] = realname
    try:
        # NOTE(review): the URI names the remote object "ip:port" — confirm
        # that is really the peer's registered Pyro object name.
        with open(self._full_storage_path(realname), "wb+") as out:
            with Pyro4.Proxy(f'PYRO:{ip}:{port}@{ip}:{port}') as peer:
                for chunk in peer.download(name):
                    out.write(chunk)
    except Exception as err:
        # Roll back the registration so a later retry can succeed.
        del self.files[name]
        print(err)
        return
def upload(self, realname, realpath, ip, port):
    """Pull ``realpath`` from the client at ``ip:port`` and store it locally
    under ``realname``.

    Refuses duplicates (already uploading or uploaded). The "uploading"
    marker is cleared on failure so the transfer can be retried.
    """
    name = hash_str(realname)
    if self._is_uploading(name) or self._is_uploaded(name):
        print('FileExistsError')
        return
    self._add_uploading(name, realname)
    try:
        with open(self._full_storage_path(realname), "wb+") as out:
            with Pyro4.Proxy(f'PYRO:client@{ip}:{port}') as client:
                for chunk in client.read(realpath):
                    out.write(chunk)
    except Exception as err:
        # Drop the in-progress marker so a retry is possible.
        self._del_uploading(name)
        print(err)
        return
    # Success: promote from "uploading" to the uploaded-files map.
    self._del_uploading_add_files(name)
def set_contents(self, contents):
    """Pickle and persist the parsed ``contents`` for this URL.

    Fix: ``pickle.dump`` returns None, so the original's
    ``data = pickle.dump(...)`` assigned a dead variable; the assignment
    is dropped.
    """
    store_path = './datastore/object_store/parsed_' + hash_str(self.url)
    with open(store_path, 'wb') as f:
        pickle.dump(contents, f)
def __hash__(self):
    """Hash this node by its network address, so equal addresses collide."""
    address = f'{self.ip}:{self.port}'
    return hash_str(address)
def __init__(self, votee_key, parent_vote_id):
    """Create a vote for ``votee_key`` chained to ``parent_vote_id``."""
    self.votee_key = votee_key
    self.parent_vote_id = parent_vote_id
    # A freshly created vote counts as a single cast.
    self.cast_vote_count = 1
    # Derived id — computed last, since get_id_string() reads the fields
    # assigned above.
    self.vo_id = hash_str(self.get_id_string())
def calculate_hash(self):
    """Return the hash of this block's canonical string representation."""
    block_string = self.get_block_string()
    return hash_str(block_string)
def save_raw_contents(self, contents):
    """Persist the raw fetched text for this URL into the object store."""
    store_path = './datastore/object_store/raw_' + hash_str(self.url)
    # NOTE(review): text mode with the platform-default encoding — confirm
    # it matches the reader in get_raw_contents.
    with open(store_path, 'w') as f:
        f.write(contents)