def update_or_create(cls, node: StoredNode, chunk: common.NodeChunk) -> NodeChunk:
    """Insert or refresh the (node, chunk) association atomically.

    Ensures the backing ``Chunk`` row exists, creates the link row if it
    is missing, and keeps the link's offset in sync with the incoming
    chunk description.

    Returns the up-to-date link row.
    """
    with atomic():
        # The chunk row itself must exist before we can link to it.
        backing_chunk = Chunk.update_or_create(chunk.hash, chunk.size)
        link, _created = cls.get_or_create(
            node=node,
            chunk=backing_chunk,
            defaults={"offset": chunk.offset},
        )
        # A pre-existing link may carry a stale offset; realign it.
        if link.offset != chunk.offset:
            link.offset = chunk.offset
            link.save()
        return link
def exchange_with_db(self):
    """Synchronise this market with its persisted counterpart.

    If no row exists yet, persist the current state as-is. Otherwise
    merge the stored state into this instance and write the merged
    result back. Everything happens inside a single transaction.
    """
    with atomic():
        row = models.Market.find(self.namespace, self.key)
        if row is None:
            # First sync for this (namespace, key): store our state.
            models.Market.create(
                namespace=models.Namespace.by_name(self.namespace),
                key=self.key,
                data=self.dump()
            )
            return
        # Fold the persisted state into ours, then persist the union.
        self.merge(Market.load(row.data))
        row.data = self.dump()
        row.save()
def create_node_placeholder(remote_node: RemoteNode, session: Session) -> FullNode:
    """Materialise a local placeholder for a node announced by a remote peer.

    Creates the node under a temporary random path/key first, pre-fills any
    chunks that are already available locally, then atomically swaps the
    placeholder into the remote node's real key and path.

    Returns a FullNode bundling the stored/local node, all chunk
    descriptors, the hash index, and the sets of still-needed vs.
    locally-satisfied chunk hashes.
    """
    with atomic():
        # Create under a throwaway path/key so a concurrent record with the
        # real key (deleted below) cannot collide with this insert.
        temp_path = str(uuid4())
        stored_node = StoredNode.create(
            namespace=remote_node.namespace,
            root_folder=RootFolder.for_session(session),
            key=hash_path(temp_path),
            path=temp_path,
            checksum=remote_node.checksum,
            size=remote_node.size,
            # Local timestamps are unknown until the file actually exists.
            local_modified_time=0,
            local_created_time=0,
            # Not ready: content has not been downloaded yet.
            ready=False,
            signature=remote_node.signature
        )
        # Sparse/empty file of the right size to receive chunk writes.
        local_node = LocalNode.create_placeholder(
            stored_node.local_path, stored_node.size, session
        )
        all_chunks = [NodeChunk(**c) for c in remote_node.chunks]
        # Group chunk descriptors by content hash — the same hash may occur
        # at several offsets within the file.
        chunk_index = index_by("hash")(all_chunks)
        needed_chunks: Set[str] = set()
        available_chunks: Set[str] = set()
        for chunk_hash, chunks in chunk_index.items():
            # NodeChunkModel.find appears to yield (chunk_row, reader) when
            # the bytes exist locally — TODO confirm against its definition.
            node_chunk_pair = NodeChunkModel.find(session.namespace, chunk_hash)
            if node_chunk_pair:
                available_chunk, read_chunk = node_chunk_pair
                logging.info(
                    "[CHUNK] found local chunk for node [%s]: [%r]",
                    local_node.path, available_chunk
                )
                # Write the same payload at every offset that needs it.
                for chunk in chunks:
                    local_node.write_chunk(chunk, read_chunk())
                available_chunks.add(chunk_hash)
            else:
                needed_chunks.add(chunk_hash)
        # Evict any stale record holding the real key, then claim it.
        StoredNode.delete().where(StoredNode.key == remote_node.key).execute()
        stored_node.key = remote_node.key
        stored_node.path = remote_node.path
        stored_node.save()
        # Move the placeholder file to the path derived from the real key,
        # then re-open it as a regular local node.
        shutil.move(local_node.local_path, stored_node.local_path)
        local_node = LocalNode.create(stored_node.local_path, session)
        return FullNode(stored_node, local_node, all_chunks, chunk_index, needed_chunks, available_chunks)
def store_new_node(
    local_node: LocalNode, session: Session, stored_node: Optional[StoredNode]
) -> FullNode:
    """Persist a StoredNode record for a local file, replacing any prior one.

    When an older stored node exists, its signature is reused to compute
    the chunk set; otherwise a fresh signature is calculated. The new row
    is created under a temporary key, the old row (if any) is deleted,
    and the new row then takes over the local node's real key.

    Returns a FullNode where every chunk hash is locally available.
    """
    if stored_node is None:
        # No prior record: derive signature and chunks from scratch.
        signature = local_node.calc_signature(format="base64")
        chunks = local_node.calc_chunks(None)
    else:
        # Reuse the previous signature so chunking stays delta-friendly.
        signature = stored_node.signature
        chunks = local_node.calc_chunks(signature=signature)
    with atomic():
        new_node = StoredNode.create(
            namespace=Namespace.for_session(session),
            root_folder=RootFolder.for_session(session),
            # Temporary random key — swapped for the real one below, after
            # the old record (which may hold that key) is removed.
            key=str(uuid4()),
            path=local_node.path,
            checksum=local_node.checksum,
            size=local_node.size,
            local_modified_time=local_node.modified_time,
            local_created_time=local_node.created_time,
            ready=False,
            signature=signature
        )
        # Register every chunk against the freshly created node row.
        for node_chunk in chunks:
            NodeChunkModel.update_or_create(new_node, node_chunk)
        if stored_node:
            stored_node.delete_instance()
        # Claim the real key and flip the node to ready in one commit.
        new_node.key = local_node.key
        new_node.ready = True
        new_node.save()
        chunk_index = index_by("hash")(chunks)
        # Nothing is missing: the file is local, so all hashes are available.
        return FullNode(
            new_node, local_node, chunks, chunk_index, set(), set(chunk_index.keys())
        )
def on_done(self, result):
    """Handle a completed chunk download: verify, persist, advertise.

    Validates the payload against the expected hash, writes it to every
    file position that needs this chunk, marks the chunk as provided on
    the market, and triggers a market exchange with at most one peer
    known to consume the chunk.

    Note: relies on closure variables (local_node, session, stored_node,
    market, available_chunks, client_pool, peer_registry, tasks) from the
    enclosing scope.
    """
    # Bug fix: the log message previously read "form %s".
    logging.info(
        "[CHUNK] Chunk downloaded [%s:%r] from %s",
        local_node.path, self.chunk_hash, self.client.peer.device_id)
    session.stats.emit_chunk_download(
        (session.namespace, stored_node.key, stored_node.checksum),
        self.client.peer, len(result))
    # Every entry in self.chunks shares this hash, so checking one entry
    # validates the payload for all — presumably; confirm where self.chunks
    # is populated.
    self.chunks[0].check(result)
    with atomic():
        # Same bytes written at every offset that references this hash.
        for chunk in self.chunks:
            local_node.write_chunk(chunk, result)
        market.provide_chunk(self.chunk_hash)
        available_chunks.add(self.chunk_hash)
    # Offer the updated market to at most one interested peer.
    chunk_consumers = set(market.find_consumers(self.chunk_hash))
    clients = list(
        client_pool.try_aquire_peers(
            (peer for peer in peer_registry.iter_peers(session.namespace)
             if peer.device_id in chunk_consumers),
            max_count=1))
    for client in clients:
        tasks.submit(ExchangeMarketTask(client, market, session))