def upload_again(session: Session, path: str):
    """Mutate *path* slightly and upload it as a new remote version, then dump history."""
    resolved = Path(path).resolve()
    # Append one byte so the upload produces a genuinely different version.
    with open(resolved, "ab") as handle:
        handle.write(b"1")
    node = LocalNode.create(resolved, session)
    remote_history = RemoteNodeHistory(key=node.key, etag=None, history=None)
    remote_history.load(session)
    upload(remote_history, node)(session)
    show_history(session, node.key)
def download_new_version(session: Session, path: str):
    """Pull the newest remote version of *path* over the local copy, then dump history."""
    local_path = Path(path).resolve()
    node = LocalNode.create(local_path, session)
    remote_history = RemoteNodeHistory(key=node.key, etag=None, history=None)
    remote_history.load(session)
    stored_history = models.StoredNodeHistory.get(
        models.StoredNodeHistory.key == node.key)
    download(remote_history, stored_history)(session)
    show_history(session, node.key)
def show_history(session: Session, key: str):
    """Pretty-print both the remote and the locally stored history for *key*."""
    print_line("History")
    print_line("Remote")
    remote = RemoteNodeHistory(key=key, etag=None, history=None)
    remote.load(session)
    pprint(remote.history.dict())
    stored = models.StoredNodeHistory.get_or_none(
        models.StoredNodeHistory.key == key)
    print_line("Stored")
    # NOTE(review): `recored_as_dict` is the helper's actual (misspelled) name
    # in the models module — do not "fix" the spelling here.
    pprint(models.recored_as_dict(stored) if stored else None)
    print_line("End History")
def delete_remote(
    remote_history: RemoteNodeHistory,
    stored_history: StoredNodeHistory,
    session: Session,
) -> SyncActionResult:
    """Delete a node's remote object and its local DB row, leaving a delete marker.

    The remote history itself is kept (with a delete marker appended) so other
    clients can learn about the deletion.
    """
    history = cast(NodeHistory, remote_history.history)
    # Remove the cached signature for the most recent entry first.
    signature_file = session.signature_folder / history.last.key
    signature_file.unlink()
    s3util.delete_file(
        session.s3_client,
        session.storage_bucket,
        f"{session.s3_prefix}/{history.path}",
    )
    # Record the deletion rather than erasing the history outright.
    history.add_delete_marker()
    remote_history.save(session)
    stored_history.delete().execute()
    return SyncActionResult()
def clear_remote(session: Session):
    """Upload a throwaway 1 MB file, then delete it remotely and locally; dump history."""
    # upload
    path = create_file(session.root_folder.path, MB(1))
    node = LocalNode.create(path, session)
    upload(None, node)(session)
    remote_history = RemoteNodeHistory(key=node.key, etag=None, history=None)
    remote_history.load(session)
    stored_history = models.StoredNodeHistory.get(
        models.StoredNodeHistory.key == node.key)
    delete_remote(remote_history, stored_history)(session)
    node.local_path.unlink()
    show_history(session, node.key)
def fetch_history(
    session: Session
) -> Tuple[List[RemoteNodeHistory], List[StoredNodeHistory]]:
    """Collect per-node histories from S3 and the local DB, paired by key.

    Returns a tuple ``(remote_histories, stored_histories)``; for any given
    key either side of the pair may be absent.
    """
    # Every node's serialized history lives as one object under the
    # sync-metadata prefix in the internal bucket.
    remote_history_versions = list_versions(
        session.s3_client,
        session.internal_bucket,
        f"{session.s3_prefix}/{session.sync_metadata_prefix}/history/",
    )
    remote_history = (RemoteNodeHistory.from_s3_object(v)
                      for v in remote_history_versions)
    stored_history = StoredNodeHistory.select().where(
        StoredNodeHistory.root_folder == RootFolder.for_session(session))
    # groupby requires input sorted by the grouping key.  The sort is stable,
    # so remote items stay ahead of stored items for the same key (chain order).
    all_history = list(chain(remote_history, stored_history))
    all_history.sort(key=lambda h: h.key)
    rows = [
        HistoryRow.create(key, history)
        for key, history in groupby(all_history, key=lambda h: h.key)
    ]
    # A HistoryRow unpacks to (key, remote, stored); either record may be None.
    for _, remote, stored in rows:
        if remote:
            # Only fetch the remote history body when the stored copy is
            # missing or stale (etag mismatch); otherwise reuse the local copy.
            if not stored or remote.etag != stored.remote_history_etag:
                remote.load(session)
            else:
                remote.history = copy(stored.history)
    return ([r for _, r, s in rows if r is not None],
            [s for _, _, s in rows if s is not None])
def remote(self, deleted=False, **extra_attrs):
    """Build a RemoteNodeHistory from this record's attributes.

    Keyword arguments in *extra_attrs* override the defaults derived
    from ``self``.
    """
    history_stub = Bunch(etag=self.etag, deleted=deleted)
    attrs = dict(
        key=self.key,
        etag=self.history_etag,
        history=history_stub,
    )
    attrs.update(extra_attrs)
    return RemoteNodeHistory(**attrs)
def download_first_time(session: Session):
    """Round-trip check: upload a fresh file, wipe the local copy, download it back."""
    # upload
    path = create_file(session.root_folder.path, MB(1))
    node = LocalNode.create(path, session)
    upload(None, node)(session)
    # clear local
    stored = models.StoredNodeHistory.get(
        models.StoredNodeHistory.key == node.key)
    delete_local(node, stored)(session)
    # download
    remote_history = RemoteNodeHistory(key=node.key, etag=None, history=None)
    remote_history.load(session)
    download(remote_history, None)(session)
    show_history(session, node.key)
def create_full_version(session: Session, local_path: str):
    """Materialize a full base object for the newest (delta-only) history entry.

    Reconstructs the latest content by downloading the previous entry's base
    version and applying the newest entry's delta, uploads the result as a new
    base, and records its version id on the entry.
    """
    node = LocalNode.create(Path(local_path).resolve(), session)
    remote_history = RemoteNodeHistory(history=None, key=node.key, etag=None)
    remote_history.load(session)
    history = remote_history.history
    last = history.last
    prev = history.entries[-2]
    # Preconditions: the newest entry is delta-only, and the one before it
    # carries a full base we can patch against.
    assert last.base_version is None
    assert last.has_delta
    assert prev.base_version is not None
    with create_temp_file() as base_path:
        s3_path = f"{session.s3_prefix}/{node.path}"
        # Fetch the previous full version as the patch base.
        download_file(session.s3_client, session.storage_bucket, s3_path,
                      base_path, version=prev.base_version)
        # Apply the newest entry's delta in place on the downloaded base.
        patch_file(session, base_path, [last.key])
        upload_file(
            session.s3_client,
            base_path,
            session.storage_bucket,
            s3_path,
        )
        obj = get_file_metadata(session.s3_client, session.storage_bucket,
                                s3_path)
        # Promote the entry to a full version by recording the new base.
        last.base_version = obj["VersionId"]
        # NOTE(review): assumes get_file_metadata exposes a "Size" key —
        # raw S3 head_object reports "ContentLength"; verify the wrapper.
        last.base_size = int(obj.get("Size", 0))
    remote_history.save(session)
def handle_node(remote: RemoteNodeHistory, local: LocalNode,
                stored: StoredNodeHistory) -> SyncAction:
    """Decide which sync action reconciles the three views of a single node.

    Each argument may be None, meaning that view of the node is absent:
    *remote* is the history object in S3, *local* the file on disk, *stored*
    the row in the local DB. Returns a SyncAction to be invoked with the
    session.

    Fix: removed two unreachable ``return nop()`` statements that followed
    if/else ladders whose every branch already returned.
    """
    if not remote and not local and not stored:
        # Node unknown everywhere: nothing to do.
        return nop()
    elif not remote and not local and stored:
        # Only a stale DB row remains.
        return delete_history(stored)
    elif not remote and local and not stored:
        # Brand-new local file: first upload.
        return upload(None, local)
    elif not remote and local and stored:
        # History says it existed remotely, but the remote record vanished.
        return delete_local(local, stored)
    elif remote.exists and not local and not stored:
        # Exists remotely only: first-time download.
        return download(remote, None)
    elif remote.exists and not local and stored:
        # User removed the local copy: propagate the deletion remotely.
        return delete_remote(remote, stored)
    elif remote.deleted and not local and stored:
        return delete_history(stored)
    elif remote and local and not stored:
        if remote.deleted:
            # NOTE(review): stored is None on this path — delete_local must
            # tolerate that; confirm against its implementation.
            return delete_local(local, stored)
        elif cast(NodeHistory, remote.history).etag == local.etag:
            # Identical content on both sides; just record the history locally.
            return save_history(remote, local)
        else:
            return conflict(remote, local, stored)
    elif remote and local and stored:
        local_updated = local.updated(stored)
        remote_updated = remote.updated(stored)
        if remote.deleted:
            if local_updated:
                # Deleted remotely but edited locally: user must decide.
                return conflict(remote, local, stored)
            else:
                return delete_local(local, stored)
        elif local_updated and remote_updated:
            # Both sides changed: identical content is a no-op, else conflict.
            if cast(NodeHistory, remote.history).etag == local.etag:
                return nop()
            else:
                return conflict(remote, local, stored)
        elif local_updated:
            return upload(remote, local)
        elif remote_updated:
            return download(remote, stored)
        else:
            return nop()
    return nop()
def upload(remote_history: Optional[RemoteNodeHistory], node: LocalNode,
           session: Session) -> SyncActionResult:
    """
    1. Without remote history:
        - Calc signature
        - Generate id
        - Create new history
        - Upload base
        - Upload history
        - Store history in local DB
    2. With remote history:
        - Generate key
        - Calc delta
        - Calc signature
        - Upload delta
        - Upload signature
        - Add history record
        - Upload history
        - Store history in local DB

    Fix: the stored ``local_modified_time`` / ``local_created_time`` fields
    were assigned from the swapped node attributes (created <-> modified) in
    both the update and the create branches; they now map 1:1.

    NOTE(review): callers elsewhere invoke this as ``upload(a, b)(session)``,
    which suggests a currying decorator not visible in this chunk — confirm.
    """
    new_key = NodeHistoryEntry.generate_key()
    if remote_history is not None:
        # Incremental upload: ship a delta against the previous version plus a
        # fresh signature so future deltas can be computed against this one.
        history = cast(NodeHistory, remote_history.history)
        with create_temp_file() as delta_path:
            calc_delta(session, node.local_fspath, history.last.key,
                       delta_path)
            file_transfer.upload_metadata(session, delta_path, new_key,
                                          "delta")
            delta_size = Path(delta_path).stat().st_size
        with create_temp_file() as signature_path:
            calc_signature(session, node.local_fspath, new_key,
                           signature_path)
            file_transfer.upload_metadata(session, signature_path, new_key,
                                          "signature")
        history.add_entry(
            NodeHistoryEntry.create_delta_only(new_key, node.calc_etag(),
                                               delta_size))
    else:
        # First upload: signature + full base object + brand-new history.
        with create_temp_file() as signature_path:
            calc_signature(session, node.local_fspath, new_key,
                           signature_path)
            file_transfer.upload_metadata(session, signature_path, new_key,
                                          "signature")
        version = file_transfer.upload_to_root(session, node)
        history = NodeHistory(key=node.key, path=node.path, entries=[])
        history.add_entry(
            NodeHistoryEntry.create_base_only(new_key, node.calc_etag(),
                                              version, node.size))
        remote_history = RemoteNodeHistory(history=history, key=node.key,
                                           etag=None)
    remote_history.save(session)
    stored_history = StoredNodeHistory.get_or_none(
        StoredNodeHistory.key == history.key)
    if stored_history is not None:
        stored_history.data = history.dict()
        stored_history.remote_history_etag = remote_history.etag
        # BUG FIX: these two assignments were swapped in the original.
        stored_history.local_modified_time = node.modified_time
        stored_history.local_created_time = node.created_time
        stored_history.save()
    else:
        StoredNodeHistory.create(
            key=remote_history.key,
            root_folder=RootFolder.for_session(session),
            data=cast(NodeHistory, remote_history.history).dict(),
            # BUG FIX: created/modified were swapped in the original.
            local_modified_time=node.modified_time,
            local_created_time=node.created_time,
            remote_history_etag=remote_history.etag)
    return SyncActionResult()