def sync(self):
    """Continuously mirror files under ``self.tmp_dir`` to Dropbox.

    Walks the temp directory, uploads every file that does not yet exist
    remotely (chunked when larger than ``self.CHUNK_SIZE``), verifies the
    remote content hash, deletes the local copy on a match, then sleeps
    and repeats forever.  Never returns.
    """
    self._log('Starting sync on dir {}...'.format(self.tmp_dir))
    while True:
        for file_path in [
                os.path.join(dp, f)
                for dp, dn, names in os.walk(self.tmp_dir)
                for f in names]:
            local_hash = DropboxContentHasher.calculate(file_path)
            # Remote path mirrors the local layout relative to tmp_dir.
            remote_path = file_path[len(self.tmp_dir):]
            if not self.exists(remote_path):
                directory = os.path.dirname(remote_path)
                if directory and not self.exists(directory):
                    self._get_client().files_create_folder_v2(directory)
                with open(file_path, 'rb') as f:
                    file_size = os.path.getsize(file_path)
                    # BUGFIX: arguments were swapped — the message printed
                    # the size where the path belongs and the path as the
                    # byte count.
                    self._log(
                        'Uploading {} [{} bytes] ({}) to {}...'.format(
                            file_path, file_size, local_hash, remote_path))
                    if file_size <= self.CHUNK_SIZE:
                        self._get_client().files_upload(
                            f.read(), remote_path)
                    else:
                        self._log('Starting session...')
                        upload_session_start_result = self._get_client(
                        ).files_upload_session_start(
                            f.read(self.CHUNK_SIZE))
                        cursor = files.UploadSessionCursor(
                            session_id=upload_session_start_result.session_id,
                            offset=f.tell())
                        commit = files.CommitInfo(path=remote_path)
                        while f.tell() < file_size:
                            self._print_progress(f.tell(), file_size)
                            if file_size - f.tell() <= self.CHUNK_SIZE:
                                self._get_client(
                                ).files_upload_session_finish(
                                    f.read(self.CHUNK_SIZE), cursor, commit)
                                self._print_progress(file_size, file_size)
                                self._log('Session closed!')
                            else:
                                self._get_client(
                                ).files_upload_session_append_v2(
                                    f.read(self.CHUNK_SIZE), cursor)
                            cursor.offset = f.tell()
                self._log('Upload completed!')
            # Re-hash and compare against the remote copy before deleting,
            # so we never remove data that did not land intact.
            local_hash = DropboxContentHasher.calculate(file_path)
            remote_hash = self._get_client().files_get_metadata(
                remote_path).content_hash
            self._log(
                'Remote file found with hash {}!'.format(remote_hash))
            if local_hash == remote_hash:
                self._log('Removing {}...'.format(file_path))
                os.unlink(file_path)
                self._log(
                    'File successfully uploaded: {} '.format(file_path))
        # Poll quickly in DEBUG, hourly otherwise (idiom fix: conditional
        # expression instead of the fragile `and/or` trick).
        sleep_time = 10 if settings.DEBUG else 60 * 60
        self._log('Sleeping for {} seconds...'.format(sleep_time))
        time.sleep(sleep_time)
def _upload(self, oid, file_like, metadata=None) -> OInfo:
    """Upload ``file_like`` to ``oid``, using a chunked upload session for
    files at or above ``self.large_file_size``.

    Returns an ``OInfo`` describing the stored file; raises
    ``CloudFileExistsError`` when no upload response was obtained.
    """
    metadata = metadata or {}
    # Measure the stream by seeking to the end, then rewind for reading.
    file_like.seek(0, io.SEEK_END)
    size = file_like.tell()
    file_like.seek(0)

    response = None
    if size < self.large_file_size:
        # Small enough for a single-call upload.
        response = self._api('files_upload', file_like.read(), oid,
                             mode=files.WriteMode('overwrite'))
    else:
        session_cursor = None
        while True:
            chunk = file_like.read(self.upload_block_size)
            if chunk:
                if session_cursor is None:
                    # First chunk opens the session.
                    response = self._api('files_upload_session_start', chunk)
                    session_cursor = files.UploadSessionCursor(
                        response.session_id, len(chunk))
                else:
                    self._api('files_upload_session_append_v2',
                              chunk, session_cursor)
                    session_cursor.offset += len(chunk)
                continue
            # EOF: commit the session (the final call carries no payload).
            if session_cursor:
                local_mtime = arrow.get(
                    metadata.get('mtime', time.time())).datetime
                commit = files.CommitInfo(
                    path=oid, mode=files.WriteMode.overwrite,
                    autorename=False, client_modified=local_mtime,
                    mute=True)
                response = self._api('files_upload_session_finish',
                                     chunk, session_cursor, commit)
            break

    if response is None:
        raise CloudFileExistsError()
    ret = OInfo(otype=FILE, oid=response.id, hash=response.content_hash,
                path=response.path_display, size=size,
                mtime=self._mtime_from_metadata(response))
    log.debug('upload result is %s', ret)
    return ret
def upload_dropbox_file_chucks(file_path, file_size):
    """Upload a large local file to Dropbox in ``FILE_CHUNK_SIZE`` pieces.

    ``file_path`` is the local path and is also used as the destination
    path in Dropbox; ``file_size`` is the file's total size in bytes.
    """
    dbx = Dropbox(DROPBOX_API_KEY)
    # BUGFIX: the file handle was opened and never closed; a context
    # manager guarantees release even when an upload call raises.
    with open(file_path, 'rb') as f:
        session = dbx.files_upload_session_start(f.read(FILE_CHUNK_SIZE))
        cursor = dbx_files.UploadSessionCursor(
            session_id=session.session_id, offset=f.tell())
        commit = dbx_files.CommitInfo(path=file_path)
        while f.tell() < file_size:
            if (file_size - f.tell()) <= FILE_CHUNK_SIZE:
                # Last chunk: finish the session and commit the file.
                dbx.files_upload_session_finish(
                    f.read(FILE_CHUNK_SIZE), cursor, commit)
            else:
                # Consistency fix: `files_upload_session_append` is the
                # deprecated v1 call; append_v2 takes the cursor directly,
                # matching the other upload helpers in this file.
                dbx.files_upload_session_append_v2(
                    f.read(FILE_CHUNK_SIZE), cursor)
            cursor.offset = f.tell()
def _upload_file(self, input_file, filepath):
    """Stream ``input_file`` to Dropbox at ``filepath`` via an upload
    session, appending 1 MiB chunks until the stream is exhausted."""
    from dropbox import files

    # Open an empty session; all payload bytes are appended afterwards.
    session = self.client.files_upload_session_start('')
    offset = 0
    chunk = input_file.read(2**20)
    while chunk:
        self.client.files_upload_session_append(
            chunk, session.session_id, offset)
        offset += len(chunk)
        chunk = input_file.read(2**20)
    # Close the session with an empty final payload and commit the file.
    cursor = files.UploadSessionCursor(session.session_id, offset)
    self.client.files_upload_session_finish(
        '', cursor, files.CommitInfo(path='{}'.format(filepath)))
def upload_file(self, file_src, file_dst, chunk_size, autorename=False):
    """Upload ``file_src`` to Dropbox path ``file_dst`` with a tqdm
    progress bar, chunking files larger than ``chunk_size``.

    Returns the Dropbox API response of the final upload call, or None
    if nothing was uploaded.  Raises ``error.ActionException`` on API
    failure.
    """
    file_size = os.path.getsize(file_src)
    response = None
    pb = tqdm(total=file_size, unit="B", unit_scale=True,
              desc=os.path.basename(file_src), miniters=1, ncols=80,
              mininterval=1)
    try:
        with open(file_src, 'rb') as f:
            if file_size <= chunk_size:
                response = self.client.files_upload(
                    f.read(), file_dst, autorename=autorename)
                # BUGFIX: the single-call path never advanced the progress
                # bar, leaving it stuck at 0% for small files.
                pb.update(file_size)
            else:
                session_start = self.client.files_upload_session_start(
                    f.read(chunk_size))
                cursor = files.UploadSessionCursor(
                    session_id=session_start.session_id, offset=f.tell())
                commit = files.CommitInfo(path=file_dst,
                                          autorename=autorename)
                while f.tell() < file_size:
                    # The bar lags one chunk behind the stream (the
                    # session_start chunk is counted on the first pass),
                    # so totals line up exactly at the end.
                    pb.update(chunk_size)
                    if file_size - f.tell() <= chunk_size:
                        pb.update(file_size - f.tell())
                        response = self.client.files_upload_session_finish(
                            f.read(chunk_size), cursor, commit)
                    else:
                        self.client.files_upload_session_append_v2(
                            f.read(chunk_size), cursor)
                    cursor.offset = f.tell()
    except exceptions.ApiError as exc:
        # BUGFIX: ``exc.error.get_path()`` is only valid for path errors;
        # calling it unconditionally raised inside the handler for other
        # ApiError reasons, masking the original failure.
        try:
            reason = exc.error.get_path().reason
        except AttributeError:
            reason = exc.error
        msg = "An error occurred while uploading '{0}': {1}.".format(
            file_src, reason)
        raise error.ActionException(msg) from exc
    finally:
        pb.close()
    return response
def push_to_dropbox(branch_name, symbol, gui):
    """Archive every saves directory whose name starts with ``symbol``
    into zips and upload them to the ``/branch_name`` folder in Dropbox.

    Returns a human-readable status string for the console/GUI.
    """
    global settings
    saves_path = settings["SAVES_DIR"]
    temp_dir = settings["TEMP_DIR"]
    if settings["OAUTH"] == 'null':
        return "Please type in /login to use this feature"
    # clear temp_dir
    for path_temp in listdir(temp_dir):
        remove(path.join(temp_dir, path_temp))
    # archive worlds starting with 'symbol' to temp_dir
    for path_save in listdir(saves_path):
        file_path = path.join(saves_path, path_save)
        if path.isdir(file_path) and path_save[0] == symbol:
            make_archive(path.join(temp_dir, path_save), 'zip', file_path)
    dbx = Dropbox(settings["OAUTH"].access_token)
    # BUGFIX: the confirmation dialog used to sit inside the same
    # try/except-pass as the remote delete, so any dialog error silently
    # skipped the confirmation and deleted the branch anyway.  Only the
    # delete itself is best-effort now.
    if settings["CONFIRM"]:
        confirm = simpledialog.askstring(
            "Confirm",
            "Type in 'YES' if you wish to proceed. This will delete the current '{0}'"
            " branch if it already exists in dropbox".format(branch_name))
        if not confirm == "YES":
            # TODO(review): message looks truncated ("Action Not ") —
            # confirm the intended wording before changing it.
            return "Action Not "
    try:
        # Best effort: the branch may simply not exist yet.
        dbx.files_delete("/" + branch_name)
    except Exception:
        pass
    println("Starting upload... ", gui)
    println(
        "Do not close the app until 'done uploading' message is shown on the console",
        gui)
    # upload every zip file to dropbox in temp_dir
    for path_temp in listdir(temp_dir):
        zip_file = path.join(temp_dir, path_temp)
        destination = "/" + branch_name + "/" + path_temp
        with open(zip_file, "rb") as f:
            file_size = path.getsize(zip_file)
            if file_size < CHUNK_SIZE:
                dbx.files_upload(f.read(), destination)
            else:
                # upload_session_start_result
                upload_ssr = dbx.files_upload_session_start(
                    f.read(CHUNK_SIZE))
                cursor = files.UploadSessionCursor(
                    session_id=upload_ssr.session_id, offset=f.tell())
                commit = files.CommitInfo(path=destination)
                while f.tell() < file_size:
                    percent = str(f.tell() / file_size * 100) + "%"
                    print(percent)
                    if (file_size - f.tell()) <= CHUNK_SIZE:
                        dbx.files_upload_session_finish(
                            f.read(CHUNK_SIZE), cursor, commit)
                    else:
                        dbx.files_upload_session_append(
                            f.read(CHUNK_SIZE), cursor.session_id,
                            cursor.offset)
                    cursor.offset = f.tell()
    # clear temp_dir
    for path_temp in listdir(temp_dir):
        remove(path.join(temp_dir, path_temp))
    save(settings)
    return "Done Uploading"
key=lambda f: os.path.getctime("{}/{}".format(BACKUP_DIR, f))) if DROPBOX_UPLOAD: print('-- Syncing with Dropbox...') try: backup = open(BACKUP_DIR + '/' + backups_in_folder[-1], 'rb') file_size = os.path.getsize(BACKUP_DIR + '/' + backups_in_folder[-1]) CHUNK_SIZE = 30 * 1024 * 1024 dbx = dropbox.Dropbox(DROPBOX_UPLOAD_ACCESSKEY) if file_size <= CHUNK_SIZE: dbx.files_upload(backup, '/' + backups_in_folder[-1]) else: upload_session_start_result = dbx.files_upload_session_start( backup.read(CHUNK_SIZE)) cursor = files.UploadSessionCursor( session_id=upload_session_start_result.session_id, offset=backup.tell()) commit = files.CommitInfo(path='/' + backups_in_folder[-1]) while backup.tell() < file_size: if (file_size - backup.tell()) <= CHUNK_SIZE: dbx.files_upload_session_finish(backup.read(CHUNK_SIZE), cursor, commit) else: dbx.files_upload_session_append(backup.read(CHUNK_SIZE), cursor.session_id, cursor.offset) cursor.offset = backup.tell() except Exception, e: print('* ERROR:' + str(e))