def upload(dropbox_helper_id, access_token, size, max_retries):
    """Upload the file tracked by a DropboxUploadHelper row to Dropbox in chunks.

    Args:
        dropbox_helper_id: primary key of the DropboxUploadHelper that records
            the source path (``src``), destination path (``dest``) and progress.
        access_token: Dropbox OAuth access token.
        size: total size of the source file in bytes.
        max_retries: number of chunk-upload failures tolerated before giving up.

    Any failure is recorded on ``helper.failure_reason``; a chunk failure past
    ``max_retries`` is recorded and then re-raised into the outer handler.
    """
    from .models import DropboxUploadHelper
    helper = DropboxUploadHelper.objects.get(id=dropbox_helper_id)
    client = DropboxClient(access_token)
    retries = 0
    try:
        with open(helper.src, 'rb') as f:
            uploader = client.get_chunked_uploader(f, size)
            while uploader.offset < size:
                # Persist progress as a 0..1 fraction so other requests can
                # poll it; float() avoids Py2 integer division yielding 0.
                helper.progress = uploader.offset / float(size)
                helper.save()
                try:
                    uploader.upload_chunked()
                except ErrorResponse as e:
                    # BUG FIX: the retry cap is the max_retries *argument*,
                    # not an attribute on the helper model (the parameter was
                    # previously unused).
                    if retries < max_retries:
                        retries += 1
                    else:
                        helper.failure_reason = str(e)
                        helper.save()
                        raise
            upload = uploader.finish(helper.dest)
    except Exception as e:
        helper.failure_reason = str(e)
        helper.save()
def post(self):
    """Task-queue handler: push a session's presentation blob to Dropbox.

    Reads the session key and blob info from the task payload, streams the
    blobstore file to Dropbox with a chunked uploader, then records the
    result on the SessionData entity. Does nothing if the presentation was
    already uploaded.
    """
    key = self.request.get('session_key')
    blob_info = self.request.get('blob_info')
    logging.info('Task Queues returns key: %s, blob_info: %s.' % (key, blob_info))
    session = SessionData.get(key)
    logging.info('Task Queues returns key: %s, blob_info: %s, results in session: %s.' % (key, blob_info, session))
    client = DropboxClient(DB_TOKEN, "en_US", rest_client=None)
    if session.presentation_uploaded_to_db:
        return
    f = session.blob_store_key.open()
    try:
        size = session.blob_store_key.size
        uploader = client.get_chunked_uploader(f, size)
        # BUG FIX: the original bare `except` logged and looped forever on a
        # persistent failure; cap consecutive failures and surface the real
        # exception so the task queue can retry the whole task.
        consecutive_failures = 0
        while uploader.offset < size:
            try:
                uploader.upload_chunked()
                consecutive_failures = 0
            except Exception:
                logging.exception("Drop Box Error")
                consecutive_failures += 1
                if consecutive_failures >= 3:
                    raise
        filename = session.lastname + '_' + session.filename
        date = session.session_date if session.session_date else 'no-date-provided'
        # folder structure /beta/room/date/lastname/filename
        response = uploader.finish(
            '/beta/%s/%s/%s/%s' % (session.session_room, date, session.lastname, filename),
            overwrite=True)
        session.presentation_uploaded_to_db = True
        # NOTE(review): this stores the response's MIME type where a *path*
        # field is expected — looks like a bug, but downstream readers may
        # depend on it; confirm before changing.
        session.presentation_db_path = response['mime_type']
        session.presentation_db_size = response['size']
        session.put()
    finally:
        # BUG FIX: close the blobstore handle even when the upload fails.
        f.close()
    return
def dropbox_upload(request):
    """Django view: upload a POST-ed file to the user's Dropbox root.

    Small files are sent with a single ``put_file`` call; files large enough
    for Django to chunk are streamed with a chunked uploader. Always renders
    the file-view template.
    """
    if request.method == 'POST':
        # POST-ed file, file name, path to upload to
        fupload = request.FILES['file']
        fname = fupload.name
        fsize = fupload.size
        path = '/' + fname
        # Make connection to dropbox API
        token = UserProfile.objects.get(user=request.user).dropbox_token
        client = DropboxClient(token)
        if fupload.multiple_chunks():
            # File is large enough to require chunked uploading.
            # BUG FIX: get_chunked_uploader takes the byte *length* as its
            # second argument — the original passed the filename string.
            uploader = client.get_chunked_uploader(fupload, fsize)
            print('uploading: ', fsize)
            while uploader.offset < fsize:
                try:
                    uploader.upload_chunked()
                except rest.ErrorResponse as e:
                    print('ERROR WHILE UPLOADING CHUNKED: ', str(e))
            uploader.finish(path)
        else:
            response = client.put_file(path, fupload)
            print('uploaded: ', response)
    return render(request, 'website/dropbox/fileview.html')
def upload(dropbox_helper_id, access_token, size, max_retries):
    """Upload the file tracked by a DropboxUploadHelper row to Dropbox in chunks.

    Args:
        dropbox_helper_id: primary key of the DropboxUploadHelper that records
            the source path (``src``), destination path (``dest``) and progress.
        access_token: Dropbox OAuth access token.
        size: total size of the source file in bytes.
        max_retries: number of chunk-upload failures tolerated before giving up.

    Any failure is recorded on ``helper.failure_reason``; a chunk failure past
    ``max_retries`` is recorded and then re-raised into the outer handler.
    """
    from .models import DropboxUploadHelper
    helper = DropboxUploadHelper.objects.get(id=dropbox_helper_id)
    client = DropboxClient(access_token)
    retries = 0
    try:
        with open(helper.src, 'rb') as f:
            uploader = client.get_chunked_uploader(f, size)
            while uploader.offset < size:
                # Persist progress as a 0..1 fraction so other requests can
                # poll it; float() avoids Py2 integer division yielding 0.
                helper.progress = uploader.offset / float(size)
                helper.save()
                try:
                    uploader.upload_chunked()
                except ErrorResponse as e:
                    # BUG FIX: the retry cap is the max_retries *argument*,
                    # not an attribute on the helper model (the parameter was
                    # previously unused).
                    if retries < max_retries:
                        retries += 1
                    else:
                        helper.failure_reason = str(e)
                        helper.save()
                        raise
            upload = uploader.finish(helper.dest)
    except Exception as e:
        helper.failure_reason = str(e)
        helper.save()
def upload_chunked(file_path):
    """
    Uploads a file in chunks to Dropbox, allowing it to resume on
    (connection) failure.

    Args:
        file_path: path of the local file to upload; the basename is used as
            the Dropbox destination name.

    Raises:
        IOError: when a chunk fails more times than the retry budget allows.
    """
    dropbox_settings = DropboxSettings.get_solo()
    file_name = os.path.split(file_path)[-1]

    # Retry budget from the Dropbox docs.
    retries = 3

    client = DropboxClient(dropbox_settings.access_token)
    size = os.stat(file_path).st_size

    # BUG FIX: the file handle was never closed; the context manager
    # guarantees it is released even when the upload raises.
    with open(file_path, 'rb') as file_handle:
        uploader = client.get_chunked_uploader(file_handle, size)

        while uploader.offset < size:
            try:
                uploader.upload_chunked(chunk_size=1 * 1024 * 1024)
            except rest.ErrorResponse:  # pragma: no cover
                retries -= 1  # pragma: no cover

                if retries == 0:  # pragma: no cover
                    raise IOError("Failed to upload to dropbox")  # pragma: no cover

        # This will commit the file and persist it in Dropbox. Due to rotating
        # backups we MUST override.
        uploader.finish(file_name, overwrite=True)
def savefile(fd, fname, bfirmid, bclientid):
    """Encrypt the stream read from fd and upload it to Dropbox as <fname>_encr.

    The per-client AES key is fetched from the key server, the stream is
    encrypted chunk by chunk into a temporary file (with the IV prepended so
    the file can later be decrypted), and the temp file is pushed to Dropbox
    with a chunked uploader.

    Returns:
        The metadata dict from the Dropbox ``finish()`` call.
    """
    r = requests.get("%s/keyserv/key/%s/%s" % (app.config['KEYSERVER_URI'], bfirmid, bclientid))
    print("%s/keyserv/key/%s/%s" % (app.config['KEYSERVER_URI'], bfirmid, bclientid))
    keyobj = r.json()
    encrkey = keyobj['key']
    print("Got key %s" % encrkey)

    # Carve out a 32 byte / 256 bit key from the keyserver value,
    # converting base64 back to binary first.
    bkey = binascii.a2b_base64(encrkey)
    key = bkey[0:32]

    print("Starting encryption")
    # Set up our AES cipher. The random IV is written as the first block of
    # the output file.
    iv = Random.new().read(AES.block_size)
    cipher = AES.new(key, AES.MODE_CFB, iv)
    print("Cipher created using iv %s" % binascii.hexlify(iv))

    f = TemporaryFile()
    try:
        # BUG FIX: the original swallowed encryption/temp-file errors with a
        # print-and-continue, then uploaded a broken file (and could hit an
        # unbound `uploader` at the return). Let failures propagate; the
        # finally block guarantees the temp file is closed either way.
        f.write(iv)
        for chunk in chunkfd(fd, blocksize=4194304):
            f.write(cipher.encrypt(chunk))
        f.flush()
        f.seek(0, os.SEEK_END)
        fsize = f.tell()
        f.seek(0)

        print("Getting ready for Dropbox upload")
        access_token = config.get('Credentials', 'access_token')
        dclient = DropboxClient(access_token)
        uploader = dclient.get_chunked_uploader(f, fsize)
        while uploader.offset < fsize:
            uploader.upload_chunked()
        # NOTE(review): secure_filename strips the leading "/" — confirm the
        # resulting Dropbox path is what's intended.
        return uploader.finish(secure_filename("/%s_encr" % fname))
    finally:
        f.close()
def post(self):
    """Task-queue handler: upload a session's blobstore file to Dropbox.

    Skips sessions already uploaded, re-enqueues itself when a chunk upload
    fails, and on success records the resulting Dropbox path and size on the
    SessionData entity.
    """
    key = self.request.get('session_key')
    c_key = self.request.get('conf_key')
    blob_info = self.request.get('blob_info')
    session = SessionData.get(key)
    conference_data = ConferenceData.get(c_key)
    if session.uploaded_to_dbox:
        logging.info('Session | %s | already exists' % session.name)
        return
    if conference_data.dbox_access_token:
        access_token = conference_data.dbox_access_token
    else:
        logging.error('FAILED access_token does not exist')
        return None
    try:
        client = DropboxClient(access_token, "en_US", rest_client=None)
        logging.info('SUCCESS dbox_client created %s' % client)
    except Exception:
        logging.error('FAILED dbox_client was not created')
        return None
    f = session.blob_store_key.open()
    size = session.blob_store_key.size
    uploader = client.get_chunked_uploader(f, size)
    while uploader.offset < size:
        try:
            uploader.upload_chunked()
        except Exception:
            logging.exception('FAILED upload of file %s' % f)
            params = {'session_key': key, 'conf_key': c_key, 'blob_key': blob_info}
            taskqueue.add(url='/utilities/update_dropbox/',
                          method='POST',
                          params=params,
                          target='db-upload')
            # BUG FIX: the original kept looping after a failure — enqueuing
            # a fresh retry task on every failed iteration and never exiting
            # on a persistent error. Hand off to the queued retry instead.
            f.close()
            return None
    filename = session.filename
    if (conference_data.name and session.room and session.presenter[1] and filename):
        # folder structure /conf_name/room/presenter-lastname/filename
        response = uploader.finish('/%s/%s/%s/%s' % (conference_data.name,
                                                     session.room,
                                                     session.presenter[1],
                                                     filename),
                                   overwrite=False)
    elif filename:
        response = uploader.finish('/default/%s' % filename, overwrite=False)
    else:
        logging.error('FAILED problem naming file, file skipped')
        f.close()
        return None
    session.uploaded_to_dbox = True
    session.dbox_path = response['path']
    session.dbox_size = response['size']
    session.put()
    f.close()
    return
def upload(dropbox_helper_id, access_token, size, max_retries):
    """Upload a DropboxUploadHelper's source file to Dropbox in chunks, then
    email the owning user about the outcome (success with a share link, or
    failure with the recorded reason).

    Args:
        dropbox_helper_id: primary key of the DropboxUploadHelper row.
        access_token: Dropbox OAuth access token.
        size: total size of the source file in bytes.
        max_retries: chunk-upload failures tolerated before giving up.
    """
    from .models import DropboxUploadHelper
    helper = DropboxUploadHelper.objects.get(id=dropbox_helper_id)
    client = DropboxClient(access_token)
    retries = 0
    try:
        with open(helper.src, 'rb') as f:
            uploader = client.get_chunked_uploader(f, size)
            while uploader.offset < size:
                # BUG FIX: use true division so progress is a 0..1 fraction;
                # under Python 2 integer division it stayed 0 until done.
                helper.progress = uploader.offset / float(size)
                helper.save()
                try:
                    uploader.upload_chunked()
                except ErrorResponse as e:
                    if retries < max_retries:
                        retries += 1
                    else:
                        helper.failure_reason = str(e)
                        helper.save()
                        # Bare raise preserves the original traceback.
                        raise
            upload = uploader.finish(helper.dest)
    except Exception as e:
        helper.failure_reason = str(e)
        helper.save()

    couch_user = CouchUser.get_by_username(helper.user.username)
    if helper.failure_reason is None:
        # Success: build a share link and the success email bodies.
        share = client.share(upload['path'])
        context = {
            'share_url': share.get('url', None),
            'path': u'Apps/{app}{dest}'.format(
                app=settings.DROPBOX_APP_NAME,
                dest=upload['path'],
            )
        }
        with localize(couch_user.get_language_code()):
            subject = _(u'{} has been uploaded to dropbox!'.format(
                helper.dest))
            html_content = render_to_string(
                'dropbox/emails/upload_success.html', context)
            text_content = render_to_string(
                'dropbox/emails/upload_success.txt', context)
    else:
        # Failure: report the recorded reason instead of a share link.
        context = {'reason': helper.failure_reason, 'path': helper.dest}
        with localize(couch_user.get_language_code()):
            subject = _(u'{} has failed to upload to dropbox'.format(
                helper.dest))
            html_content = render_to_string('dropbox/emails/upload_error.html',
                                            context)
            text_content = render_to_string('dropbox/emails/upload_error.txt',
                                            context)
    send_HTML_email(
        subject,
        helper.user.email,
        html_content,
        text_content=text_content,
    )
class DropPy:
    # Thin convenience wrapper around the legacy Dropbox Core API client,
    # rooted at a configurable remote directory. Relies on module-level
    # helpers `_path` and `_trim_d` defined elsewhere in this file.

    def __init__(self, directory="/", key=None, secret=None, key_save="./", cursor=None):
        """Intialize a Dropbox connection, at directory specified or root.

        string directory. Location to consider root, relative to Dropbox root.
        string key_save. Location to store the cached access token file.
        cursor. Optional delta cursor from a previous session.
        Any exceptions during the authorization process are not caught.
        See https://www.dropbox.com/developers/core/docs/python
        """
        self.key = key
        self.secret = secret
        self.key_save = key_save
        # Timestamp format used by the Dropbox API metadata (RFC-1123 style).
        self._dropbox_date = "%a, %d %b %Y %H:%M:%S %z"
        access_token = self._auth()
        self.cursor = cursor
        # Normalize so the remote root always ends with a slash.
        if directory[-1] != "/":
            directory += "/"
        self.directory = directory
        self.client = DropboxClient(access_token)

    def _auth(self):
        """Attempts to load an access token from key_save.

        If unavailable, will guide the user through authentication.
        """
        pathname = _path(self.key_save, ".droppy")
        if path.exists(pathname):
            try:
                with open(pathname) as token:
                    access_token = token.read()
                return access_token
            except:
                # If this fails for any reason, just have them reauth.
                # NOTE(review): bare except also hides unexpected errors;
                # consider narrowing to OSError.
                pass
        # No cached token: run the interactive OAuth2 no-redirect flow.
        client = DropboxOAuth2FlowNoRedirect(self.key, self.secret)
        auth_url = client.start()
        print("Visit for authorization:\n{}".format(auth_url))
        auth_code = input("Enter the authorization key: ")
        access_token, user_id = client.finish(auth_code)
        self._writeToken(access_token)
        return access_token

    def _writeToken(self, access_token=""):
        """Writes the access token to specified key location"""
        pathname = _path(self.key_save, ".droppy")
        with open(pathname, "w+") as token_file:
            token_file.write(access_token)

    def _get_dropbox_datetime(self, date_string):
        # Parse a Dropbox API timestamp string into a datetime.
        return datetime.strptime(date_string, self._dropbox_date)

    def _set_dropbox_datetime(self, datetime_obj):
        # Format a datetime back into the Dropbox API timestamp format.
        return datetime_obj.strftime(self._dropbox_date)

    def logout(self):
        """Destroys the current access token. The user will have to reauth."""
        self.client.disable_access_token()

    def account_info(self):
        """Returns account info such as quota, email and display name."""
        return self.client.account_info()

    def download(self, target, to="./", rev=None, start=None, length=None):
        """Downloads the current file to the specified, or local, directory

        target The path to the file that will be downloaded. If the first
            character is the forward slash ("/"), it will ignore the relative
            path of the DropPy instance, and instead begin from the Dropbox
            root
        to The local directory to download the file to. Defaults to current
            directory
        rev Optional previous rev value of the file to be downloaded.
        start Optional byte value from which to start downloading.
        length Optional length in bytes for partially downloading the file.
            If length is specified but start is not, then the last length
            bytes will be downloaded.

        Raises
            400: Bad request (may be due to many things; check e.error for
                details).
            404: No file was found at the given path, or the file that was
                there was deleted.
            200: Request was okay but response was malformed in some way.
        """
        filename = target.split("/").pop()
        target = _path(self.directory, target)
        with open(_path(to, filename), "wb") as out:
            # NOTE(review): the local name `download` shadows this method
            # inside the block — harmless here, but easy to misread.
            with self.client.get_file(target, rev, start, length) as download:
                out.write(download.read())

    def upload_chunked(self, fd, to=None, length=None):
        """Creates a chunked uploader

        If the file exists on the server, another is uploaded with (#) as a
        suffix

        fd File object from which the data will be sourced from
        to Optional path to upload to. Defaults to initialized directory
        length The number of bytes to upload. Defaults to full file.
        """
        if length is None:
            length = path.getsize(fd.name)
        if to is None:
            to = self.directory
        to = _path(self.directory, to)
        filename = path.split(fd.name)[1]
        # Empty files go through a plain put_file; the chunked uploader is
        # only used when there are bytes to stream.
        if length < 1:
            self.client.put_file(_path(to, filename), fd)
        else:
            uploader = self.client.get_chunked_uploader(fd, length)
            while uploader.offset < length:
                uploader.upload_chunked()
            uploader.finish(_path(to, filename))

    def delta(self):
        """Retreive delta information from Dropbox. Allows you to monitor for
        changes.

        First change, cursor of None, returns all files. Subsequent calls,
        with cursor provided by previous calls, will provide changed files

        Returns all entries
        """
        result = self.client.delta(self.cursor, _trim_d(self.directory))
        self.cursor = result["cursor"]
        entries = result["entries"]
        # Keep paging until the server reports no more pending entries.
        while result["has_more"]:
            result = self.client.delta(self.cursor, _trim_d(self.directory))
            self.cursor = result["cursor"]
            entries = entries + result["entries"]
        return entries

    def longpoll(self):
        # Not implemented.
        pass

    def move(self, source, destination):
        """Moves a file from one place to another.

        Both source and destination are relative to initalized folder, unless
        preceded by directory altering prefix (eg. "/", "../", "../newFolder")

        source Origin of the file to move
        destination Place to move the file to

        Raises
            400: Bad request (may be due to many things; check e.error for
                details).
            403: An invalid move operation was attempted (e.g. there is
                already a file at the given destination, or moving a shared
                folder into a shared folder).
            404: No file was found at given from_path.
            503: User over storage quota.
        """
        source = _path(self.directory, source)
        destination = _path(self.directory, destination)
        self.client.file_move(source, destination)

    def get_remote_files(self, directory="", deleted=False):
        # Recursively collect metadata entries for every file/folder under
        # `directory` (relative to the instance root). When `deleted` is
        # True, entries for deleted files are included.
        remote_path = _path(self.directory, directory)
        metadata = self.client.metadata(remote_path, include_deleted=deleted)
        remote_files = metadata["contents"]
        for item in remote_files:
            if item["is_dir"]:
                remote_files = remote_files + self.get_remote_files(item["path"], deleted)
        return remote_files

    def sync(self, local=getcwd(), deleted=False, hidden=False):
        """Syncs the local file system to the remote file system.

        By default, will not delete any files that differ, only add new files

        local The local file directory to recusively sync with the remote.
            Default ./
        delete Delete local files to keep in sync
            Does not delete remote files as it may not be running 24/7,
            tracking deletions
            Default False
        hidden Include hidden files in sync
            Default False
        """
        local = path.abspath(local)
        if not path.isdir(local):
            raise ValueError("sync requires local to be a directory.")
        if local[-1] != "/":
            local += "/"
        # Index local files by path relative to `local`, keeping mtimes.
        local_files = {}
        for item in list(Path(local).glob("**/*")):
            # Skip dotfiles unless hidden files were requested.
            # NOTE(review): these patterns are non-raw strings with "\."/"\w"
            # escapes — they work today but raise DeprecationWarning on
            # modern Pythons; prefer raw strings.
            if not hidden and ( search("/\.\w+", str(item)) \
                    or match("\.\w+", str(item)) ):
                continue
            local_files[str(item)[len(local):]] = {
                "mtime": int(item.stat().st_mtime)
            }
        remote_files_meta = self.get_remote_files(deleted=deleted)
        remote_files = {}
        remote_dirs = []
        for item in remote_files_meta:
            isDeleted = "is_deleted" in item and item["is_deleted"]
            i = item["path"]
            mtime = self._get_dropbox_datetime(item["modified"])
            if not hidden and (search("/\.\w+", str(i)) or match("\.\w+", str(i))):
                continue
            # Dropbox is not case sensitive, so make sure we preserve for local
            if i.startswith(self.directory) or i.startswith(self.directory.lower()):
                i = i[len(self.directory):]
            if item["is_dir"]:
                remote_dirs.append(i)
            # NOTE(review): strftime("%s") is a platform-specific (glibc)
            # epoch format, not part of the documented strftime set — confirm
            # on the deployment platform.
            remote_files[i] = {
                "mtime": int(mtime.strftime("%s")),
                "deleted": isDeleted
            }
        # Remote entries missing locally (and not deleted) get downloaded;
        # remote-deleted entries that still exist locally get removed.
        download = sorted([item for item in remote_files if item not in \
                local_files and not remote_files[item]["deleted"]])
        delete = sorted([item for item in remote_files if item in \
                local_files and remote_files[item]["deleted"]])
        #upload = sorted([item for item in local_files if item not in remote_files])
        #for item in upload:
        #    item_path = local / Path(item)
        #
        #    if item_path.is_dir():
        #        self.client.file_create_folder(_path(self.directory, item))
        #    else:
        #        parts = [part for part in item_path.parts if part not in Path(local).parts]
        #        to = "/".join(parts[:-1])
        #        with open(str(item_path), "rb") as f:
        #            self.upload_chunked(f,to=to)
        for item in download:
            item_path = Path(item)
            if item in remote_dirs:
                mkdir(_path(local, item))
            else:
                parts = item_path.parts
                to = _path(local, "/".join(parts[:-1]))
                self.download(_path(self.directory + item), to=to)
        for item in delete:
            p = Path(local) / item
            if p.is_dir():
                rmtree( str(p) )
            elif p.is_file():
                remove( str(p) )