def get_folder_tree_raw():
    """Return the raw /delta data retrieved from Dropbox as JSON.

    Requires an authenticated session (redirects to the login page
    otherwise).  Fetches only the first page of /delta results — the
    complete fetch is deliberately disabled below — and maps each changed
    path to its metadata dict (None for deleted entries).
    """
    # Check authentication
    if 'user' not in session:
        return redirect(url_for('login'))
    access_token = get_access_token()
    delta_metadatas = {}
    # BUG FIX: the original built `client` only when a token was present,
    # but ran the delta loop unconditionally, raising NameError on the
    # undefined `client` when the token was missing.  Bail out early with
    # an empty result instead.
    if access_token is None:
        return jsonify(delta_metadatas)
    client = DropboxClient(access_token)
    print("Fetching results...")
    cursor = None
    result = None
    i = 0
    # First iteration always runs (cursor is None); later iterations only
    # run while the API reports more pages.
    while cursor is None or result['has_more']:
        i += 1
        print("Fetching result set %d." % i)
        result = client.delta(cursor)
        for path, metadata in result['entries']:
            # None marks a deleted entry.
            delta_metadatas[path] = metadata if metadata else None
        cursor = result['cursor']
        # Disable complete fetch: stop after the first page.
        result['has_more'] = False
    return jsonify(delta_metadatas)
def process_user(uid):
    '''Call /delta for the given user ID and process any changes.'''
    # OAuth token and /delta cursor (None on the first run) for this user.
    token = redis_client.hget('tokens', uid)
    cursor = redis_client.hget('cursors', uid)
    client = DropboxClient(token)

    while True:
        result = client.delta(cursor)
        for path, metadata in result['entries']:
            # Skip deletions, folders, and anything that is not Markdown.
            if metadata is None or metadata['is_dir']:
                continue
            if not path.endswith('.md'):
                continue
            # Render the Markdown and publish it as <basename>.html.
            rendered = markdown(client.get_file(path).read())
            client.put_file(path[:-3] + '.html', rendered, overwrite=True)

        # Persist the new cursor before (possibly) fetching the next page.
        cursor = result['cursor']
        redis_client.hset('cursors', uid, cursor)
        if not result['has_more']:
            break
def get_folder_tree_raw():
    """Return the raw /delta data retrieved from Dropbox as JSON.

    Requires an authenticated session (redirects to the login page
    otherwise).  Fetches only the first page of /delta results — the
    complete fetch is deliberately disabled below — and maps each changed
    path to its metadata dict (None for deleted entries).
    """
    # Check authentication
    if 'user' not in session:
        return redirect(url_for('login'))
    access_token = get_access_token()
    delta_metadatas = {}
    # BUG FIX: the original built `client` only when a token was present,
    # but ran the delta loop unconditionally, raising NameError on the
    # undefined `client` when the token was missing.  Bail out early with
    # an empty result instead.
    if access_token is None:
        return jsonify(delta_metadatas)
    client = DropboxClient(access_token)
    print("Fetching results...")
    cursor = None
    result = None
    i = 0
    # First iteration always runs (cursor is None); later iterations only
    # run while the API reports more pages.
    while cursor is None or result['has_more']:
        i += 1
        print("Fetching result set %d." % i)
        result = client.delta(cursor)
        for path, metadata in result['entries']:
            # None marks a deleted entry.
            delta_metadatas[path] = metadata if metadata else None
        cursor = result['cursor']
        # Disable complete fetch: stop after the first page.
        result['has_more'] = False
    return jsonify(delta_metadatas)
def process_user(uid): '''Call /delta for the given user ID and process any changes.''' # OAuth token for the user token = redis_client.hget('tokens', uid) # /delta cursor for the user (None the first time) cursor = redis_client.hget('cursors', uid) client = DropboxClient(token) has_more = True while has_more: result = client.delta(cursor) for path, metadata in result['entries']: # Ignore deleted files, folders, and non-markdown files if (metadata is None or metadata['is_dir'] or not path.endswith('.md')): continue # Convert to Markdown and store as <basename>.html response, metadata = client.get_file_and_metadata(path) md = response.read().decode() html = markdown(md, extensions=['gfm']) html_name = path[:-3] + '.html' client.put_file(html_name, html, overwrite=True) # Include URL to published file in HTML comment at top of Markdown # file if '<!-- Published file url:' != md.split('\n')[0]: share_url = client.share(html_name, short_url=False).get('url') file_key = share_url.split('/')[4] url_name = urllib.parse.quote(html_name) url_comment = ('<!-- Published file url:\n' 'https://dl.dropboxusercontent.com/s/' '{}{}\n-->\n'.format(file_key, url_name)) md = url_comment + md client.put_file(path, md, overwrite=True) # Update cursor cursor = result['cursor'] redis_client.hset('cursors', uid, cursor) # Repeat only if there's more to do has_more = result['has_more']
def process_user(uid):
    '''Call /delta for the given user ID and process any changes.

    For every changed .py file: run it through the formatter and, when the
    formatted output differs, upload it back as <basename>-reformed.py.
    Files that fail to format get a canned <basename>-disappointed.py.
    '''
    # OAuth token for the user
    token = redis_client.hget('tokens', uid)
    # /delta cursor for the user (None the first time)
    cursor = redis_client.hget('cursors', uid)
    client = DropboxClient(token)

    has_more = True
    while has_more:
        result = client.delta(cursor)
        for path, metadata in result['entries']:
            filename, fileext = os.path.splitext(path)
            # Ignore deleted files, folders, non-python files, and our own
            # output.  BUG FIX: the original skipped only '-disappointed'
            # files, so each run re-processed its own '-reformed.py'
            # uploads forever; skip those too.
            if (metadata is None or metadata['is_dir'] or fileext != '.py'
                    or '-disappointed' in filename
                    or '-reformed' in filename):
                continue
            with client.get_file(path) as fin:
                original_code = fin.read()
            try:
                formatted_code = FormatCode(original_code)
            except Exception:
                # FIX: was a bare `except:` (also caught KeyboardInterrupt/
                # SystemExit) wrapped around the uploads as well; keep only
                # the formatting step in the try block.  The code itself
                # was somehow invalid — upload the canned facepalm reply.
                with open('facepalm.py', 'rb') as facepalm:
                    client.put_file(filename + '-disappointed.py', facepalm,
                                    overwrite=True)
            else:
                # Only reform heretical code
                if original_code != formatted_code:
                    formatted_code = credit(formatted_code)
                    client.put_file(filename + '-reformed.py', formatted_code,
                                    overwrite=True)
        # Update cursor
        cursor = result['cursor']
        redis_client.hset('cursors', uid, cursor)
        # Repeat only if there's more to do
        has_more = result['has_more']
def process_user(uid):
    '''Call /delta for the given user ID and process any changes.'''
    # OAuth token and /delta cursor (None on the first run) for this user.
    token = redis_client.hget('tokens', uid)
    cursor = redis_client.hget('cursors', uid)
    client = DropboxClient(token)

    while True:
        result = client.delta(cursor)
        for path, metadata in result['entries']:
            # Skip deletions, folders, entries without a thumbnail, and
            # our own output.
            skip = (metadata is None
                    or metadata['is_dir']
                    or not metadata.get('thumb_exists')
                    or path.endswith('-corrected.jpg'))
            if skip:
                continue

            workdir = tempfile.mkdtemp()
            source = workdir + '/input.jpg'
            output = workdir + '/corrected.jpg'

            # Download a large JPEG thumbnail of the image.
            with client.thumbnail(path, size='l', format='JPEG') as src:
                with open(source, 'wb') as dst:
                    dst.write(src.read())

            # Apply the golden "correction" via the convert shell wrapper.
            shell.convert(source, '-modulate', 250, '-fill', 'gold',
                          '-tint', 100, output)

            # Upload the result next to the original.
            with open(output, 'rb') as corrected:
                client.put_file(os.path.splitext(path)[0] + '-corrected.jpg',
                                corrected, overwrite=True)
            shutil.rmtree(workdir)

        # Persist the new cursor before (possibly) fetching the next page.
        cursor = result['cursor']
        redis_client.hset('cursors', uid, cursor)
        if not result['has_more']:
            break
def process_user(uid):
    '''Call /delta for the given user ID and process any changes.

    For every changed .py file: run it through the formatter and, when the
    formatted output differs, upload it back as <basename>-reformed.py.
    Files that fail to format get a canned <basename>-disappointed.py.
    '''
    # OAuth token for the user
    token = redis_client.hget('tokens', uid)
    # /delta cursor for the user (None the first time)
    cursor = redis_client.hget('cursors', uid)
    client = DropboxClient(token)

    has_more = True
    while has_more:
        result = client.delta(cursor)
        for path, metadata in result['entries']:
            filename, fileext = os.path.splitext(path)
            # Ignore deleted files, folders, non-python files, and our own
            # '-reformed'/'-disappointed' output.
            if (metadata is None or metadata['is_dir'] or fileext != '.py'
                    or '-disappointed' in filename
                    or '-reformed' in filename):
                continue
            with client.get_file(path) as fin:
                original_code = fin.read()
            try:
                formatted_code = FormatCode(original_code)
            except Exception:
                # FIX: was a bare `except:` (also caught KeyboardInterrupt/
                # SystemExit) wrapped around the uploads as well; keep only
                # the formatting step in the try block.  The code itself
                # was somehow invalid — upload the canned facepalm reply.
                with open('facepalm.py', 'rb') as facepalm:
                    client.put_file(filename + '-disappointed.py', facepalm,
                                    overwrite=True)
            else:
                # Only reform heretical code (removed unused `suffix` local).
                if original_code != formatted_code:
                    formatted_code = credit(formatted_code)
                    client.put_file(filename + '-reformed.py', formatted_code,
                                    overwrite=True)
        # Update cursor
        cursor = result['cursor']
        redis_client.hset('cursors', uid, cursor)
        # Repeat only if there's more to do
        has_more = result['has_more']
def process_user(uid): '''Call /delta for the given user ID and process any changes.''' # OAuth token for the user token = redis_client.hget('tokens', uid) # /delta cursor for the user (None the first time) cursor = redis_client.hget('cursors', uid) client = DropboxClient(token) has_more = True while has_more: result = client.delta(cursor) for path, metadata in result['entries']: # Ignore deleted files, folders, non-jpegs, and our output if (metadata is None or metadata['is_dir'] or not metadata.get('thumb_exists') or path.endswith('-corrected.jpg')): continue temp_path = tempfile.mkdtemp() with open(temp_path + '/input.jpg', 'wb') as fout: with client.thumbnail(path, size='l', format='JPEG') as fin: fout.write(fin.read()) shell.convert(temp_path + '/input.jpg', '-modulate', 250, '-fill', 'gold', '-tint', 100, temp_path + '/corrected.jpg') with open(temp_path + '/corrected.jpg', 'rb') as f: client.put_file(os.path.splitext(path)[0]+'-corrected.jpg', f, overwrite=True) shutil.rmtree(temp_path) # Update cursor cursor = result['cursor'] redis_client.hset('cursors', uid, cursor) # Repeat only if there's more to do has_more = result['has_more']
def haunt_user(uid): '''Call /delta for the given user ID and haunt any new images.''' # OAuth token for the user token = redis_client.hget('tokens', uid) # /delta cursor for the user (None the first time) cursor = redis_client.hget('cursors', uid) # These are all v1-style API calls. client = DropboxClient(token) has_more = True while has_more: result = client.delta(cursor) for path, metadata in result['entries']: if (metadata is None or metadata['is_dir'] or not metadata.get('thumb_exists') or path.endswith('-spookified.gif')): continue temp_path = tempfile.mkdtemp() with open(temp_path + '/alive.jpg', 'wb') as fout: with client.thumbnail(path, size='l', format='JPEG') as fin: fout.write(fin.read()) width = int(shell.identify('-format', '%[fx:w]', temp_path + '/alive.jpg').stdout) shell.composite('-watermark', '50%', '-gravity', 'center', '-channel', 'RGBA', '(', 'halloweenghost.png', '-resize', '{}x'.format(width), ')', temp_path + '/alive.jpg', temp_path + '/second_frame.png') shell.convert('-delay', '4000x1000', '-loop', 0, temp_path + '/alive.jpg', '-delay', '200x1000', temp_path + '/second_frame.png', temp_path + '/ghost.gif') with open(temp_path + '/ghost.gif', 'rb') as f: client.put_file(os.path.splitext(path)[0]+'-spookified.gif', f, overwrite=True) shutil.rmtree(temp_path) # Update cursor cursor = result['cursor'] redis_client.hset('cursors', uid, cursor) # Repeat only if there's more to do has_more = result['has_more']
class PiBox(object):
    """Mirrors a Dropbox folder into a local directory via /delta.

    The /delta cursor is persisted to ``delta_cursor_file`` so syncs can
    resume across restarts.
    """

    def __init__(self, dropbox_token, pi_box_root, delta_cursor_file=''):
        # dropbox_token: OAuth token for the Dropbox v1 client.
        # pi_box_root: local directory the Dropbox tree is mirrored into.
        # delta_cursor_file: where the delta cursor is persisted ('' means
        # the cursor is effectively not persisted — open('') raises IOError).
        self.token = dropbox_token
        self.pi_box_root = pi_box_root
        self.client = DropboxClient(dropbox_token)
        self.delta_cursor_file = delta_cursor_file
        self.delta_cursor = self._read_cursor()

    def _full_local_path(self, path):
        """Map a Dropbox path onto the local mirror root."""
        return self.pi_box_root + path

    def _read_cursor(self):
        """Return the persisted delta cursor, or None if unreadable."""
        try:
            with open(self.delta_cursor_file, 'r') as f:
                return f.read()
        except IOError:
            return None

    def _save_cursor(self):
        """Best-effort persist of the current delta cursor to disk."""
        try:
            with open(self.delta_cursor_file, 'w+') as f:
                f.write(self.delta_cursor)
        except IOError:
            pass

    def get_delta(self):
        """Fetch the next /delta page, persist the new cursor, return it."""
        response = self.client.delta(cursor=self.delta_cursor)
        self.delta_cursor = response['cursor']
        self._save_cursor()
        return response

    def get_file(self, from_path):
        """Download a Dropbox file into the local mirror.

        BUG FIX: the original opened the output file without ever closing
        it, leaking the handle and risking unflushed data; use a context
        manager so it is closed deterministically.
        """
        with open(self._full_local_path(from_path), 'w+b') as output_file:
            with self.client.get_file(from_path) as f:
                output_file.write(f.read())

    def make_local_directory(self, path):
        """Create the local directory for *path* if it does not exist."""
        local_path = self._full_local_path(path)
        if not os.path.exists(local_path):
            os.makedirs(local_path)
class DropboxStorage(object):
    """Storage backend for a Calibre library hosted in Dropbox."""

    # Path of the Calibre metadata database inside the Dropbox folder.
    calibre_db_path = '/%s/metadata.db' % settings.DROPBOX_CALIBRE_DIR
    # Cache key under which the last /delta cursor is stored.
    dropbox_cursor_key = 'dropbox_cursor'

    def __init__(self):
        # Build an OAuth1-style Dropbox session from the configured
        # app and user credentials.
        session = DropboxSession(settings.DROPBOX_CONSUMER_KEY,
                                 settings.DROPBOX_CONSUMER_SECRET,
                                 settings.DROPBOX_ACCESS_TYPE, locale=None)
        session.set_token(settings.DROPBOX_ACCESS_TOKEN,
                          settings.DROPBOX_ACCESS_TOKEN_SECRET)
        self.client = DropboxClient(session)

    def get_url(self, path, share=False):
        """Return a URL for *path*: a share link when share=True, else a
        temporary media link.  Returns None on a Dropbox API error.
        """
        try:
            if share:
                result = self.client.share(path, short_url=False)
                # '?dl=1' forces download rather than the preview page.
                return result['url'] + '?dl=1'
            return self.client.media(path).get('url')
        except ErrorResponse:
            # Deliberate best-effort: callers treat None as "no URL".
            pass

    def get_file(self, path):
        """Return a file-like object for *path*, or None on API error."""
        try:
            return self.client.get_file(path)
        except ErrorResponse:
            pass

    def sync_db(self):
        """Overwrite the local Calibre DB with the remote metadata.db."""
        calibre_db = self.client.get_file(self.calibre_db_path)
        with open(settings.DATABASES['calibre']['NAME'], 'wb') as f:
            f.write(calibre_db.read())

    def need_update(self):
        """Return True if the Calibre DB changed since the last check.

        Advances the cached /delta cursor as a side effect.
        """
        delta = self.client.delta(cursor=cache.get(self.dropbox_cursor_key),
                                  path_prefix=self.calibre_db_path)
        cache.set(self.dropbox_cursor_key, delta['cursor'], timeout=None)
        return len(delta['entries']) > 0
def update(init=False):
    """Generate the html and upload to S3 only for the files that have
    changed.  When *init* is true the walk starts from scratch instead of
    the persisted cursor.
    """
    cursor = None if init else redis_client.get('cursor')
    client = DropboxClient(DROPBOX_TOKEN)

    while True:
        result = client.delta(cursor=cursor, path_prefix=DROPBOX_ROOT)
        for path, metadata in result['entries']:
            # Skip deletions, folders, and anything that is not Markdown.
            if metadata is None or metadata['is_dir'] or not path.endswith('.md'):
                continue
            # Render the changed file and push it to S3.
            name = parse_name(path)
            page = add_template(markdown(client.get_file(path).read()))
            s3_upload(page, name)
        # Persist the cursor so the next run resumes from here.
        cursor = result['cursor']
        redis_client.set('cursor', cursor)
        if not result['has_more']:
            break
class FolderSize:
    """Computes per-folder byte totals for a Dropbox account via /delta."""

    def __init__(self, dropbox_token):
        # OAuth token and the v1 Dropbox client built from it.
        self.token = dropbox_token
        self.client = DropboxClient(self.token)

    @staticmethod
    def _aggregate(sizes):
        """Roll per-file sizes up into every ancestor folder.

        Args:
            sizes: mapping of file path -> size in bytes.
        Returns:
            mapping of folder path ('/' for the root) -> total bytes.
        """
        # defaultdict(int) replaces the original defaultdict(lambda: 0).
        foldersizes = defaultdict(int)
        for path, size in sizes.items():
            segments = path.split('/')
            # Credit the size to each ancestor folder of the file.
            for i in range(1, len(segments)):
                folder = '/'.join(segments[:i]) or '/'
                foldersizes[folder] += size
        return foldersizes

    def get_sizes(self):
        """Walk the full /delta stream and return folder -> total bytes."""
        sizes = {}
        cursor = None
        result = None
        # First iteration always runs (cursor is None); later iterations
        # only run while the API reports more pages.
        while cursor is None or result['has_more']:
            result = self.client.delta(cursor)
            for path, metadata in result['entries']:
                # Deleted entries carry no metadata; count them as 0 bytes.
                sizes[path] = metadata['bytes'] if metadata else 0
            cursor = result['cursor']
        return self._aggregate(sizes)
def update(init=False):
    """Generate the html and upload to S3 only for the files that have
    changed.  When *init* is true the walk starts from scratch instead of
    the persisted cursor.
    """
    # Start fresh on init, otherwise resume from the stored cursor.
    cursor = None if init else redis_client.get("cursor")
    client = DropboxClient(DROPBOX_TOKEN)

    more = True
    while more:
        result = client.delta(cursor=cursor, path_prefix=DROPBOX_ROOT)
        for entry_path, entry_meta in result["entries"]:
            # Only existing, regular markdown files are of interest.
            is_markdown = (entry_meta is not None
                           and not entry_meta["is_dir"]
                           and entry_path.endswith(".md"))
            if not is_markdown:
                continue
            # Render the changed page and publish it to S3.
            page = add_template(markdown(client.get_file(entry_path).read()))
            s3_upload(page, parse_name(entry_path))
        # Remember where we got to for the next invocation.
        cursor = result["cursor"]
        redis_client.set("cursor", cursor)
        more = result["has_more"]
def update_filetree():
    """Refresh the cached Dropbox file tree for the logged-in user.

    Walks the /delta stream from the stored cursor.  Small change sets
    are applied straight to the DB; large ones are batched in memory and
    flushed once at the end.  Returns a dict with the 'changed' flag,
    the new 'cursor', and the depth-pruned 'tree'.
    """
    if 'access_token' not in session:
        abort(400)
    client = DropboxClient(session['access_token'])
    user_id = session['user_id']
    has_more = True
    cursor = DBC.get_delta_cursor(user_id)
    changed = False
    # if we do work in memory, keep a flag so
    # we know to consolidate the work we did
    # in memory and save it to the DB
    do_work_in_memory = False
    memcache = {'tree': None, 'tab': None}
    while has_more:
        delta = client.delta(cursor)
        # A 'reset' delta means our cached state is stale; drop it.
        if delta['reset'] is True:
            DBC.clear(user_id)
        if len(delta['entries']) > 0:
            changed = True
        entries = delta['entries']
        # do we want to batch our work in memory, or flush directly to the DB?
        # depends on the number of entries; see DELTA_DO_WORK_IN_MEMORY_THRESHOLD
        if do_work_in_memory or len(
                entries) > DELTA_DO_WORK_IN_MEMORY_THRESHOLD:
            do_work_in_memory = True
            # Lazily load the current tree and its path index exactly once.
            if memcache['tree'] is None:
                memcache['tree'] = DBC.read(user_id)
                memcache['tab'] = build_index_table(memcache['tree'])
            process_delta_entries_in_memory(entries, memcache['tab'])
            # print "processed %s entries in memory, deferring DB write..." % len(entries)
        else:
            for entry in entries:
                [path, metadata] = entry
                # None metadata marks a deletion.
                if metadata is None:
                    DBC.delete_path(user_id, path)
                else:
                    DBC.update_path(user_id, metadata['path'], metadata)
            # print "processed %s entries by directly updating DB" % len(entries)
        has_more = delta['has_more']
        cursor = delta['cursor']
    tree = None
    if do_work_in_memory:
        # flush our in-memory tree into the DB
        DBC.overwrite(user_id, memcache['tree'], cursor)
        tree = prune(memcache['tree'], MAX_DIRECTORY_DEPTH)
    else:
        # only update the cursor
        DBC.set_delta_cursor(user_id, cursor)
        tree = DBC.read(session['user_id'], MAX_DIRECTORY_DEPTH)
    result = {'changed': changed, 'cursor': cursor, 'tree': tree}
    return result
# NOTE(review): this DropboxAPI class is corrupted in the source as stored:
# a print() string literal is split across a raw line break, a `def` keyword
# is separated from its method name, and the share() method contains a
# credential-redaction artifact (`"******"`) where code was scrubbed — the
# block is not valid Python as-is and the missing code cannot be recovered
# from here.  Preserved verbatim below; restore from version control before
# attempting to run or refactor.  It is a Python-2-era Dropbox v1 client
# (raw_input, has_key, print statements) wrapping auth, file CRUD, /delta
# polling, revision-log helpers, and a Selenium/PhantomJS share flow.
class DropboxAPI(StorageAPI, AppendOnlyLog): "dropbox@auth : dropbox.com account with auth info" def __init__(self): from params import AUTH_DIR authdir = AUTH_DIR self.auth_file = os.path.join(authdir, 'dropbox.auth') try: with open(self.auth_file, 'r') as file: ACCESS_TOKEN = file.readline().rstrip() USER_ID = file.readline().rstrip() except IOError: ACCESS_TOKEN, USER_ID = self._authorize() self.client = DropboxClient(ACCESS_TOKEN) def sid(self): return util.md5("dropbox") % 10000 def copy(self): return DropboxAPI() def _authorize(self): dbg.info('Request access token from Dropbox') flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET) authorize_url = flow.start() # print 'Open auth url:', authorize_url #browser = webdriver.PhantomJS(service_log_path=os.path.join(tempfile.gettempdir(), 'ghostdriver.log')) #browser = webdriver.PhantomJS(service_log_path=os.path.join(tempfile.gettempdir(), 'ghostdriver.log'), service_args=['--ignore-ssl-errors=true', '--ssl-protocol=tlsv1']) # Change to rely on browser print( "We need to authorize access to Dropbox. 
Please visit the following URL and authorize the access:" ) print(authorize_url) print("") code = raw_input("Input the code you got: ").strip() #code = #raw_input("Enter the authorization code here: ").strip() access_token, user_id = flow.finish(code) with open(self.auth_file, 'w') as file: file.write(access_token + "\n") file.write(user_id + "\n") dbg.info('Authentication successful') return (access_token, user_id) # return: list of file paths def listdir(self, path): dic = self.client.metadata(path) lst = map(lambda x: x["path"], dic["contents"]) lst = map(lambda x: x.split("/")[-1], lst) return lst def exists(self, path): try: dic = self.client.metadata(path) if (dic.has_key("is_deleted") and dic["is_deleted"]): return False return True except: return False def get(self, path): """Get the file content Args: path: string Returns: content: string """ conn = self.client.get_file(path) content = conn.read() conn.close() return content def get_file_rev(self, path, rev): # get file of a previous version with rev hash_id content = None try: conn = self.client.get_file(path, rev=rev) content = conn.read() conn.close() except ErrorResponse as detail: #print "[get_file_rev] File doesn't exist", detail return None return content def put(self, path, content): """Upload the file Args: path: string content: string, size <= 4MB Returns: None """ from dropbox.rest import ErrorResponse strobj = StringIO(content) try: metadata = self.client.put_file(path, strobj, overwrite=False, autorename=False) except ErrorResponse as e: if e.status == 409: raise ItemAlreadyExists(e.status, e.reason) else: raise APIError(e.status, e.reason) return True def putdir(self, path): self.client.file_create_folder(path) def update(self, path, content): """Update the file Args and returns same as put """ strobj = StringIO(content) metadata = self.client.put_file(path, strobj, overwrite=True) return True def rm(self, path): """Delete the file Args: path: string """ self.client.file_delete(path) def 
rmdir(self, path): self.client.file_delete(path) def metadata(self, path): # only for file, not dir _md = self.client.metadata(path) md = {} md['size'] = _md['bytes'] md['mtime'] = util.convert_time(_md['modified']) return md def delta(self, path=None, cursor=None): resp = self.client.delta(cursor=cursor, path_prefix=path) cursor = resp['cursor'] changes = [] for entry in resp['entries']: event = {} if entry[1]: # we don't care about delete event event['path'] = entry[0] if entry[1]['is_dir']: event['type'] = 'folder' else: event['type'] = 'file' changes.append(event) return cursor, changes def poll(self, path=None, cursor=None, timeout=30): # timeout max 480 import requests import time from error import PollError beg_time = time.time() end_time = beg_time + timeout curr_time = beg_time url = 'https://api-notify.dropbox.com/1/longpoll_delta' params = {} changes = [] if path: path = util.format_path(path) if not cursor: cursor, _ = self.delta(path) curr_time = time.time() while True: params['cursor'] = cursor params['timeout'] = max(30, int(end_time - curr_time)) # minimum 30 second resp = requests.request('GET', url, params=params) obj = resp.json() if 'error' in obj: raise PollError(resp.status_code, resp.text) if obj['changes']: cursor, _delta = self.delta(path, cursor) changes.extend(_delta) if changes: break curr_time = time.time() if curr_time > end_time: break return cursor, changes def init_log(self, path): if not self.exists(path): self.put(path, '') def reset_log(self, path): if self.exists(path): self.rm(path) def append(self, path, msg): self.update(path, msg) def get_logs(self, path, last_clock): length = 5 # latest revision comes first revisions = self.client.revisions(path, rev_limit=length) if not revisions: return [], None new_logs = [] new_clock = revisions[0]['rev'] end = False # if reach to end while True: for metadata in revisions: if last_clock and metadata['rev'] == last_clock: end = True break if end: break if len(revisions) < length: break # 
still have logs unread, double the length length *= 2 revisions = self.client.revisions(path, rev_limit=length) # download the content of unseen rev for metadata in revisions: if last_clock and metadata['rev'] == last_clock: break if 'is_deleted' in metadata and metadata['is_deleted']: continue msg = self.get_file_rev(path, metadata['rev']) if len(msg) > 0: new_logs.insert(0, msg) return new_logs, new_clock def __msg_index(self, fn): return eval(fn[3:]) def init_log2(self, path): if not self.exists(path): self.putdir(path) def append2(self, path, msg): path = util.format_path(path) lst = sorted(self.listdir(path)) if lst: index = self.__msg_index(lst[-1]) + 1 else: index = 0 while True: fn = 'msg%d' % index fpath = path + '/' + fn try: self.put(fpath, msg) except ItemAlreadyExists: index += 1 else: break def get_logs2(self, path, last_clock): path = util.format_path(path) lst = self.listdir(path) if not lst: return [], None srt = {} for fn in lst: srt[self.__msg_index(fn)] = fn lst = [srt[i] for i in sorted(srt.keys(), reverse=True)] new_logs = [] new_clock = self.__msg_index(lst[0]) for fn in lst: if last_clock == None and self.__msg_index(fn) == last_clock: break msg = self.get(path + '/' + fn) new_logs.insert(0, msg) return new_logs, new_clock def share(self, path, target_email): url = "https://www.dropbox.com/" print 'Get access token from Dropbox' print 'Open auth url:', url browser = webdriver.PhantomJS( service_log_path=os.path.join(tempfile.gettempdir(), 'ghostdriver.log'), service_args=['--ignore-ssl-errors=true', '--ssl-protocol=tlsv1']) browser.get(url) try: wait = WebDriverWait(browser, 30) btn = wait.until( EC.element_to_be_clickable( (By.XPATH, "//div[@id='sign-in']/a"))) btn.click() email = wait.until( EC.element_to_be_clickable( (By.XPATH, "//input[@id='login_email']"))) email.send_keys(raw_input("Enter your Dropbox email:")) pwd = browser.find_element_by_xpath( "//input[@id='login_password']") pwd.send_keys(getpass.getpass("Enter your Dropbox 
password:"******"//a[text()='%s']" % path))) target_folder.click() wait.until(EC.title_contains("%s" % path)) share_btn = browser.find_element_by_xpath( "//a[@id='global_share_button']") share_btn.click() target = wait.until( EC.element_to_be_clickable(( By.XPATH, "//form[@class='invite-more-form']//input[@spellcheck][@type='text']" ))) target.send_keys(target_email) confirm_btn = browser.find_element_by_xpath( "//form[@class='invite-more-form']//input[@type='button'][1]") confirm_btn.click() except: print(browser.title) assert False # print(browser.current_url) # print(browser.page_source) pass
# NOTE(review): duplicate copy of the DropboxAPI class above, equally
# corrupted as stored: a print() string literal is split across a raw line
# break, a `def` keyword is separated from its method name, and share()
# contains a credential-redaction artifact (`"******"`) where code was
# scrubbed.  Not valid Python as-is; the missing code cannot be recovered
# from here.  Preserved verbatim — restore from version control before
# running or refactoring, and consider deleting one of the two copies.
class DropboxAPI(StorageAPI, AppendOnlyLog): "dropbox@auth : dropbox.com account with auth info" def __init__(self): from params import AUTH_DIR authdir = AUTH_DIR self.auth_file = os.path.join(authdir, 'dropbox.auth') try: with open(self.auth_file, 'r') as file: ACCESS_TOKEN = file.readline().rstrip() USER_ID = file.readline().rstrip() except IOError: ACCESS_TOKEN, USER_ID = self._authorize() self.client = DropboxClient(ACCESS_TOKEN) def sid(self): return util.md5("dropbox") % 10000 def copy(self): return DropboxAPI() def _authorize(self): dbg.info('Request access token from Dropbox') flow = DropboxOAuth2FlowNoRedirect(APP_KEY, APP_SECRET) authorize_url = flow.start() # print 'Open auth url:', authorize_url #browser = webdriver.PhantomJS(service_log_path=os.path.join(tempfile.gettempdir(), 'ghostdriver.log')) #browser = webdriver.PhantomJS(service_log_path=os.path.join(tempfile.gettempdir(), 'ghostdriver.log'), service_args=['--ignore-ssl-errors=true', '--ssl-protocol=tlsv1']) # Change to rely on browser print("We need to authorize access to Dropbox. 
Please visit the following URL and authorize the access:") print(authorize_url) print("") code = raw_input("Input the code you got: ").strip() #code = #raw_input("Enter the authorization code here: ").strip() access_token, user_id = flow.finish(code) with open(self.auth_file, 'w') as file: file.write(access_token + "\n") file.write(user_id + "\n") dbg.info('Authentication successful') return (access_token, user_id) # return: list of file paths def listdir(self, path): dic = self.client.metadata(path) lst = map(lambda x:x["path"], dic["contents"]) lst = map(lambda x:x.split("/")[-1], lst) return lst def exists(self, path): try: dic = self.client.metadata(path) if(dic.has_key("is_deleted") and dic["is_deleted"]): return False return True except: return False def get(self, path): """Get the file content Args: path: string Returns: content: string """ conn = self.client.get_file(path) content = conn.read() conn.close() return content def get_file_rev(self, path, rev): # get file of a previous version with rev hash_id content = None try: conn = self.client.get_file(path, rev=rev) content = conn.read() conn.close() except ErrorResponse as detail: #print "[get_file_rev] File doesn't exist", detail return None return content def put(self, path, content): """Upload the file Args: path: string content: string, size <= 4MB Returns: None """ from dropbox.rest import ErrorResponse strobj = StringIO(content) try: metadata = self.client.put_file(path, strobj, overwrite=False, autorename=False) except ErrorResponse as e: if e.status == 409: raise ItemAlreadyExists(e.status, e.reason) else: raise APIError(e.status, e.reason) return True def putdir(self, path): self.client.file_create_folder(path) def update(self, path, content): """Update the file Args and returns same as put """ strobj = StringIO(content) metadata = self.client.put_file(path, strobj, overwrite=True) return True def rm(self, path): """Delete the file Args: path: string """ self.client.file_delete(path) def 
rmdir(self, path): self.client.file_delete(path) def metadata(self, path): # only for file, not dir _md = self.client.metadata(path) md = {} md['size'] = _md['bytes'] md['mtime'] = util.convert_time(_md['modified']) return md def delta(self, path=None, cursor=None): resp = self.client.delta(cursor=cursor, path_prefix=path) cursor = resp['cursor'] changes = [] for entry in resp['entries']: event = {} if entry[1]: # we don't care about delete event event['path'] = entry[0] if entry[1]['is_dir']: event['type'] = 'folder' else: event['type'] = 'file' changes.append(event) return cursor, changes def poll(self, path=None, cursor=None, timeout=30): # timeout max 480 import requests import time from error import PollError beg_time = time.time() end_time = beg_time + timeout curr_time = beg_time url = 'https://api-notify.dropbox.com/1/longpoll_delta' params = {} changes = [] if path: path = util.format_path(path) if not cursor: cursor, _ = self.delta(path) curr_time = time.time() while True: params['cursor'] = cursor params['timeout'] = max(30, int(end_time - curr_time)) # minimum 30 second resp = requests.request('GET', url, params=params) obj = resp.json() if 'error' in obj: raise PollError(resp.status_code, resp.text) if obj['changes']: cursor, _delta = self.delta(path, cursor) changes.extend(_delta) if changes: break curr_time = time.time() if curr_time > end_time: break return cursor, changes def init_log(self, path): if not self.exists(path): self.put(path, '') def reset_log(self, path): if self.exists(path): self.rm(path) def append(self, path, msg): self.update(path, msg) def get_logs(self, path, last_clock): length = 5 # latest revision comes first revisions = self.client.revisions(path, rev_limit=length) if not revisions: return [], None new_logs = [] new_clock = revisions[0]['rev'] end = False # if reach to end while True: for metadata in revisions: if last_clock and metadata['rev'] == last_clock: end = True break if end: break if len(revisions) < length: break # 
still have logs unread, double the length length *= 2 revisions = self.client.revisions(path, rev_limit=length) # download the content of unseen rev for metadata in revisions: if last_clock and metadata['rev'] == last_clock: break if 'is_deleted' in metadata and metadata['is_deleted']: continue msg = self.get_file_rev(path, metadata['rev']) if len(msg) > 0: new_logs.insert(0, msg) return new_logs, new_clock def __msg_index(self, fn): return eval(fn[3:]) def init_log2(self, path): if not self.exists(path): self.putdir(path) def append2(self, path, msg): path = util.format_path(path) lst = sorted(self.listdir(path)) if lst: index = self.__msg_index(lst[-1]) + 1 else: index = 0 while True: fn = 'msg%d' % index fpath = path + '/' + fn try: self.put(fpath, msg) except ItemAlreadyExists: index += 1 else: break def get_logs2(self, path, last_clock): path = util.format_path(path) lst = self.listdir(path) if not lst: return [], None srt = {} for fn in lst: srt[self.__msg_index(fn)] = fn lst = [srt[i] for i in sorted(srt.keys(), reverse=True)] new_logs = [] new_clock = self.__msg_index(lst[0]) for fn in lst: if last_clock == None and self.__msg_index(fn) == last_clock: break msg = self.get(path + '/' + fn) new_logs.insert(0, msg) return new_logs, new_clock def share(self, path, target_email): url = "https://www.dropbox.com/" print 'Get access token from Dropbox' print 'Open auth url:', url browser = webdriver.PhantomJS(service_log_path=os.path.join(tempfile.gettempdir(), 'ghostdriver.log'), service_args=['--ignore-ssl-errors=true', '--ssl-protocol=tlsv1']) browser.get(url) try: wait = WebDriverWait(browser, 30) btn = wait.until(EC.element_to_be_clickable((By.XPATH, "//div[@id='sign-in']/a"))) btn.click() email = wait.until(EC.element_to_be_clickable((By.XPATH, "//input[@id='login_email']"))) email.send_keys(raw_input("Enter your Dropbox email:")) pwd = browser.find_element_by_xpath("//input[@id='login_password']") pwd.send_keys(getpass.getpass("Enter your Dropbox 
password:"******"//a[text()='%s']" % path))) target_folder.click() wait.until(EC.title_contains("%s" % path)) share_btn = browser.find_element_by_xpath("//a[@id='global_share_button']") share_btn.click() target = wait.until(EC.element_to_be_clickable((By.XPATH, "//form[@class='invite-more-form']//input[@spellcheck][@type='text']"))) target.send_keys(target_email) confirm_btn = browser.find_element_by_xpath("//form[@class='invite-more-form']//input[@type='button'][1]") confirm_btn.click() except: print(browser.title) assert False # print(browser.current_url) # print(browser.page_source) pass
def update_filetree():
    """Refresh the cached Dropbox file tree for the logged-in user.

    Walks the /delta stream from the stored cursor.  Small change sets
    are applied straight to the DB; large ones are batched in memory and
    flushed once at the end.  Returns a dict with the 'changed' flag,
    the new 'cursor', and the depth-pruned 'tree'.
    """
    if 'access_token' not in session:
        abort(400)
    client = DropboxClient(session['access_token'])
    user_id = session['user_id']
    has_more = True
    cursor = DBC.get_delta_cursor(user_id)
    changed = False
    # if we do work in memory, keep a flag so
    # we know to consolidate the work we did
    # in memory and save it to the DB
    do_work_in_memory = False
    memcache = {'tree': None, 'tab': None}
    while has_more:
        delta = client.delta(cursor)
        # A 'reset' delta means our cached state is stale; drop it.
        if delta['reset'] is True:
            DBC.clear(user_id)
        if len(delta['entries']) > 0:
            changed = True
        entries = delta['entries']
        # do we want to batch our work in memory, or flush directly to the DB?
        # depends on the number of entries; see DELTA_DO_WORK_IN_MEMORY_THRESHOLD
        if do_work_in_memory or len(entries) > DELTA_DO_WORK_IN_MEMORY_THRESHOLD:
            do_work_in_memory = True
            # Lazily load the current tree and its path index exactly once.
            if memcache['tree'] is None:
                memcache['tree'] = DBC.read(user_id)
                memcache['tab'] = build_index_table(memcache['tree'])
            process_delta_entries_in_memory(entries, memcache['tab'])
            # print "processed %s entries in memory, deferring DB write..." % len(entries)
        else:
            for entry in entries:
                [path, metadata] = entry
                # None metadata marks a deletion.
                if metadata is None:
                    DBC.delete_path(user_id, path)
                else:
                    DBC.update_path(user_id, metadata['path'], metadata)
            # print "processed %s entries by directly updating DB" % len(entries)
        has_more = delta['has_more']
        cursor = delta['cursor']
    tree = None
    if do_work_in_memory:
        # flush our in-memory tree into the DB
        DBC.overwrite(user_id, memcache['tree'], cursor)
        tree = prune(memcache['tree'], MAX_DIRECTORY_DEPTH)
    else:
        # only update the cursor
        DBC.set_delta_cursor(user_id, cursor)
        tree = DBC.read(session['user_id'], MAX_DIRECTORY_DEPTH)
    result = {'changed': changed, 'cursor': cursor, 'tree': tree}
    return result
class DropPy:
    """Convenience wrapper around a Dropbox Core API ``DropboxClient``.

    Caches the OAuth2 access token on disk (``<key_save>/.droppy``) and
    exposes download / upload / delta / move / sync helpers that are all
    rooted at a configurable Dropbox subdirectory.
    """

    def __init__(self, directory="/", key=None, secret=None, key_save="./", cursor=None):
        """Intialize a Dropbox connection, at directory specified or root.

        directory -- location to consider root, relative to Dropbox root.
        key, secret -- Dropbox app key / secret used for the OAuth flow.
        key_save -- local directory where the access token is cached.
        cursor -- optional previously-saved /delta cursor.

        Any exceptions during the authorization process are not caught.
        See https://www.dropbox.com/developers/core/docs/python
        """
        self.key = key
        self.secret = secret
        self.key_save = key_save
        # Format of Dropbox's "modified" timestamps (RFC-1123 style).
        self._dropbox_date = "%a, %d %b %Y %H:%M:%S %z"
        access_token = self._auth()
        self.cursor = cursor
        # Normalize so self.directory always ends with a trailing slash.
        if directory[-1] != "/":
            directory += "/"
        self.directory = directory
        self.client = DropboxClient(access_token)

    def _auth(self):
        """Attempts to load an access token from key_save.

        If unavailable, will guide the user through interactive
        authentication (prints a URL, reads the code from stdin) and
        caches the resulting token on disk.
        """
        pathname = _path(self.key_save, ".droppy")
        if path.exists(pathname):
            try:
                with open(pathname) as token:
                    access_token = token.read()
                return access_token
            except:
                # If this fails for any reason, just have them reauth
                pass
        client = DropboxOAuth2FlowNoRedirect(self.key, self.secret)
        auth_url = client.start()
        print("Visit for authorization:\n{}".format(auth_url))
        auth_code = input("Enter the authorization key: ")
        access_token, user_id = client.finish(auth_code)
        self._writeToken(access_token)
        return access_token

    def _writeToken(self, access_token=""):
        """Writes the access token to specified key location."""
        pathname = _path(self.key_save, ".droppy")
        with open(pathname, "w+") as token_file:
            token_file.write(access_token)

    def _get_dropbox_datetime(self, date_string):
        # Parse a Dropbox "modified" string into a datetime.
        return datetime.strptime(date_string, self._dropbox_date)

    def _set_dropbox_datetime(self, datetime_obj):
        # Render a datetime back into Dropbox's timestamp format.
        return datetime_obj.strftime(self._dropbox_date)

    def logout(self):
        """Destroys the current access token. The user will have to reauth."""
        self.client.disable_access_token()

    def account_info(self):
        """Returns account info such as quota, email and display name."""
        return self.client.account_info()

    def download(self, target, to="./", rev=None, start=None, length=None):
        """Downloads the current file to the specified, or local, directory

        target The path to the file that will be downloaded. If the first
            character is the forward slash ("/"), it will ignore the
            relative path of the DropPy instance, and instead begin from
            the Dropbox root
        to The local directory to download the file to. Defaults to
            current directory
        rev Optional previous rev value of the file to be downloaded.
        start Optional byte value from which to start downloading.
        length Optional length in bytes for partially downloading the
            file. If length is specified but start is not, then the last
            length bytes will be downloaded.

        Raises
            400: Bad request (may be due to many things; check e.error
                 for details).
            404: No file was found at the given path, or the file that
                 was there was deleted.
            200: Request was okay but response was malformed in some way.
        """
        filename = target.split("/").pop()
        target = _path(self.directory, target)
        with open(_path(to, filename), "wb") as out:
            with self.client.get_file(target, rev, start, length) as download:
                out.write(download.read())

    def upload_chunked(self, fd, to=None, length=None):
        """Creates a chunked uploader

        If the file exists on the server, another is uploaded with (#)
        as a suffix

        fd File object from which the data will be sourced from
        to Optional path to upload to. Defaults to initialized directory
        length The number of bytes to upload. Defaults to full file.
        """
        if length is None:
            length = path.getsize(fd.name)
        if to is None:
            to = self.directory
        # NOTE(review): when `to` defaulted to self.directory above, this
        # joins self.directory onto itself -- confirm _path() tolerates
        # the doubled prefix, otherwise the default upload target is wrong.
        to = _path(self.directory, to)
        filename = path.split(fd.name)[1]
        if length < 1:
            # Empty file: a chunked uploader can't upload zero bytes.
            self.client.put_file(_path(to, filename), fd)
        else:
            uploader = self.client.get_chunked_uploader(fd, length)
            while uploader.offset < length:
                uploader.upload_chunked()
            uploader.finish(_path(to, filename))

    def delta(self):
        """Retreive delta information from Dropbox.

        Allows you to monitor for changes. First change, cursor of None,
        returns all files. Subsequent calls, with cursor provided by
        previous calls, will provide changed files.

        Updates self.cursor as pages are consumed, and returns the
        concatenated entries from every page.
        """
        result = self.client.delta(self.cursor, _trim_d(self.directory))
        self.cursor = result["cursor"]
        entries = result["entries"]
        # Keep paging until Dropbox reports there is nothing more.
        while result["has_more"]:
            result = self.client.delta(self.cursor, _trim_d(self.directory))
            self.cursor = result["cursor"]
            entries = entries + result["entries"]
        return entries

    def longpoll(self):
        # Not implemented yet.
        pass

    def move(self, source, destination):
        """Moves a file from one place to another.

        Both source and destination are relative to initalized folder,
        unless preceded by directory altering prefix (eg. "/", "../",
        "../newFolder")

        source Origin of the file to move
        destination Place to move the file to

        Raises
            400: Bad request (may be due to many things; check e.error
                 for details).
            403: An invalid move operation was attempted (e.g. there is
                 already a file at the given destination, or moving a
                 shared folder into a shared folder).
            404: No file was found at given from_path.
            503: User over storage quota.
        """
        source = _path(self.directory, source)
        destination = _path(self.directory, destination)
        self.client.file_move(source, destination)

    def get_remote_files(self, directory="", deleted=False):
        """Recursively list metadata entries under `directory`.

        Returns a flat list containing both file and folder entries;
        deleted entries are included when `deleted` is True.
        """
        remote_path = _path(self.directory, directory)
        metadata = self.client.metadata(remote_path, include_deleted=deleted)
        remote_files = metadata["contents"]
        # Recurse into subfolders, appending their contents to the flat list.
        for item in remote_files:
            if item["is_dir"]:
                remote_files = remote_files + self.get_remote_files(item["path"], deleted)
        return remote_files

    def sync(self, local=getcwd(), deleted=False, hidden=False):
        """Syncs the local file system to the remote file system.

        By default, will not delete any files that differ, only add new
        files.

        local The local file directory to recusively sync with the
            remote. Default ./
        deleted Delete local files to keep in sync. Does not delete
            remote files as it may not be running 24/7, tracking
            deletions. Default False
        hidden Include hidden files in sync. Default False

        Raises ValueError if `local` is not a directory.
        """
        local = path.abspath(local)
        if not path.isdir(local):
            raise ValueError("sync requires local to be a directory.")
        if local[-1] != "/":
            local += "/"
        # Map of path-relative-to-local -> mtime for everything on disk.
        local_files = {}
        for item in list(Path(local).glob("**/*")):
            # Skip dotfiles/dot-directories unless hidden sync is requested.
            if not hidden and ( search("/\.\w+", str(item)) \
                    or match("\.\w+", str(item)) ):
                continue
            local_files[str(item)[len(local):]] = { "mtime": int(item.stat().st_mtime) }
        remote_files_meta = self.get_remote_files(deleted=deleted)
        remote_files = {}
        remote_dirs = []
        for item in remote_files_meta:
            isDeleted = "is_deleted" in item and item["is_deleted"]
            i = item["path"]
            mtime = self._get_dropbox_datetime(item["modified"])
            if not hidden and (search("/\.\w+", str(i)) or match("\.\w+", str(i))):
                continue
            # Dropbox is not case sensitive, so make sure we preserve for local
            if i.startswith(self.directory) or i.startswith(self.directory.lower()):
                i = i[len(self.directory):]
            if item["is_dir"]:
                remote_dirs.append(i)
            # NOTE(review): strftime("%s") is a non-portable glibc extension
            # (epoch seconds) -- confirm this runs only on platforms that
            # support it.
            remote_files[i] = { "mtime": int(mtime.strftime("%s")), "deleted": isDeleted }
        # Remote entries we don't have locally and that aren't deleted.
        download = sorted([item for item in remote_files if item not in \
                local_files and not remote_files[item]["deleted"]])
        # Remote entries marked deleted that still exist locally.
        delete = sorted([item for item in remote_files if item in \
                local_files and remote_files[item]["deleted"]])
        # NOTE(review): upload direction is intentionally disabled for now.
        #upload = sorted([item for item in local_files if item not in remote_files])
        #for item in upload:
        #    item_path = local / Path(item)
        #
        #    if item_path.is_dir():
        #        self.client.file_create_folder(_path(self.directory, item))
        #    else:
        #        parts = [part for part in item_path.parts if part not in Path(local).parts]
        #        to = "/".join(parts[:-1])
        #        with open(str(item_path), "rb") as f:
        #            self.upload_chunked(f,to=to)
        for item in download:
            item_path = Path(item)
            if item in remote_dirs:
                mkdir(_path(local, item))
            else:
                parts = item_path.parts
                to = _path(local, "/".join(parts[:-1]))
                self.download(_path(self.directory + item), to=to)
        for item in delete:
            p = Path(local) / item
            if p.is_dir():
                rmtree( str(p) )
            elif p.is_file():
                remove( str(p) )
from dropbox.client import DropboxClient
from pprint import pprint
from time import sleep
import os


def main():
    """Poll Dropbox /delta forever, pretty-printing every changed entry.

    Reads the OAuth2 access token from the DROPBOX_ACCESS_TOKEN
    environment variable and exits with a clear message if it is unset.
    """
    # SECURITY: this token used to be hard-coded in source. A committed
    # access token is a leaked credential and must be revoked; read it
    # from the environment instead.
    token = os.environ.get("DROPBOX_ACCESS_TOKEN")
    if not token:
        raise SystemExit("Set the DROPBOX_ACCESS_TOKEN environment variable.")

    client = DropboxClient(token)
    cursor = None
    while True:
        # Drain every page of the current delta before sleeping.
        has_more = True
        while has_more:
            result = client.delta(cursor)
            for path, metadata in result["entries"]:
                pprint(path)
                pprint(metadata)
            cursor = result["cursor"]
            has_more = result["has_more"]
        sleep(1)


if __name__ == "__main__":
    main()