def auth_callback():
    """
    http://localhost/?code=ANdNAVhyhqirUelHGEHA&scope=clouddrive%3Aread_all+clouddrive%3Awrite
    """
    db.get()._p_jar.sync()
    code = f.request.args.get('code')
    auth_callback_url = utils.get_url('auth_callback', '/authcallback', True)
    configure_url = utils.get_url('configure_view', '/configure', True)
    api.authorize(code, auth_callback_url)
    return f.redirect(configure_url)

def _sync_folder(folder, counts):
    # counts is shared with sync() and updated in place
    db.update()
    config = db.get()['config']
    excluded = config['excluded']
    try:
        update_frequency = int(config.get('update_frequency', 60 * 60 * 24))
    except (TypeError, ValueError):
        update_frequency = 60 * 60 * 24
    folder_node = get_folder_node(folder)
    transaction.commit()
    for filename in os.listdir(folder):
        if filename[0] == '.':
            continue
        filepath = os.path.join(folder, filename)
        dive_out = False
        for excluded_path in excluded:
            if fnmatch(filepath, excluded_path):
                dive_out = True
                break
        if dive_out:
            continue
        if os.path.isdir(filepath):
            _sync_folder(filepath, counts)
            continue
        if '.' not in filename:
            continue
        result = _handle_file(folder_node, filepath, filename, update_frequency)
        if result == IGNORED:
            counts['ignored'] += 1
        elif result == UPLOADED:
            counts['uploaded'] += 1
        elif result == ERRORED:
            counts['errored'] += 1

def record_fileprogress(done, total):
    db.update()
    percent = int((float(done) / float(total)) * 100)
    root = db.get()
    if 'current_file' in root:
        root['current_file']['percent'] = percent
    transaction.commit()

def sync():
    counts = {'ignored': 0, 'uploaded': 0, 'errored': 0}
    config = db.get()['config']
    for folder in config.get('folders', []):
        if os.path.exists(folder):
            _sync_folder(folder, counts)
    return counts

def get_folder_node(folder):
    root = db.get()
    current = root['index']
    # walk each path segment, creating missing remote folders as we go
    for part in folder.split('/')[1:]:
        found = False
        for node in current.get('children', {}).values():
            if node['name'] == part:
                current = node
                found = True
                break
        if not found:
            # parents = current['parents'] + [current['id']]
            new = api.call('nodes', 'metadata', 'POST', {
                'name': part,
                'kind': 'FOLDER',
                'parents': [current['id']]
            }).json()
            if new.get('code') == 'NAME_ALREADY_EXISTS':
                _id = _get_id(new)
                new = api.call('nodes/' + _id, 'metadata').json()
            db.update()
            new = OOBTree(new)
            new['children'] = OOBTree()
            if 'children' not in current:
                current['children'] = OOBTree()
            current['children'][new['name']] = new
            current = new
    return current

def initialize_db():
    root_node = api.get_root_folder()['data'][0]
    root = db.get()
    root['root_node'] = root_node
    virtual_root = get_virtual_root(root_node)
    index = OOBTree(virtual_root)
    db.update()
    root['index'] = index

def get_node(path):
    node = db.get()['index']
    if path == '/':
        return node
    parts = path.strip('/').split('/')
    for part in parts:
        node = node['children'][part]
    return node

def clean():
    root = db.get()
    config = db.get()['config']
    folders = config.get('folders', [])

    def process(path, node, parent):
        if 'DELETED' in path:
            return
        if node['kind'] == 'FILE':
            # check if exists
            if not os.path.exists(path):
                valid = False
                for folder in folders:
                    if path.startswith(folder):
                        valid = os.path.exists(folder)
                        break
                # base folder is valid, so it is really missing
                if valid:
                    # XXX move to deleted folder
                    deleted_folder = get_folder_node(get_deleted_folder(path))
                    resp = api.call(
                        'nodes/%s/children' % deleted_folder['id'],
                        method='POST', endpoint_type='metadata',
                        body={
                            'fromParent': parent['id'],
                            'childId': node['id']
                        })
                    if resp.status_code == 200:
                        move_node(parent, deleted_folder, resp.json())
                    elif resp.status_code == 400:
                        data = resp.json()
                        if data['code'] == 'INVALID_PARENT':
                            if data['info']['parentId'] == deleted_folder['id']:
                                # already moved, now move the node
                                move_node(parent, deleted_folder, node)
        else:
            # go through and process each child
            if 'children' not in node:
                return
            for name, child in node['children'].items():
                child_path = os.path.join(path, name)
                process(child_path, child, node)

    process('/', root['index'], None)

def get_config_with_defaults():
    root = db.get()
    config = root.get('config', {})
    defaults = deepcopy(_defaults)
    for key in fieldnames:
        value = config.get(key)
        if not value:
            config[key] = defaults[key]
    return config

def record_filestart(filename):
    db.update()
    root = db.get()
    root['current_file'] = PersistentDict({
        'filename': filename,
        'percent': 0,
        'filesize': os.stat(filename).st_size
    })
    transaction.commit()

def browse():
    root = db.get()
    root._p_jar.sync()
    path = f.request.args.get('path')
    if path == '/':
        node = db.get()['index']
    else:
        node = utils.get_node(path)
    data = _export_node(node)
    data['children'] = []
    data['path'] = path
    if path == '/':
        path = ''
    for key, child in node['children'].items():
        child_data = _export_node(child)
        child_data['path'] = path + '/' + key
        data['children'].append(child_data)
    return jsonify(**data)

def run(argv=sys.argv):
    while True:
        try:
            _run()
        except Exception:
            logger.error('Unknown error running sync', exc_info=True)
            db.update()
            root = db.get()
            root['errored'] = []
            transaction.commit()
            time.sleep(60 * 5)

def status():
    root = db.get()
    root._p_jar.sync()
    config = root.get('config', {})
    configured = configurator.valid(config)
    action = root.get('action', {})
    what = action.get('what')
    when = action.get('when')
    current = root.get('current_file')
    return jsonify(
        configured=configured,
        what=what,
        when=when,
        current_file=current,
        errored=root.get('errored', [])[-20:],
        **config)

def _run(argv=sys.argv):
    root = db.get()
    while not api.get_credentials():
        stats.record_action(root, 'Application not authorized')
        time.sleep(5)
    if 'index' not in root:
        initialize_db()
    while True:
        while 'config' not in root:
            time.sleep(5)
            root = db.get()
        if 'metadata' not in root:
            metadata = root['metadata'] = OOBTree()
        else:
            metadata = root['metadata']
        if (time.time() - metadata.get('endpoint_last_retrieved', 0)) > (60 * 60 * 24 * 3):
            api.store_endpoint()
        # reset errors
        root['errored'] = []
        transaction.commit()
        stats.record_action(root, 'Syncing files')
        stats.record_stats(root, sync())
        stats.record_action(root, 'Cleaning files')
        clean()
        stats.record_action(root, 'Packing database')
        storage = db.get_storage()
        storage.pack(time.time(), wait=True)
        stats.record_action(root, 'Taking a break for 10 minutes...')
        time.sleep(60 * 10)

def get_virtual_root(root_node):
    path = 'nodes?filters=kind:FOLDER AND parents:%s' % (root_node['id'])
    virtual_root_resp = api.call(path, 'metadata').json()
    names = [n['name'] for n in virtual_root_resp['data']]
    sub_folder = db.get()['config']['sub_folder']
    if sub_folder not in names:
        return api.call('nodes', 'metadata', 'POST', {
            'name': sub_folder,
            'kind': 'FOLDER',
            'parents': [root_node['id']]
        }).json()
    else:
        for node in virtual_root_resp['data']:
            if node['name'] == sub_folder:
                return node

def index():
    if not api.get_credentials():
        url = utils.get_url('auth_callback', '/authcallback', True)
        return f.redirect(api.get_login_url(url))
    root = db.get()
    root._p_jar.sync()
    config = root.get('config', {})
    configured = configurator.valid(config)
    action = root.get('action', {})
    what = action.get('what')
    when = action.get('when')
    current = root.get('current_file')
    return f.render_template(
        'index.html',
        configured=configured,
        what=what,
        when=when,
        current_file=dict(current) if current else None,
        errored=root.get('errored', [])[-20:],
        **config)

def store_endpoint():
    root = db.get()
    uri = '%s/drive/v1/account/endpoint' % DRIVE_ENDPOINT
    creds = get_credentials()
    resp = requests.get(
        uri,
        headers={'Authorization': 'Bearer ' + creds['access_token']})
    if 'metadata' in root:
        metadata = root['metadata']
    else:
        metadata = root['metadata'] = OOBTree()
    data = resp.json()
    if data.get('message') == 'Token has expired':
        refresh()
        return store_endpoint()
    metadata['endpoint'] = data
    metadata['endpoint_last_retrieved'] = time.time()
    transaction.commit()

def call(path, endpoint_type='content', method='GET', body=None,
         body_type='json', args=None):
    root = db.get()
    metadata = root.get('metadata', {})
    endpoint = metadata.get('endpoint', {})
    # refresh credentials and re-fetch endpoints if the token has expired
    # or no endpoint metadata has been stored yet
    if endpoint.get('message') == 'Token has expired' or metadata == {}:
        refresh()
        store_endpoint()
        root._p_jar.sync()
        metadata = root.get('metadata', {})
        endpoint = metadata.get('endpoint', {})
    if endpoint_type == 'content':
        endpoint = endpoint.get('contentUrl')
    else:
        endpoint = endpoint.get('metadataUrl')
    if not endpoint:
        return
    uri = '%s/%s' % (endpoint.rstrip('/'), path.lstrip('/'))
    creds = get_credentials()
    meth = requests.get
    if method == 'POST':
        meth = requests.post
    elif method == 'PUT':
        meth = requests.put
    if args is None:
        args = {}
    if body:
        args[body_type] = body
    resp = meth(
        uri,
        headers={'Authorization': 'Bearer ' + creds['access_token']},
        **args)
    if resp.status_code == 401:
        # token rejected: refresh credentials and retry the call
        refresh()
        return call(path, endpoint_type, method, body, body_type, args)
    return resp

def configure_view():
    error = False
    saved = False
    if f.request.method == 'POST':
        config = {}
        for field in configurator.fieldnames:
            value = f.request.form.get(field)
            if not value:
                error = True
            else:
                if field in configurator.list_fields:
                    value = value.splitlines()
                config[field] = value
        if not error:
            root = db.get()
            root._p_jar.sync()
            root['config'] = config
            transaction.commit()
            saved = True
    return f.render_template(
        'configure.html',
        error=error,
        saved=saved,
        **configurator.get_config_with_defaults())

def build_index():
    root_node = api.get_root_folder()['data'][0]
    root = db.get()
    root['root_node'] = root_node
    virtual_root = get_virtual_root(root_node)
    index = OOBTree(virtual_root)

    def process_folder(folder):
        if 'children' not in folder:
            folder['children'] = OOBTree()
        result = api.call('nodes/%s/children' % folder['id'], 'metadata').json()
        if len(result) == 0:
            return
        for node in result['data']:
            node = OOBTree(node)
            folder['children'][node['name']] = node
            if node['kind'] == 'FOLDER':
                process_folder(node)

    process_folder(index)
    db.update()
    root['index'] = index
    transaction.commit()

def _handle_file(folder_node, filepath, filename, update_frequency):
    return_with = None
    try:
        if filename in folder_node['children']:
            node = folder_node['children'][filename]
            if files_match(filepath, node):
                return IGNORED
            updated = parse_date(node['modifiedDate'])
            if os.stat(filepath).st_mtime > (updated.timestamp() + update_frequency):
                return IGNORED
            # before we try, check if it was processing, can't do anything on it if it is...
            if _node_processing(node):
                existing = api.call('nodes/' + node['id'], 'metadata', 'GET').json()
                if _node_processing(existing):
                    # we recheck to see if we can update yet...
                    return IGNORED
            result = overwrite_file(filepath, folder_node, node['id'])
            return_with = UPLOADED
        else:
            md5 = commands.md5(filepath)
            # we don't have file in index, check if it is already uploaded first
            result = api.call(
                'nodes?filters=contentProperties.md5:%s' % md5,
                endpoint_type='metadata').json()
            found = False
            if len(result['data']) > 0:
                for node in result['data']:
                    if node['parents'][0] == folder_node['id']:
                        result = node
                        found = True
                        break
            if not found:
                result = upload_file(filepath, folder_node)
                return_with = UPLOADED
                _id = _get_id(result)
                if result.get('code') == 'NAME_ALREADY_EXISTS':
                    existing = api.call('nodes/' + _id, 'metadata', 'GET').json()
                    if _get_md5(existing) == md5:
                        return_with = IGNORED
                        result = existing
                    else:
                        if _node_processing(existing):
                            # check if it is processing first. We aren't allowed to update...
                            result = existing
                        else:
                            result = overwrite_file(filepath, folder_node, _get_id(result))
    except Exception:
        logger.error('Unknown error uploading file', exc_info=True)
        result = {}
    if _get_id(result) is None:
        db.update()
        root = db.get()
        if 'errored' not in root:
            root['errored'] = PersistentList()
        root['errored'].append(filepath)
        root['errored'] = root['errored'][-20:]
        transaction.commit()
        return ERRORED
    db.update()
    folder_node['children'][filename] = result
    transaction.commit()
    return return_with

def store_credentials(data):
    root = db.get()
    root['credentials'] = data
    transaction.commit()

def record_filedone():
    db.update()
    root = db.get()
    if 'current_file' in root:
        del root['current_file']
    transaction.commit()

def get_credentials():
    root = db.get()
    if 'credentials' in root:
        return root['credentials']