def get_folder_node(folder):
    """Resolve *folder* (a '/'-separated path) to its node in the cached index.

    Walks the persistent index starting at ``db.get()['index']``, one path
    component at a time (the leading empty segment from ``split('/')[1:]``
    is skipped).  Any component missing from the index is created remotely
    via the API and mirrored into the index as an ``OOBTree``.

    Returns the node (an ``OOBTree`` mapping) for the final component.
    """
    root = db.get()
    current = root['index']
    for part in folder.split('/')[1:]:
        # Look for this component among the already-indexed children.
        found = False
        for node in current.get('children', {}).values():
            if node['name'] == part:
                current = node
                found = True
                break
        if not found:
            # Not in the index: create the folder remotely under the
            # current node.
            new = api.call('nodes', 'metadata', 'POST', {
                'name': part,
                'kind': 'FOLDER',
                'parents': [current['id']]
            }).json()
            if new.get('code') == 'NAME_ALREADY_EXISTS':
                # The folder exists remotely but was missing from our
                # index -- fetch its real metadata instead.
                _id = _get_id(new)
                new = api.call('nodes/' + _id, 'metadata').json()
            # Sync the persistent DB before mutating the index.
            db.update()
            new = OOBTree(new)
            new['children'] = OOBTree()
            if 'children' not in current:
                current['children'] = OOBTree()
            current['children'][new['name']] = new
            current = new
    return current
def get_virtual_root(root_node):
    """Return the configured sub-folder node under *root_node*.

    Lists the FOLDER children of *root_node*; if the configured
    ``sub_folder`` already exists among them its node is returned,
    otherwise it is created remotely and the creation response returned.
    """
    query = 'nodes?filters=kind:FOLDER AND parents:%s' % (root_node['id'])
    listing = api.call(query, 'metadata').json()
    existing_names = [entry['name'] for entry in listing['data']]
    sub_folder = db.get()['config']['sub_folder']
    if sub_folder in existing_names:
        # Already present remotely -- hand back the matching node.
        for entry in listing['data']:
            if entry['name'] == sub_folder:
                return entry
    else:
        # Missing: create it as a direct child of the root node.
        return api.call('nodes', 'metadata', 'POST', {
            'name': sub_folder,
            'kind': 'FOLDER',
            'parents': [root_node['id']]
        }).json()
def get_virtual_root(root_node):
    """Fetch (or create) the configured sub-folder directly under *root_node*."""
    listing = api.call(
        'nodes?filters=kind:FOLDER AND parents:%s' % (root_node['id']),
        'metadata').json()
    sub_folder = db.get()['config']['sub_folder']
    # Prefer an existing folder with the configured name.
    match = next(
        (entry for entry in listing['data'] if entry['name'] == sub_folder),
        None)
    if match is not None:
        return match
    # Not found -- create it remotely and return the creation response.
    return api.call('nodes', 'metadata', 'POST', {
        'name': sub_folder,
        'kind': 'FOLDER',
        'parents': [root_node['id']]
    }).json()
def download():
    """Flask endpoint: map the ``id`` query argument to a temporary download URL.

    Fetches the node's metadata with ``tempLink`` requested and returns a
    JSON body of the form ``{"url": <tempLink>}``.
    """
    node_id = f.request.args.get('id')
    metadata = api.call(
        'nodes/' + node_id, 'metadata', 'GET',
        body={'tempLink': 'true'}).json()
    return jsonify(url=metadata['tempLink'])
def overwrite_file(filepath, folder_node, _id):
    """Replace the content of the remote node *_id* with the file at *filepath*.

    Records start/done events with ``stats``, sanitizes quote characters in
    the filename (the API apparently rejects them), and PUTs the file as
    multipart content with deduplication suppressed.

    Returns the parsed JSON response.  ``folder_node`` is accepted for
    signature parity with ``upload_file`` but is not used here.
    """
    stats.record_filestart(filepath)
    filename = filepath.split('/')[-1].replace('"', 'quote').replace("'", 'quote')
    _type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
    # Fix: the original leaked the file handle -- open() was never closed.
    with open(filepath, 'rb') as content:
        result = api.call(
            'nodes/%s/content?suppress=deduplication' % _id,
            method='PUT',
            args={'files': [('content', (filename, content, _type))]}
        ).json()
    stats.record_filedone()
    return result
def process_folder(folder):
    """Recursively mirror the remote children of *folder* into the index.

    Fetches the node's children from the API, stores each one (wrapped in
    an ``OOBTree``) under ``folder['children']`` keyed by name, and recurses
    into sub-folders.
    """
    if 'children' not in folder:
        folder['children'] = OOBTree()
    result = api.call('nodes/%s/children' % folder['id'], 'metadata').json()
    # NOTE(review): this tests the length of the whole response object, not
    # of result['data'] -- if the API always returns a dict with keys, this
    # guard may never fire; confirm the response shape.
    if len(result) == 0:
        return
    # NOTE(review): no pagination handling visible here -- presumably the
    # children listing fits in one response; verify against the API.
    for node in result['data']:
        node = OOBTree(node)
        folder['children'][node['name']] = node
        if node['kind'] == 'FOLDER':
            process_folder(node)
def upload_file(filepath, folder_node):
    """Upload the file at *filepath* as a new FILE node under *folder_node*.

    Records start/done events with ``stats``, sanitizes quote characters in
    the filename, guesses the MIME type, and POSTs metadata plus multipart
    content with deduplication suppressed.

    Returns the parsed JSON response.
    """
    stats.record_filestart(filepath)
    filename = filepath.split('/')[-1].replace('"', 'quote').replace("'", 'quote')
    _type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
    # Fix: the original leaked the file handle -- open() was never closed.
    with open(filepath, 'rb') as content:
        result = api.call(
            'nodes?suppress=deduplication',
            method='POST',
            body={
                'metadata': json.dumps({
                    'name': filename,
                    'kind': 'FILE',
                    'parents': [folder_node['id']]
                })
            },
            body_type='data',
            args={'files': [('content', (filename, content, _type))]}
        ).json()
    stats.record_filedone()
    return result
def process(path, node, parent):
    """Recursively reconcile an index *node* at *path* against the local disk.

    FILE nodes whose local file has disappeared (while their base folder is
    still present, i.e. the disk is mounted) are moved server-side into the
    "deleted" folder.  FOLDER nodes are recursed into.  Paths containing
    'DELETED' are skipped entirely.
    """
    if 'DELETED' in path:
        return
    if node['kind'] == 'FILE':
        # check if the backing file still exists locally
        if not os.path.exists(path):
            valid = False
            for folder in folders:
                if path.startswith(folder):
                    # Only trust the "missing" verdict if the base folder
                    # itself exists (guards against an unmounted volume).
                    valid = os.path.exists(folder)
                    break
            # base folder is valid, so it is really missing
            if valid:
                # XXX move to deleted folder
                deleted_folder = get_folder_node(get_deleted_folder(path))
                resp = api.call('nodes/%s/children' % deleted_folder['id'],
                                method='POST', endpoint_type='metadata',
                                body={
                                    'fromParent': parent['id'],
                                    'childId': node['id']
                                })
                if resp.status_code == 200:
                    move_node(parent, deleted_folder, resp.json())
                elif resp.status_code == 400:
                    data = resp.json()
                    if data['code'] == 'INVALID_PARENT':
                        if data['info']['parentId'] == deleted_folder['id']:
                            # already moved remotely; just update the index
                            move_node(parent, deleted_folder, node)
    else:
        # FOLDER: go through and process each child
        if 'children' not in node:
            return
        for name, child in node['children'].items():
            child_path = os.path.join(path, name)
            process(child_path, child, node)
def upload_file(filepath, folder_node):
    """Upload the file at *filepath* as a new FILE node under *folder_node*.

    Records start/done events with ``stats``, sanitizes quote characters in
    the filename, guesses the MIME type, and POSTs metadata plus multipart
    content with deduplication suppressed.

    Returns the parsed JSON response.
    """
    stats.record_filestart(filepath)
    filename = filepath.split('/')[-1].replace('"', 'quote').replace("'", 'quote')
    _type = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
    # Fix: the original leaked the file handle -- open() was never closed.
    with open(filepath, 'rb') as content:
        result = api.call(
            'nodes?suppress=deduplication',
            method='POST',
            body={
                'metadata': json.dumps({
                    'name': filename,
                    'kind': 'FILE',
                    'parents': [folder_node['id']]
                })
            },
            body_type='data',
            args={'files': [('content', (filename, content, _type))]}
        ).json()
    stats.record_filedone()
    return result
def process(path, node, parent):
    """Reconcile the index entry *node* (located at *path*) with the local disk.

    Missing local FILEs -- when their base folder is still mounted -- are
    moved remotely into the "deleted" folder and the index is updated via
    ``move_node``.  FOLDERs are processed recursively.  Anything under a
    path containing 'DELETED' is left alone.
    """
    if 'DELETED' in path:
        return
    if node['kind'] == 'FILE':
        # check if exists
        if not os.path.exists(path):
            valid = False
            for folder in folders:
                if path.startswith(folder):
                    # Only treat the file as missing when its base folder
                    # is present (protects against an unmounted drive).
                    valid = os.path.exists(folder)
                    break
            # base folder is valid, so it is really missing
            if valid:
                # XXX move to deleted folder
                deleted_folder = get_folder_node(get_deleted_folder(path))
                resp = api.call(
                    'nodes/%s/children' % deleted_folder['id'],
                    method='POST', endpoint_type='metadata',
                    body={
                        'fromParent': parent['id'],
                        'childId': node['id']}
                )
                if resp.status_code == 200:
                    move_node(parent, deleted_folder, resp.json())
                elif resp.status_code == 400:
                    data = resp.json()
                    if data['code'] == 'INVALID_PARENT':
                        if data['info']['parentId'] == deleted_folder['id']:
                            # already moved, now move the node in the index
                            move_node(parent, deleted_folder, node)
    else:
        # go through and process each child
        if 'children' not in node:
            return
        for name, child in node['children'].items():
            child_path = os.path.join(path, name)
            process(child_path, child, node)
def _handle_file(folder_node, filepath, filename, update_frequency):
    """Sync one local file into *folder_node*, updating the persistent index.

    Returns one of IGNORED / UPLOADED / ERRORED.  Known files (present in
    the index) are overwritten when they differ; unknown files are matched
    remotely by md5 first and uploaded only if genuinely new.  On any
    failure the path is appended to the capped ``root['errored']`` list.
    """
    return_with = None
    try:
        if filename in folder_node['children']:
            # File already known to the index.
            node = folder_node['children'][filename]
            if files_match(filepath, node):
                return IGNORED
            updated = parse_date(node['modifiedDate'])
            # NOTE(review): this skips files whose local mtime is *newer*
            # than remote-modified + update_frequency; confirm the intended
            # rate-limiting direction.
            if os.stat(filepath).st_mtime > (updated.timestamp() + update_frequency):
                return IGNORED
            # before we try, check if it was processing, can't do anything
            # on it if it is...
            if _node_processing(node):
                existing = api.call('nodes/' + node['id'], 'metadata', 'GET').json()
                if _node_processing(existing):
                    # we recheck to see if we can update yet...
                    return IGNORED
            result = overwrite_file(filepath, folder_node, node['id'])
            return_with = UPLOADED
        else:
            md5 = commands.md5(filepath)
            # we don't have file in index, check if it is already uploaded
            # first
            result = api.call('nodes?filters=contentProperties.md5:%s' % md5,
                              endpoint_type='metadata').json()
            found = False
            if len(result['data']) > 0:
                for node in result['data']:
                    if node['parents'][0] == folder_node['id']:
                        result = node
                        found = True
                        break
            if not found:
                result = upload_file(filepath, folder_node)
                return_with = UPLOADED
                _id = _get_id(result)
                if result.get('code') == 'NAME_ALREADY_EXISTS':
                    existing = api.call('nodes/' + _id, 'metadata', 'GET').json()
                    if _get_md5(existing) == md5:
                        # Same content already there -- adopt the existing
                        # node instead of re-uploading.
                        return_with = IGNORED
                        result = existing
                    else:
                        if _node_processing(existing):
                            # check if it is processing first. We aren't
                            # allowed to update...
                            result = existing
                        else:
                            result = overwrite_file(filepath, folder_node,
                                                    _get_id(result))
    # Fix: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt; narrow to Exception (still logged, still routed
    # to the ERRORED path below).
    except Exception:
        logger.error('Unknown error uploading file', exc_info=True)
        result = {}
    if _get_id(result) is None:
        # Upload failed: remember the path in a capped error list.
        db.update()
        root = db.get()
        if 'errored' not in root:
            root['errored'] = PersistentList()
        root['errored'].append(filepath)
        root['errored'] = root['errored'][-20:]
        transaction.commit()
        return ERRORED
    db.update()
    folder_node['children'][filename] = result
    transaction.commit()
    return return_with
def download():
    """Flask view: return ``{"url": <tempLink>}`` for the node named by the
    ``id`` query argument, letting the client download content directly.
    """
    # NOTE(review): args.get('id') may be None, which would raise TypeError
    # on the concatenation below -- presumably callers always pass ?id=...
    _id = f.request.args.get('id')
    result = api.call('nodes/' + _id, 'metadata', 'GET',
                      body={'tempLink': 'true'}).json()
    return jsonify(url=result['tempLink'])