def record_fileprogress(done, total):
    """Persist upload progress for the file currently being transferred.

    :param done: bytes transferred so far.
    :param total: total file size in bytes.

    Writes the integer percentage onto ``root['current_file']`` (when a
    transfer is being tracked) and commits the transaction.
    """
    db.update()
    # Guard zero-length files: the original divided by ``total`` and
    # raised ZeroDivisionError; treat an empty file as fully done.
    if total:
        percent = int((float(done) / float(total)) * 100)
    else:
        percent = 100
    root = db.get()
    if 'current_file' in root:
        root['current_file']['percent'] = percent
    transaction.commit()
def get_folder_node(folder):
    """Walk (and lazily create) the cached remote-folder tree for *folder*.

    :param folder: local path; every component after the first segment
        maps to one level of the remote index.
    :returns: the index node (an ``OOBTree``) for the deepest component.

    NOTE(review): assumes '/' path separators -- confirm callers never
    pass Windows-style paths.
    """
    root = db.get()
    current = root['index']
    # Skip the leading segment; descend one path component at a time.
    for part in folder.split('/')[1:]:
        found = False
        for node in current.get('children', {}).values():
            if node['name'] == part:
                current = node
                found = True
                break
        if not found:
            # parents = current['parents'] + [current['id']]
            # Component missing from the local index: create the folder
            # remotely under the current node.
            new = api.call('nodes', 'metadata', 'POST', {
                'name': part,
                'kind': 'FOLDER',
                'parents': [current['id']]
            }).json()
            if new.get('code') == 'NAME_ALREADY_EXISTS':
                # It already exists server-side (e.g. created out-of-band);
                # fetch the existing node's metadata instead.
                _id = _get_id(new)
                new = api.call('nodes/' + _id, 'metadata').json()
            db.update()
            # Persist the folder into the index as a nested OOBTree.
            new = OOBTree(new)
            new['children'] = OOBTree()
            if 'children' not in current:
                current['children'] = OOBTree()
            current['children'][new['name']] = new
            current = new
    return current
def record_stats(root, counts):
    """Store the counters from the latest sync run on *root*.

    Copies *counts* into ``root['stats']`` (created on first use) and
    stamps the run time.  Does not commit; the caller commits.
    """
    db.update()
    root['last_run'] = counts
    if 'stats' not in root:
        root['stats'] = OOBTree({'uploaded': 0, 'ignored': 0, 'errored': 0})
    stats = root['stats']
    stats.update(counts)
    stats['last_run_datetime'] = datetime.utcnow().isoformat()
def record_action(root, what):
    """Record the action currently in progress, with a UTC timestamp."""
    db.update()
    timestamp = datetime.utcnow().isoformat()
    root['action'] = {'what': what, 'when': timestamp}
    transaction.commit()
def _sync_folder(folder):
    """Recursively sync the local directory *folder* against the remote index.

    Skips dotfiles, paths matching an excluded glob, and files with no
    extension; recurses into subdirectories; tallies the outcome of each
    handled file.

    :param folder: local directory path.
    """
    db.update()
    config = db.get()['config']
    excluded = config['excluded']
    try:
        update_frequency = int(config.get('update_frequency', 60 * 60 * 24))
    except (TypeError, ValueError):
        # Narrowed from a bare except: only a malformed config value
        # should fall back to the once-per-day default.
        update_frequency = 60 * 60 * 24
    folder_node = get_folder_node(folder)
    transaction.commit()
    for filename in os.listdir(folder):
        if filename.startswith('.'):
            continue
        filepath = os.path.join(folder, filename)
        # Skip anything matching one of the configured exclusion globs.
        if any(fnmatch(filepath, pattern) for pattern in excluded):
            continue
        if os.path.isdir(filepath):
            _sync_folder(filepath)
            continue
        if '.' not in filename:
            # Extension-less files are deliberately ignored.
            continue
        result = _handle_file(folder_node, filepath, filename, update_frequency)
        # NOTE(review): ``counts`` is a free variable, presumably a
        # module-level tally mapping -- confirm it is defined elsewhere.
        if result == IGNORED:
            counts['ignored'] += 1
        elif result == UPLOADED:
            counts['uploaded'] += 1
        elif result == ERRORED:
            counts['errored'] += 1
def initialize_db():
    """Seed the persistent DB with the remote root node and a fresh index.

    Does not commit; the caller is responsible for committing.
    """
    root_node = api.get_root_folder()['data'][0]
    root = db.get()
    root['root_node'] = root_node
    index = OOBTree(get_virtual_root(root_node))
    db.update()
    root['index'] = index
def move_node(old_parent, new_parent, node):
    """Re-home *node* from *old_parent* to *new_parent* in the local index."""
    db.update()
    name = node['name']
    del old_parent['children'][name]
    if new_parent['id'] not in node['parents']:
        # NOTE(review): this replaces the whole parent list rather than
        # appending, and when new_parent is already listed the stale
        # parent ids are left untouched -- confirm this is intended.
        node['parents'] = [new_parent['id']]
    if 'children' not in new_parent:
        new_parent['children'] = OOBTree()
    new_parent['children'][name] = node
    transaction.commit()
def record_filestart(filename):
    """Mark *filename* as the file whose upload is starting now."""
    db.update()
    size = os.stat(filename).st_size
    root = db.get()
    root['current_file'] = PersistentDict({
        'filename': filename,
        'percent': 0,
        'filesize': size,
    })
    transaction.commit()
def run(argv=sys.argv):
    """Daemon entry point: sync forever, sleeping five minutes per cycle.

    A failed run is logged and the persisted error list is reset so the
    next cycle starts clean.

    :param argv: unused; kept for the console-script signature.
    """
    while True:
        try:
            _run()
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt and
            # SystemExit can still stop the daemon.
            logger.error('Unknown error running sync', exc_info=True)
            db.update()
            root = db.get()
            root['errored'] = []
            transaction.commit()
        time.sleep(60 * 5)
def record_stats(root, counts):
    """Persist per-run sync counters onto *root* with a UTC run timestamp.

    NOTE(review): an identical ``record_stats`` is defined earlier in
    this module; this later definition shadows it -- consider removing
    one of the two copies.
    """
    db.update()
    root['last_run'] = counts
    if 'stats' not in root:
        zeroed = {'uploaded': 0, 'ignored': 0, 'errored': 0}
        root['stats'] = OOBTree(zeroed)
    root['stats'].update(counts)
    root['stats']['last_run_datetime'] = datetime.utcnow().isoformat()
def build_index():
    """Rebuild the local index by crawling the entire remote folder tree.

    Fetches the remote root, recursively lists the children of every
    folder into nested OOBTrees, stores the result on the DB root, and
    commits.
    """
    root_node = api.get_root_folder()['data'][0]
    root = db.get()
    root['root_node'] = root_node
    index = OOBTree(get_virtual_root(root_node))

    def _crawl(folder):
        # Attach all remote children of *folder*, recursing into sub-folders.
        if 'children' not in folder:
            folder['children'] = OOBTree()
        listing = api.call('nodes/%s/children' % folder['id'], 'metadata').json()
        if len(listing) == 0:
            # Empty response body: nothing to attach here.
            return
        for child in listing['data']:
            child = OOBTree(child)
            folder['children'][child['name']] = child
            if child['kind'] == 'FOLDER':
                _crawl(child)

    _crawl(index)
    db.update()
    root['index'] = index
    transaction.commit()
def record_filedone():
    """Drop the in-progress file record once an upload has finished."""
    db.update()
    root = db.get()
    try:
        del root['current_file']
    except KeyError:
        pass  # no transfer was being tracked
    transaction.commit()
def _handle_file(folder_node, filepath, filename, update_frequency):
    """Upload or refresh one local file under *folder_node*.

    :param folder_node: index node for the containing remote folder.
    :param filepath: full local path of the file.
    :param filename: basename used as the index key.
    :param update_frequency: seconds; used in the staleness check below.
    :returns: IGNORED, UPLOADED, or ERRORED.

    Side effects: stores the resulting remote node in
    ``folder_node['children']`` on success, or appends *filepath* to
    ``root['errored']`` (kept to the last 20 entries) on failure.
    """
    return_with = None
    try:
        if filename in folder_node['children']:
            # File already in the index: decide whether to re-upload.
            node = folder_node['children'][filename]
            if files_match(filepath, node):
                return IGNORED
            updated = parse_date(node['modifiedDate'])
            # NOTE(review): this skips files whose local mtime is MORE
            # than update_frequency newer than the remote copy -- the
            # comparison looks inverted; confirm the intent.
            if os.stat(filepath).st_mtime > (updated.timestamp() + update_frequency):
                return IGNORED
            # before we try, check if it was processing, can't do anything on it if it is...
            if _node_processing(node):
                existing = api.call('nodes/' + node['id'], 'metadata', 'GET').json()
                if _node_processing(existing):
                    # we recheck to see if we can update yet...
                    return IGNORED
            result = overwrite_file(filepath, folder_node, node['id'])
            return_with = UPLOADED
        else:
            md5 = commands.md5(filepath)
            # we don't have file in index, check if it is already uploaded first
            result = api.call('nodes?filters=contentProperties.md5:%s' % md5, endpoint_type='metadata').json()
            found = False
            if len(result['data']) > 0:
                for node in result['data']:
                    # NOTE(review): only the first parent id is checked;
                    # nodes with multiple parents may be missed -- confirm.
                    if node['parents'][0] == folder_node['id']:
                        result = node
                        found = True
                        break
            if not found:
                result = upload_file(filepath, folder_node)
                return_with = UPLOADED
                _id = _get_id(result)
                if result.get('code') == 'NAME_ALREADY_EXISTS':
                    # Name collision on upload: compare against the
                    # existing remote node before overwriting.
                    existing = api.call('nodes/' + _id, 'metadata', 'GET').json()
                    if _get_md5(existing) == md5:
                        # Identical content already present remotely.
                        return_with = IGNORED
                        result = existing
                    else:
                        if _node_processing(existing):
                            # check if it is processing first. We aren't allowed to update...
                            result = existing
                        else:
                            result = overwrite_file(filepath, folder_node, _get_id(result))
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
    # during an upload -- consider narrowing to Exception.
    except:
        logger.error('Unknown error uploading file', exc_info=True)
        result = {}
    if _get_id(result) is None:
        # No usable node id came back: record the failure for the UI.
        db.update()
        root = db.get()
        if 'errored' not in root:
            root['errored'] = PersistentList()
        root['errored'].append(filepath)
        # Keep only the 20 most recent failures.
        # NOTE(review): slicing may replace the PersistentList with a
        # plain list -- confirm change tracking still works afterwards.
        root['errored'] = root['errored'][-20:]
        transaction.commit()
        return ERRORED
    db.update()
    folder_node['children'][filename] = result
    transaction.commit()
    return return_with
def record_action(root, what):
    """Store the name and UTC time of the action now in progress.

    NOTE(review): duplicates an earlier ``record_action`` definition in
    this module; the later definition wins -- consider deleting one copy.
    """
    db.update()
    root['action'] = {
        'what': what,
        'when': datetime.utcnow().isoformat(),
    }
    transaction.commit()