def run_scan():
    """Scan <libraryPath>/Books for book files and register unknown ones.

    First drops records for files that no longer exist (remove_missing),
    then walks the Books folder, collecting files whose extension is in
    `extensions`, and creates a File record (via add_file) for every file
    whose media id is not yet in the database.  Emits a 'change' event at
    the end.  Aborts early whenever the task queue loses its connection.
    """
    remove_missing()
    prefs = settings.preferences
    prefix = os.path.join(os.path.expanduser(prefs["libraryPath"]), "Books" + os.sep)
    # endswith is safe even for an empty string, unlike prefix[-1]
    if not prefix.endswith(os.sep):
        prefix += os.sep
    books = []
    for root, folders, files in os.walk(prefix):
        for name in files:
            if not state.tasks.connected:
                return
            # skip hidden files (covers macOS '._*' and '.DS_Store')
            if name.startswith("."):
                continue
            path = os.path.join(root, name)
            ext = path.split(".")[-1]
            if ext in extensions:
                books.append(path)
    position = 0
    added = 0
    for path in ox.sorted_strings(books):
        if not state.tasks.connected:
            return
        position += 1
        with db.session():
            # renamed from `id` to avoid shadowing the builtin
            file_id = media.get_id(path)
            file = File.get(file_id)
            if not file:
                file = add_file(file_id, path, prefix, path)
                added += 1
    trigger_event("change", {})
def run_scan():
    """Scan <libraryPath>/Books for book files and register unknown ones.

    Drops records for missing files, walks the Books folder collecting
    files whose extension is in `extensions`, and creates a File record
    (via add_file) for each file whose media id is not yet known.  Emits a
    'change' event when done; returns early if the task queue disconnects.
    """
    remove_missing()
    prefs = settings.preferences
    prefix = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
    # endswith is safe even for an empty string, unlike prefix[-1]
    if not prefix.endswith(os.sep):
        prefix += os.sep
    books = []
    for root, folders, files in os.walk(prefix):
        for name in files:
            if not state.tasks.connected:
                return
            # skip hidden files (covers macOS '._*' and '.DS_Store')
            if name.startswith('.'):
                continue
            path = os.path.join(root, name)
            ext = path.split('.')[-1]
            if ext in extensions:
                books.append(path)
    position = 0
    added = 0
    for path in ox.sorted_strings(books):
        if not state.tasks.connected:
            return
        position += 1
        with db.session():
            # renamed from `id` to avoid shadowing the builtin
            file_id = media.get_id(path)
            file = File.get(file_id)
            if not file:
                file = add_file(file_id, path, prefix, path)
                added += 1
    trigger_event('change', {})
def apply_change(cls, user, change, trigger=True):
    """Apply one replicated change tuple (revision, timestamp, data) for user.

    The change is applied only if its revision is at least the next
    expected revision for this user; the payload's first element selects an
    `action_<name>` handler.  On success the change row is persisted and,
    if `trigger` is set, a 'change' event is emitted.

    Returns True when the change was processed, False on revision mismatch.
    """
    revision, timestamp, data = change
    last = cls.query.filter_by(
        user_id=user.id).order_by('-revision').first()
    next_revision = last.revision + 1 if last else 0
    if revision >= next_revision:
        c = cls()
        c.created = datetime.utcnow()
        c.timestamp = timestamp
        c.user_id = user.id
        c.revision = revision
        c.data = data
        args = json.loads(data)
        logger.debug('apply change from %s: %s', user.name, args)
        # dispatch to the action_<name> handler encoded in the payload
        if getattr(c, 'action_' + args[0])(user, timestamp, *args[1:]):
            logger.debug('change applied')
            state.db.session.add(c)
            state.db.session.commit()
        if trigger:
            trigger_event('change', {})
        return True
    else:
        # fixed log typo: 'revsion' -> 'revision'
        logger.debug('revision does not match! got %s expecting %s',
                     revision, next_revision)
        return False
def download(self, item):
    """Download item's file from this node and hand it to item.save_file().

    Streams <node url>/get/<id>, periodically updating the Transfer
    progress, emitting 'transfer' events, and feeding bandwidth accounting.
    Returns the result of item.save_file(content), or False on any error.
    """
    from item.models import Transfer
    self.resolve()
    url = '%s/get/%s' % (self.url, item.id)
    headers = {
        'X-Node-Protocol': settings.NODE_PROTOCOL,
        'User-Agent': settings.USER_AGENT,
    }
    t1 = datetime.utcnow()
    logger.debug('download %s', url)
    self._opener.addheaders = list(zip(headers.keys(), headers.values()))
    try:
        r = self._opener.open(url, timeout=self.TIMEOUT * 2)
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
        logger.debug('openurl failed %s', url, exc_info=1)
        return False
    if r.getcode() != 200:
        logger.debug('FAILED %s', url)
        return False
    try:
        fileobj = r
        if r.headers.get('content-encoding', None) == 'gzip':
            fileobj = gzip.GzipFile(fileobj=r)
        content = b''
        ct = datetime.utcnow()
        size = 0
        since_ct = 0  # was unbound after an empty body; define up front
        for chunk in iter(lambda: fileobj.read(16 * 1024), b''):
            content += chunk
            size += len(chunk)
            since_ct = (datetime.utcnow() - ct).total_seconds()
            if since_ct > 1:
                ct = datetime.utcnow()
                t = Transfer.get(item.id)
                t.progress = len(content) / item.info['size']
                t.save()
                trigger_event('transfer', {
                    'id': item.id, 'progress': t.progress
                })
                if state.bandwidth:
                    state.bandwidth.download(size / since_ct)
                    size = 0
        # flush bandwidth accounting for the trailing partial interval;
        # guard against since_ct being 0 (empty or instantaneous body)
        if state.bandwidth and since_ct:
            state.bandwidth.download(size / since_ct)
            size = 0
        t2 = datetime.utcnow()
        duration = (t2 - t1).total_seconds()
        if duration:
            self.download_speed = len(content) / duration
            logger.debug('SPEED %s', ox.format_bits(self.download_speed))
        return item.save_file(content)
    except Exception:
        logger.debug('download failed %s', url, exc_info=1)
        return False
def cancelImport(data):
    """Flag the running import as canceled and broadcast a 'canceled' status."""
    state.activity['cancel'] = True
    canceled = {
        'activity': 'import',
        'progress': [0, 0],
        'status': {'code': 200, 'text': 'canceled'},
    }
    trigger_event('activity', canceled)
    return {}
def update(self):
    """Publish the per-second up/down byte counters and reschedule itself.

    Emits a 'bandwidth' event only when the numbers changed since the last
    tick, then resets the counters and re-arms via state.main.call_later.
    """
    current = {'up': self.up, 'down': self.down}
    if current != self._last:
        trigger_event('bandwidth', current)
        self._last = current
    self.up = 0
    self.down = 0
    state.main.call_later(1, self.update)
def api_removePeering(user_id, message):
    """Handle an incoming removePeering request from a remote peer.

    Returns True if the user was found and peering removed, else False.
    """
    user = User.get(user_id)
    if not user:
        return False
    # consistency fix: api_rejectPeering/api_acceptPeering guard against
    # user.info being unset before writing into it; this one did not
    if not user.info:
        user.info = {}
    user.info['message'] = message
    user.update_peering(False)
    trigger_event('peering.remove', user.json())
    return True
def download(self, item):
    """Download item's file from this node and hand it to item.save_file().

    Streams <node url>/get/<id>, periodically updating the Transfer
    progress, emitting 'transfer' events, and feeding bandwidth accounting.
    Returns the result of item.save_file(content), or False on any error.
    """
    from item.models import Transfer
    self.resolve()
    url = '%s/get/%s' % (self.url, item.id)
    headers = {
        'X-Node-Protocol': settings.NODE_PROTOCOL,
        'User-Agent': settings.USER_AGENT,
    }
    t1 = datetime.utcnow()
    logger.debug('download %s', url)
    self._opener.addheaders = list(zip(headers.keys(), headers.values()))
    try:
        r = self._opener.open(url, timeout=self.TIMEOUT * 2)
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
        logger.debug('openurl failed %s', url, exc_info=1)
        return False
    if r.getcode() != 200:
        logger.debug('FAILED %s', url)
        return False
    try:
        fileobj = r
        if r.headers.get('content-encoding', None) == 'gzip':
            fileobj = gzip.GzipFile(fileobj=r)
        content = b''
        ct = datetime.utcnow()
        size = 0
        since_ct = 0  # was unbound after an empty body; define up front
        for chunk in iter(lambda: fileobj.read(16 * 1024), b''):
            content += chunk
            size += len(chunk)
            since_ct = (datetime.utcnow() - ct).total_seconds()
            if since_ct > 1:
                ct = datetime.utcnow()
                t = Transfer.get(item.id)
                t.progress = len(content) / item.info['size']
                t.save()
                trigger_event('transfer', {
                    'id': item.id, 'progress': t.progress
                })
                if state.bandwidth:
                    state.bandwidth.download(size / since_ct)
                    size = 0
        # flush bandwidth accounting for the trailing partial interval;
        # guard against since_ct being 0 (empty or instantaneous body)
        if state.bandwidth and since_ct:
            state.bandwidth.download(size / since_ct)
            size = 0
        t2 = datetime.utcnow()
        duration = (t2 - t1).total_seconds()
        if duration:
            self.download_speed = len(content) / duration
            logger.debug('SPEED %s', ox.format_bits(self.download_speed))
        return item.save_file(content)
    except Exception:
        logger.debug('download failed %s', url, exc_info=1)
        return False
def api_rejectPeering(user_id, message):
    """Handle an incoming rejectPeering request from a remote peer.

    Returns True if the user was found and peering torn down, else False.
    """
    user = User.get(user_id)
    if not user:
        return False
    if not user.info:
        user.info = {}
    user.info['message'] = message
    user.update_peering(False)
    trigger_event('peering.reject', user.json())
    return True
def apply_changes(cls, user, changes):
    """Apply a batch of changes for `user`, stopping at the first failure.

    Emits a single 'change' event after a non-empty batch applies cleanly.
    Returns False as soon as one change fails.  (The original had an
    unreachable `return False` after `break` and so always returned True,
    even when a change failed; the dead code shows the intended result.)
    """
    for change in changes:
        if not cls.apply_change(user, change, trigger=False):
            logger.debug('FAIL %s', change)
            return False
    if changes:
        trigger_event('change', {})
    return True
def apply_changes(cls, user, changes):
    """Apply a batch of changes for `user`, stopping at the first failure.

    Emits a single 'change' event after a non-empty batch applies cleanly.
    Returns False as soon as one change fails.  (The original had an
    unreachable `return False` after `break` and so always returned True,
    even when a change failed; the dead code shows the intended result.)
    """
    for change in changes:
        if not cls.apply_change(user, change, trigger=False):
            logger.debug('FAIL %s', change)
            return False
    if changes:
        trigger_event('change', {})
    return True
def cancelImport(data):
    """Flag the running import as canceled and broadcast a 'canceled' status."""
    state.activity['cancel'] = True
    trigger_event('activity', {
        'activity': 'import',
        'progress': [0, 0],
        'status': {'code': 200, 'text': 'canceled'},
    })
    return {}
def update_online():
    """Recompute online state from tor and broadcast it when it changes.

    Also pushes our user id to the directory service once, tracked by the
    'migrated_id' flag in the server settings.
    """
    online = state.tor and state.tor.is_online()
    if online != state.online:
        state.online = online
        trigger_event('status', {
            'id': settings.USER_ID,
            'online': state.online,
        })
    # NOTE(review): reconstructed at function level so the push is retried
    # on every call until it succeeds — confirm against upstream source
    if not settings.server.get('migrated_id', False):
        r = directory.put(settings.sk, {
            'id': settings.USER_ID,
        })
        logger.debug('push id to directory %s', r)
        if r:
            settings.server['migrated_id'] = True
def api_acceptPeering(user_id, username, message):
    """Handle an incoming acceptPeering: complete a peering we requested.

    Returns True when peering is established (or already was), else False.
    """
    user = User.get(user_id)
    # bug fix: the original logged user.pending before checking `if user`,
    # raising AttributeError for an unknown user_id
    if not user:
        return False
    logger.debug('incoming acceptPeering event: pending: %s', user.pending)
    if user.pending == 'sent':
        if not user.info:
            user.info = {}
        user.info['username'] = username
        user.info['message'] = message
        user.update_name()
        user.update_peering(True, username)
        state.nodes.queue('add', user.id)
        trigger_event('peering.accept', user.json())
        return True
    elif user.peered:
        return True
    return False
def api_requestPeering(user_id, username, message):
    """Handle an incoming requestPeering from a remote peer.

    If we had already sent our own request, treat the incoming one as an
    acceptance; otherwise mark the peering as pending/received.  Returns
    False when the users are already peered, True otherwise.
    """
    user = User.get_or_create(user_id)
    if not user.info:
        user.info = {}
    if user.peered:
        return False
    if user.pending == 'sent':
        # both sides requested peering: complete it
        user.info['message'] = message
        user.update_peering(True, username)
        user.update_name()
    else:
        user.pending = 'received'
        user.info['username'] = username
        user.info['message'] = message
        user.update_name()
    user.save()
    trigger_event('peering.request', user.json())
    return True
def peering(self, action):
    """Send a peering action to this node and update local peering state.

    `action` is one of requestPeering/acceptPeering/cancelPeering/
    rejectPeering/removePeering.  A failed remote request is only logged.
    Always returns True.
    """
    u = self.user
    if action in ('requestPeering', 'acceptPeering'):
        r = self.request(action, settings.preferences['username'], u.info.get('message'))
    else:
        r = self.request(action, u.info.get('message'))
    # idiom fix: compare against None with `is not`, not `!=`
    if r is not None:
        u.queued = False
        if 'message' in u.info:
            del u.info['message']
        u.save()
    else:
        logger.debug('peering failed? %s %s', action, r)
    if action in ('cancelPeering', 'rejectPeering', 'removePeering'):
        self.online = False
    else:
        self.go_online()
    trigger_event('peering.%s' % action.replace('Peering', ''), u.json())
    return True
def peering(self, action):
    """Send a peering action to this node and update local peering state.

    `action` is one of requestPeering/acceptPeering/cancelPeering/
    rejectPeering/removePeering.  A failed remote request is only logged.
    Always returns True.
    """
    u = self.user
    if action in ('requestPeering', 'acceptPeering'):
        r = self.request(action, settings.preferences['username'], u.info.get('message'))
    else:
        r = self.request(action, u.info.get('message'))
    # idiom fix: compare against None with `is not`, not `!=`
    if r is not None:
        u.queued = False
        if 'message' in u.info:
            del u.info['message']
        u.save()
    else:
        logger.debug('peering failed? %s %s', action, r)
    if action in ('cancelPeering', 'rejectPeering', 'removePeering'):
        self.online = False
    else:
        self.go_online()
    trigger_event('peering.%s' % action.replace('Peering', ''), u.json())
    return True
def save_file(self, content):
    """Store downloaded bytes for this item and finalize the transfer.

    Verifies the content hashes to this item's id, creates the File record
    and on-disk file if neither exists yet, records the addition in the
    Changelog, and emits a final 'transfer' event.  Returns True when the
    file was written, False on id mismatch or when the file/path already
    exists.
    """
    u = state.user()
    f = File.get(self.id)
    content_id = media.get_id(data=content)
    if content_id != self.id:
        logger.debug('INVALID CONTENT %s vs %s', self.id, content_id)
        return False
    if not f:
        path = 'Downloads/%s.%s' % (self.id, self.info['extension'])
        info = self.info.copy()
        # these keys are derived locally and must not be stored on the File
        for key in ('mediastate', 'coverRatio', 'previewRatio'):
            if key in info:
                del info[key]
        f = File.get_or_create(self.id, info, path=path)
        path = self.get_path()
        # NOTE(review): reconstructed so the write + bookkeeping happen only
        # when the target path does not exist yet — confirm against upstream
        if not os.path.exists(path):
            ox.makedirs(os.path.dirname(path))
            with open(path, 'wb') as fd:
                fd.write(content)
            if u not in self.users:
                self.add_user(u)
            t = Transfer.get_or_create(self.id)
            t.progress = 1
            t.save()
            self.added = datetime.utcnow()
            Changelog.record(u, 'additem', self.id, f.info)
            self.update()
            f.move()
            self.update_icons()
            self.save()
            trigger_event('transfer', {
                'id': self.id, 'progress': 1
            })
            return True
    else:
        logger.debug('TRIED TO SAVE EXISTING FILE!!!')
        t = Transfer.get_or_create(self.id)
        t.progress = 1
        t.save()
        self.update()
    return False
def scrape_queue(self):
    """Process queued Scrape entries, oldest first.

    Returns True if interrupted by shutdown (self._running cleared),
    otherwise whether at least one item was scraped successfully.
    """
    import item.models
    scraped = False
    for s in item.models.Scrape.query.filter(
        item.models.Scrape.added != None,  # SQLAlchemy IS NOT NULL; keep != None
    ).order_by(
        item.models.Scrape.added):
        if not self._running:
            return True
        logger.debug('scrape %s', s.item)
        try:
            if s.item.scrape():
                for f in s.item.files:
                    f.move()
                s.item.update_icons()
                s.item.save()
                s.remove()
                trigger_event('change', {})
                scraped = True
        except Exception:
            # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
            logger.debug('scrape failed %s', s.item, exc_info=1)
    return scraped
def scrape_queue(self):
    """Process queued Scrape entries, oldest first.

    Returns True if interrupted by shutdown (self._running cleared),
    otherwise whether at least one item was scraped successfully.
    """
    import item.models
    scraped = False
    for s in item.models.Scrape.query.filter(
        item.models.Scrape.added != None,  # SQLAlchemy IS NOT NULL; keep != None
    ).order_by(item.models.Scrape.added):
        if not self._running:
            return True
        logger.debug('scrape %s', s.item)
        try:
            if s.item.scrape():
                for f in s.item.files:
                    f.move()
                s.item.update_icons()
                s.item.save()
                s.remove()
                trigger_event('change', {})
                scraped = True
        except Exception:
            # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
            logger.debug('scrape failed %s', s.item, exc_info=1)
    return scraped
def apply_change(cls, user, change, trigger=True):
    """Apply one replicated change tuple (revision, timestamp, data) for user.

    The change is applied only if its revision is at least the next
    expected revision for this user; the payload's first element selects an
    `action_<name>` handler.  On success the change row is persisted and,
    if `trigger` is set, a 'change' event is emitted.

    Returns True when the change was processed, False on revision mismatch.
    """
    revision, timestamp, data = change
    last = cls.query.filter_by(user_id=user.id).order_by('-revision').first()
    next_revision = last.revision + 1 if last else 0
    if revision >= next_revision:
        c = cls()
        c.created = datetime.utcnow()
        c.timestamp = timestamp
        c.user_id = user.id
        c.revision = revision
        c.data = data
        args = json.loads(data)
        logger.debug('apply change from %s: %s', user.name, args)
        # dispatch to the action_<name> handler encoded in the payload
        if getattr(c, 'action_' + args[0])(user, timestamp, *args[1:]):
            logger.debug('change applied')
            state.db.session.add(c)
            state.db.session.commit()
        if trigger:
            trigger_event('change', {})
        return True
    else:
        # fixed log typo: 'revsion' -> 'revision'; dropped stray semicolon
        logger.debug('revision does not match! got %s expecting %s',
                     revision, next_revision)
        return False
def run(self):
    """Consume queued tasks until disconnected, dispatching by action name.

    Known actions: 'ping' (echoes a 'pong' event), 'import' and 'scan'
    (delegate to item.scan); anything else reports an 'error' event.
    """
    import item.scan
    while self.connected:
        m = self.q.get()
        if m:
            try:
                action, data = m
                if action == 'ping':
                    trigger_event('pong', data)
                elif action == 'import':
                    item.scan.run_import(data)
                elif action == 'scan':
                    item.scan.run_scan()
                elif action == 'update':
                    trigger_event('error', {'error': 'not implemented'})
                else:
                    trigger_event('error', {'error': 'unknown action'})
            except Exception:
                # was a bare `except:`; keep the worker loop alive on errors
                logger.debug('task failed', exc_info=1)
        # task_done must pair with every q.get(), including None wake-ups,
        # or q.join() would block forever
        self.q.task_done()
def run_import(options=None):
    """Import book files from a folder into the library.

    Walks options['path'] (default: the importPath preference), copies or
    moves (options['mode'] == 'move') matching files into Books/Imported/,
    creates File records for unknown ones, and optionally adds the imported
    items to the list named by options['list'].  Progress is reported via
    'activity' events and state.activity; setting state.activity['cancel']
    aborts the run.
    """
    options = options or {}
    logger.debug('run_import')
    prefs = settings.preferences
    prefix = os.path.expanduser(options.get('path', prefs['importPath']))
    if os.path.islink(prefix):
        prefix = os.path.realpath(prefix)
    # endswith is safe even for an empty string, unlike prefix[-1]
    if not prefix.endswith(os.sep):
        prefix += os.sep
    prefix_books = os.path.join(os.path.expanduser(prefs['libraryPath']), 'Books' + os.sep)
    prefix_imported = os.path.join(prefix_books, 'Imported' + os.sep)
    if prefix_books.startswith(prefix) or prefix.startswith(prefix_books):
        error = 'invalid path'
    elif not os.path.exists(prefix):
        error = 'path not found'
    elif not os.path.isdir(prefix):
        error = 'path must be a folder'
    else:
        error = None
    if error:
        trigger_event('activity', {
            'activity': 'import',
            'progress': [0, 0],
            'status': {'code': 404, 'text': error},
        })
        state.activity = {}
        return
    listname = options.get('list')
    if listname:
        listitems = []
    books = []
    count = 0
    for root, folders, files in os.walk(prefix):
        for f in files:
            if not state.tasks.connected:
                return
            # skip hidden files (covers macOS '._*' and '.DS_Store')
            if f.startswith('.'):
                continue
            f = os.path.join(root, f)
            ext = f.split('.')[-1]
            if ext in extensions:
                books.append(f)
                count += 1
                if state.activity.get('cancel'):
                    state.activity = {}
                    return
                # report discovery progress every 1000 books found
                if count % 1000 == 0:
                    state.activity = {
                        'activity': 'import',
                        'path': prefix,
                        'progress': [0, count],
                    }
                    trigger_event('activity', state.activity)
    state.activity = {
        'activity': 'import',
        'path': prefix,
        'progress': [0, len(books)],
    }
    trigger_event('activity', state.activity)
    position = 0
    added = 0
    last = 0
    for f in ox.sorted_strings(books):
        position += 1
        if not os.path.exists(f):
            continue
        with db.session():
            # renamed from `id` to avoid shadowing the builtin
            file_id = media.get_id(f)
            file = File.get(file_id)
            if not file:
                f_import = f
                f = f.replace(prefix, prefix_imported)
                ox.makedirs(os.path.dirname(f))
                if options.get('mode') == 'move':
                    shutil.move(f_import, f)
                else:
                    shutil.copy(f_import, f)
                file = add_file(file_id, f, prefix_books, f_import)
                file.move()
                added += 1
            if listname:
                listitems.append(file.item.id)
        # throttle progress events to one every 5 seconds
        if time.time() - last > 5:
            last = time.time()
            state.activity = {
                'activity': 'import',
                'progress': [position, len(books)],
                'path': prefix,
                'added': added,
            }
            trigger_event('activity', state.activity)
        if state.activity.get('cancel'):
            state.activity = {}
            return
    with db.session():
        if listname and listitems:
            # renamed from `l` for readability
            import_list = List.get(settings.USER_ID, listname)
            if import_list:
                import_list.add_items(listitems)
    trigger_event('activity', {
        'activity': 'import',
        'progress': [position, len(books)],
        'path': prefix,
        'status': {'code': 200, 'text': ''},
        'added': added,
    })
    state.activity = {}
    remove_empty_folders(prefix_books)
    if options.get('mode') == 'move':
        remove_empty_folders(prefix)
def trigger_status(self):
    """Broadcast this node's online state once it is actually known."""
    if self.online is None:
        return
    trigger_event('status', {
        'id': self.user_id,
        'online': self.online,
    })
def run_import(options=None):
    """Import book files from a folder into the library.

    Walks options['path'] (default: the importPath preference), copies or
    moves (options['mode'] == 'move') matching files into Books/Imported/,
    creates File records for unknown ones, and optionally adds the imported
    items to the list named by options['list'].  Progress is reported via
    'activity' events and state.activity; setting state.activity['cancel']
    aborts the run.
    """
    options = options or {}
    logger.debug("run_import")
    prefs = settings.preferences
    prefix = os.path.expanduser(options.get("path", prefs["importPath"]))
    if os.path.islink(prefix):
        prefix = os.path.realpath(prefix)
    # endswith is safe even for an empty string, unlike prefix[-1]
    if not prefix.endswith(os.sep):
        prefix += os.sep
    prefix_books = os.path.join(os.path.expanduser(prefs["libraryPath"]), "Books" + os.sep)
    prefix_imported = os.path.join(prefix_books, "Imported" + os.sep)
    if prefix_books.startswith(prefix) or prefix.startswith(prefix_books):
        error = "invalid path"
    elif not os.path.exists(prefix):
        error = "path not found"
    elif not os.path.isdir(prefix):
        error = "path must be a folder"
    else:
        error = None
    if error:
        trigger_event("activity", {
            "activity": "import",
            "progress": [0, 0],
            "status": {"code": 404, "text": error},
        })
        state.activity = {}
        return
    listname = options.get("list")
    if listname:
        listitems = []
    books = []
    count = 0
    for root, folders, files in os.walk(prefix):
        for f in files:
            if not state.tasks.connected:
                return
            # skip hidden files (covers macOS '._*' and '.DS_Store')
            if f.startswith("."):
                continue
            f = os.path.join(root, f)
            ext = f.split(".")[-1]
            if ext in extensions:
                books.append(f)
                count += 1
                if state.activity.get("cancel"):
                    state.activity = {}
                    return
                # report discovery progress every 1000 books found
                if count % 1000 == 0:
                    state.activity = {"activity": "import", "path": prefix, "progress": [0, count]}
                    trigger_event("activity", state.activity)
    state.activity = {"activity": "import", "path": prefix, "progress": [0, len(books)]}
    trigger_event("activity", state.activity)
    position = 0
    added = 0
    last = 0
    for f in ox.sorted_strings(books):
        position += 1
        if not os.path.exists(f):
            continue
        with db.session():
            # renamed from `id` to avoid shadowing the builtin
            file_id = media.get_id(f)
            file = File.get(file_id)
            if not file:
                f_import = f
                f = f.replace(prefix, prefix_imported)
                ox.makedirs(os.path.dirname(f))
                if options.get("mode") == "move":
                    shutil.move(f_import, f)
                else:
                    shutil.copy(f_import, f)
                file = add_file(file_id, f, prefix_books, f_import)
                file.move()
                added += 1
            if listname:
                listitems.append(file.item.id)
        # throttle progress events to one every 5 seconds
        if time.time() - last > 5:
            last = time.time()
            state.activity = {
                "activity": "import",
                "progress": [position, len(books)],
                "path": prefix,
                "added": added,
            }
            trigger_event("activity", state.activity)
        if state.activity.get("cancel"):
            state.activity = {}
            return
    with db.session():
        if listname and listitems:
            # renamed from `l` for readability
            import_list = List.get(settings.USER_ID, listname)
            if import_list:
                import_list.add_items(listitems)
    trigger_event(
        "activity",
        {
            "activity": "import",
            "progress": [position, len(books)],
            "path": prefix,
            "status": {"code": 200, "text": ""},
            "added": added,
        },
    )
    state.activity = {}
    remove_empty_folders(prefix_books)
    if options.get("mode") == "move":
        remove_empty_folders(prefix)