def unignore(alias, chapters):
    """Unignore chapters for a series.

    Enter one or more chapters after the alias to mark them as new. Enter the
    chapter identifiers as they are listed when using the chapters command. To
    unignore all of the chapters for a particular series, use the word "all"
    in place of the chapters.
    """
    series = db.Series.alias_lookup(alias)
    # Start from every ignored chapter (downloaded == -1) of this series.
    ignored = (db.session.query(db.Chapter)
               .filter(db.Chapter.series == series,
                       db.Chapter.downloaded == -1))
    unignore_all = len(chapters) == 1 and chapters[0].lower() == 'all'
    if unignore_all:
        # Bulk operation: require explicit confirmation before touching
        # every chapter of the series.
        click.echo('Unignoring {} chapters for {}'.format(
            len(series.chapters), series.name))
        click.confirm('Do you want to continue',
                      prompt_suffix='? ', abort=True)
    else:
        ignored = ignored.filter(db.Chapter.chapter.in_(chapters))
    targets = [row.to_object() for row in ignored.all()]
    for target in targets:
        target.mark_new()
    if len(targets) == 1:
        output.chapter('Unignored chapter {} for {}'.format(
            targets[0].chapter, series.name
        ))
    else:
        output.series('Unignored {} chapters for {}'.format(
            len(targets), series.name
        ))
def update():
    """Gather new chapters from followed series.

    Submits one scrape job per followed series to a thread pool (bounded by
    the configured download_threads), shows a progress bar while the jobs
    complete, collects per-series failures as warnings, prints them after
    the bar finishes, and finally lists the newly found chapters.
    """
    pool = concurrent.futures.ThreadPoolExecutor(
        config.get().download_threads)
    futures = []
    warnings = []
    # Map each future back to the alias of the series it scrapes, so
    # failures can be attributed to the right series.
    aliases = {}
    query = db.session.query(db.Series).filter_by(following=True).all()
    output.series('Updating {} series'.format(len(query)))
    for follow in query:
        fut = pool.submit(series_by_url, follow.url)
        futures.append(fut)
        aliases[fut] = follow.alias
    with click.progressbar(length=len(futures), show_pos=True,
                           fill_char='>', empty_char=' ') as bar:
        for future in concurrent.futures.as_completed(futures):
            try:
                series = future.result()
            except requests.exceptions.ConnectionError:
                warnings.append('Unable to update {} (connection error)'
                                .format(aliases[future]))
            except exceptions.ScrapingError:
                # BUG FIX: previously formatted with follow.alias — the
                # leftover loop variable from the submission loop, i.e.
                # always the LAST series — instead of the alias of the
                # series that actually failed.
                warnings.append('Unable to update {} (scraping error)'
                                .format(aliases[future]))
            else:
                series.update()
            bar.update(1)
    # All futures are complete here; release the worker threads.
    pool.shutdown()
    for w in warnings:
        output.warning(w)
    list_new()
def repair_db():
    """Runs an automated database repair."""
    tester = sanity.DatabaseSanity(db.Base, db.engine)
    tester.test()
    # Nothing to do when the sanity check found no problems.
    if not tester.errors:
        return
    output.series('Backing up database to cum.db.bak')
    db.backup_database()
    output.series('Running database repair')
    for problem in tester.errors:
        problem.fix()
def unfollow(alias):
    """Unfollow manga.

    Will mark a series as unfollowed. In order not to lose history of
    downloaded chapters, the series is merely marked as unfollowed in the
    database rather than removed.
    """
    series = db.Series.alias_lookup(alias)
    series.following = False
    db.session.commit()
    output.series('Removing follow for {}'.format(series.name))
def update(fast):
    """Gather new chapters from followed series."""
    pool = concurrent.futures.ThreadPoolExecutor(
        config.get().download_threads)
    futures = []
    warnings = []
    # Maps each future to the alias of the series it scrapes so failures
    # can be reported against the correct series.
    aliases = {}
    query = db.session.query(db.Series).filter_by(following=True).all()
    if fast:
        # Fast mode: drop series that report they do not need updating.
        fresh = [series for series in query if series.needs_update]
        skip_count = len(query) - len(fresh)
        query = fresh
        output.series('Updating {} series ({} skipped)'.format(
            len(query), skip_count))
    else:
        output.series('Updating {} series'.format(len(query)))
    for follow in query:
        job = pool.submit(utility.series_by_url, follow.url)
        futures.append(job)
        aliases[job] = follow.alias
    with click.progressbar(length=len(futures), show_pos=True,
                           fill_char='>', empty_char=' ') as bar:
        for future in concurrent.futures.as_completed(futures):
            try:
                result = future.result()
            except exceptions.ConnectionError:
                warnings.append(
                    'Unable to update {} (connection error)'.format(
                        aliases[future]))
            except exceptions.ScrapingError:
                warnings.append('Unable to update {} (scraping error)'.format(
                    aliases[future]))
            except exceptions.LoginError as e:
                warnings.append('Unable to update {} ({})'.format(
                    aliases[future], e.message))
            else:
                result.update()
            bar.update(1)
    for w in warnings:
        output.warning(w)
    utility.list_new()
def follow(self, ignore=False):
    """Adds the series details to database and all current chapters."""
    output.series('Adding follow for {s.name} ({s.alias})'.format(s=self))
    try:
        tracked = db.session.query(db.Series).filter_by(url=self.url).one()
    except NoResultFound:
        # First time seeing this series: create and persist a new row.
        tracked = db.Series(self)
        db.session.add(tracked)
        db.session.commit()
    else:
        if tracked.following:
            output.warning('You are already following this series')
        else:
            # Re-follow a previously unfollowed series, refreshing its
            # download directory.
            tracked.directory = self.directory
            tracked.following = True
            db.session.commit()
    for chapter in self.chapters:
        chapter.save(tracked, ignore=ignore)
def update(fast):
    """Gather new chapters from followed series."""
    pool = concurrent.futures.ThreadPoolExecutor(
        config.get().download_threads)
    warnings = []
    follows = db.session.query(db.Series).filter_by(following=True).all()
    if fast:
        # Fast mode keeps only series that report needing an update.
        stale = [entry for entry in follows if entry.needs_update]
        skipped = len(follows) - len(stale)
        follows = stale
        output.series('Updating {} series ({} skipped)'
                      .format(len(follows), skipped))
    else:
        output.series('Updating {} series'.format(len(follows)))
    # future -> alias mapping; insertion order doubles as the job list.
    aliases = {}
    for follow in follows:
        future = pool.submit(utility.series_by_url, follow.url)
        aliases[future] = follow.alias
    futures = list(aliases)
    with click.progressbar(length=len(futures), show_pos=True,
                           fill_char='>', empty_char=' ') as bar:
        for done in concurrent.futures.as_completed(futures):
            try:
                series = done.result()
            except exceptions.ConnectionError:
                warnings.append('Unable to update {} (connection error)'
                                .format(aliases[done]))
            except exceptions.ScrapingError:
                warnings.append('Unable to update {} (scraping error)'
                                .format(aliases[done]))
            except exceptions.LoginError as e:
                warnings.append('Unable to update {} ({})'
                                .format(aliases[done], e.message))
            else:
                series.update()
            bar.update(1)
    for warning in warnings:
        output.warning(warning)
    utility.list_new()
def follow(self, ignore=False):
    """Adds the series details to database and all current chapters."""
    try:
        existing = db.session.query(db.Series).filter_by(url=self.url).one()
    except NoResultFound:
        # Brand-new series: validate the alias before inserting the row.
        existing = db.Series(self)
        existing.check_alias_uniqueness()
        output.series('Adding follow for {s.name} ({s.alias})'
                      .format(s=existing))
        db.session.add(existing)
        db.session.commit()
    else:
        if existing.following:
            output.warning('You are already following {s.name} ({s.alias})'
                           .format(s=existing))
        else:
            # Re-follow a previously unfollowed series, refreshing its
            # download directory.
            existing.directory = self.directory
            existing.following = True
            db.session.commit()
    for chapter in self.chapters:
        chapter.save(existing, ignore=ignore)
def set_ignored(mark_ignored, alias, chapters):
    """Helper function for `cum ignore` and `cum unignore` commands, which
    will either ignore chapters if mark_ignored is True or unignore chapters
    if mark_ignored is False.
    """
    # Select the direction: which downloaded-state to match, the message
    # prefix ('I' -> "Ignoring", 'Uni' -> "Unignoring"), and the chapter
    # method to invoke.
    if mark_ignored:
        downloaded, message_start, method = 0, 'I', 'ignore'
    else:
        downloaded, message_start, method = -1, 'Uni', 'mark_new'
    series = db.Series.alias_lookup(alias)
    query = (db.session.query(db.Chapter)
             .filter(db.Chapter.series == series,
                     db.Chapter.downloaded == downloaded))
    wants_all = len(chapters) == 1 and chapters[0].lower() == 'all'
    if wants_all:
        # Bulk operation: require confirmation before touching everything.
        click.echo('{}gnoring {} chapters for {}'
                   .format(message_start, len(series.chapters), series.name))
        click.confirm('Do you want to continue',
                      prompt_suffix='? ', abort=True)
    else:
        query = query.filter(db.Chapter.chapter.in_(chapters))
    targets = [row.to_object() for row in query.all()]
    for target in targets:
        getattr(target, method)()
    if len(targets) == 1:
        output.chapter('{}gnored chapter {} for {}'
                       .format(message_start, targets[0].chapter,
                               series.name))
    else:
        output.series('{}gnored {} chapters for {}'
                      .format(message_start, len(targets), series.name))