def process_result(self, group_id, result):
    """Handle the metadata-read result for one group of book files.

    On a failed read, record the failure and use the placeholder metadata
    returned by report_metadata_failure; on success, parse the OPF bytes
    into a book Metadata object. The book is then routed: queued in
    self.items when there is no database, merged into identical existing
    books when adding formats to existing books, deferred to
    self.duplicates when a duplicate is detected, or added outright.

    :param group_id: key into self.file_groups identifying the file group
    :param result: worker result with .err/.traceback or .value =
        (paths, opf_bytes, has_cover, duplicate_info)
    """
    if result.err:
        mi = self.report_metadata_failure(group_id, result.traceback)
        paths = self.file_groups[group_id]
        has_cover = False
        # duplicate_info is a set in add-formats-to-existing mode, a bool otherwise
        duplicate_info = set() if self.add_formats_to_existing else False
    else:
        paths, opf, has_cover, duplicate_info = result.value
        try:
            mi = OPF(BytesIO(opf), basedir=self.tdir, populate_spine=False,
                     try_to_guess_cover=False).to_book_metadata()
            mi.read_metadata_failed = False
        except Exception:
            mi = self.report_metadata_failure(group_id, traceback.format_exc())

    if mi.is_null('title'):
        # Fall back to the first file's base name as the title
        if paths:
            mi.title = os.path.splitext(os.path.basename(paths[0]))[0]
    if mi.application_id == '__calibre_dummy__':
        mi.application_id = None
    if gprefs.get('tag_map_on_add_rules'):
        from calibre.ebooks.metadata.tag_mapper import map_tags
        mi.tags = map_tags(mi.tags, gprefs['tag_map_on_add_rules'])
    if self.author_map_rules:
        from calibre.ebooks.metadata.author_mapper import map_authors
        new_authors = map_authors(mi.authors, self.author_map_rules)
        if new_authors != mi.authors:
            mi.authors = new_authors
            # Keep author_sort consistent with the remapped authors
            if self.db is None:
                mi.author_sort = authors_to_sort_string(mi.authors)
            else:
                mi.author_sort = self.db.author_sort_from_authors(mi.authors)

    self.pd.msg = mi.title

    cover_path = os.path.join(self.tdir, '%s.cdata' % group_id) if has_cover else None

    if self.db is None:
        # No database yet: just queue the book for later processing
        if paths:
            self.items.append((mi, cover_path, paths))
        return

    if self.add_formats_to_existing:
        identical_book_ids = find_identical_books(mi, self.find_identical_books_data)
        if identical_book_ids:
            try:
                self.merge_books(mi, cover_path, paths, identical_book_ids)
            except Exception:
                a = self.report.append
                a('')
                a('-' * 70)
                a(_('Failed to merge the book: ') + mi.title)
                for f in paths:
                    a('\t' + f)
                a(_('With error:'))
                a(traceback.format_exc())
        else:
            self.add_book(mi, cover_path, paths)
    else:
        if duplicate_info or icu_lower(mi.title or _('Unknown')) in self.added_duplicate_info:
            self.duplicates.append((mi, cover_path, paths))
        else:
            self.add_book(mi, cover_path, paths)
def process_result(self, group_id, result):
    """Consume one metadata-scan result and route the book appropriately.

    Failed reads get placeholder metadata via report_metadata_failure;
    successful reads are parsed from the returned OPF bytes. The book is
    then queued (no database), merged into identical existing books,
    recorded as a duplicate, or added directly.
    """
    if result.err:
        mi = self.report_metadata_failure(group_id, result.traceback)
        paths = self.file_groups[group_id]
        has_cover = False
        duplicate_info = set() if self.add_formats_to_existing else False
    else:
        paths, opf, has_cover, duplicate_info = result.value
        try:
            parsed = OPF(
                BytesIO(opf),
                basedir=self.tdir,
                populate_spine=False,
                try_to_guess_cover=False,
            )
            mi = parsed.to_book_metadata()
            mi.read_metadata_failed = False
        except Exception:
            mi = self.report_metadata_failure(group_id, traceback.format_exc())

    if mi.is_null("title"):
        # Use the first file's base name when no title was found
        for candidate in paths:
            mi.title = os.path.splitext(os.path.basename(candidate))[0]
            break
    if mi.application_id == "__calibre_dummy__":
        mi.application_id = None
    if gprefs.get("tag_map_on_add_rules"):
        from calibre.ebooks.metadata.tag_mapper import map_tags

        mi.tags = map_tags(mi.tags, gprefs["tag_map_on_add_rules"])

    self.pd.msg = mi.title

    cover_path = None
    if has_cover:
        cover_path = os.path.join(self.tdir, "%s.cdata" % group_id)

    if self.db is None:
        # No database: queue for later processing
        if paths:
            self.items.append((mi, cover_path, paths))
        return

    if self.add_formats_to_existing:
        identical_ids = find_identical_books(mi, self.find_identical_books_data)
        if not identical_ids:
            self.add_book(mi, cover_path, paths)
        else:
            try:
                self.merge_books(mi, cover_path, paths, identical_ids)
            except Exception:
                emit = self.report.append
                emit("")
                emit("-" * 70)
                emit(_("Failed to merge the book: ") + mi.title)
                for fpath in paths:
                    emit("\t" + fpath)
                emit(_("With error:"))
                emit(traceback.format_exc())
    elif duplicate_info or icu_lower(mi.title or _("Unknown")) in self.added_duplicate_info:
        self.duplicates.append((mi, cover_path, paths))
    else:
        self.add_book(mi, cover_path, paths)
def main(do_identify, covers, metadata, ensure_fields, tdir):
    """Download metadata and/or covers for a batch of books.

    For each (book_id, opf_bytes) in *metadata*: optionally run the
    identify pipeline and write merged metadata to ``<tdir>/<id>.mi``;
    optionally download a cover to ``<tdir>/<id>.cover``; always write
    the per-book log to ``<tdir>/<id>.log``.

    :param do_identify: run metadata identification when true
    :param covers: download covers when true
    :param metadata: mapping of book_id -> serialized OPF bytes
    :param ensure_fields: fields that merge_result must preserve
    :param tdir: directory for all output artifacts
    :return: (failed_ids, failed_covers, all_failed) — ids whose metadata
        download failed, ids whose cover download failed, and whether
        every single download attempt failed
    """
    failed_ids = set()
    failed_covers = set()
    all_failed = True
    log = GUILog()
    patch_plugins()

    for book_id, mi in iteritems(metadata):
        mi = OPF(BytesIO(mi), basedir=tdir, populate_spine=False).to_book_metadata()
        title, authors, identifiers = mi.title, mi.authors, mi.identifiers
        cdata = None
        log.clear()

        if do_identify:
            results = []
            try:
                results = identify(log, Event(), title=title, authors=authors,
                                   identifiers=identifiers)
            except Exception:
                # Best-effort: failures surface via the per-book log and
                # failed_ids below. Narrowed from a bare except so that
                # SystemExit/KeyboardInterrupt still propagate.
                pass
            if results:
                all_failed = False
                mi = merge_result(mi, results[0], ensure_fields=ensure_fields)
                identifiers = mi.identifiers
                if not mi.is_null('rating'):
                    # set_metadata expects a rating out of 10
                    mi.rating *= 2
                with open(os.path.join(tdir, '%d.mi' % book_id), 'wb') as f:
                    f.write(metadata_to_opf(mi, default_lang='und'))
            else:
                log.error('Failed to download metadata for', title)
                failed_ids.add(book_id)

        if covers:
            cdata = download_cover(log, title=title, authors=authors,
                                   identifiers=identifiers)
            if cdata is None:
                failed_covers.add(book_id)
            else:
                with open(os.path.join(tdir, '%d.cover' % book_id), 'wb') as f:
                    f.write(cdata[-1])
                all_failed = False

        with open(os.path.join(tdir, '%d.log' % book_id), 'wb') as f:
            f.write(log.plain_text.encode('utf-8'))

    return failed_ids, failed_covers, all_failed
def main(do_identify, covers, metadata, ensure_fields, tdir):
    """Download metadata and/or covers for a batch of books.

    For each (book_id, opf_bytes) in *metadata*: optionally run the
    identify pipeline and write merged metadata to ``<tdir>/<id>.mi``;
    optionally download a cover to ``<tdir>/<id>.cover``; always write
    the per-book log to ``<tdir>/<id>.log``.

    :param do_identify: run metadata identification when true
    :param covers: download covers when true
    :param metadata: mapping of book_id -> serialized OPF bytes
    :param ensure_fields: fields that merge_result must preserve
    :param tdir: directory for all output artifacts
    :return: (failed_ids, failed_covers, all_failed) — ids whose metadata
        download failed, ids whose cover download failed, and whether
        every single download attempt failed
    """
    failed_ids = set()
    failed_covers = set()
    all_failed = True
    log = GUILog()
    patch_plugins()

    # dict.iteritems() does not exist on Python 3; .items() works on both
    # modern dicts and is the correct spelling here.
    for book_id, mi in metadata.items():
        mi = OPF(BytesIO(mi), basedir=tdir, populate_spine=False).to_book_metadata()
        title, authors, identifiers = mi.title, mi.authors, mi.identifiers
        cdata = None
        log.clear()

        if do_identify:
            results = []
            try:
                results = identify(log, Event(), title=title, authors=authors,
                                   identifiers=identifiers)
            except Exception:
                # Best-effort: failures surface via the per-book log and
                # failed_ids below. Narrowed from a bare except so that
                # SystemExit/KeyboardInterrupt still propagate.
                pass
            if results:
                all_failed = False
                mi = merge_result(mi, results[0], ensure_fields=ensure_fields)
                identifiers = mi.identifiers
                if not mi.is_null('rating'):
                    # set_metadata expects a rating out of 10
                    mi.rating *= 2
                with open(os.path.join(tdir, '%d.mi' % book_id), 'wb') as f:
                    f.write(metadata_to_opf(mi, default_lang='und'))
            else:
                log.error('Failed to download metadata for', title)
                failed_ids.add(book_id)

        if covers:
            cdata = download_cover(log, title=title, authors=authors,
                                   identifiers=identifiers)
            if cdata is None:
                failed_covers.add(book_id)
            else:
                with open(os.path.join(tdir, '%d.cover' % book_id), 'wb') as f:
                    f.write(cdata[-1])
                all_failed = False

        with open(os.path.join(tdir, '%d.log' % book_id), 'wb') as f:
            f.write(log.plain_text.encode('utf-8'))

    return failed_ids, failed_covers, all_failed