def convert(self, oeb_book, output_path, input_plugin, opts, log):
    '''
    Serialize ``oeb_book`` as a TXTZ archive at ``output_path``: a zip
    containing the converted text (index.txt or index.text), the book's
    images, and an OPF1 metadata file.
    '''
    from calibre.ebooks.oeb.base import OEB_IMAGES
    from calibre.utils.zipfile import ZipFile
    from lxml import etree

    with TemporaryDirectory('_txtz_output') as tdir:
        # TXT: textile formatting conventionally uses the .text extension
        txt_name = 'index.txt'
        if opts.txt_output_formatting.lower() == 'textile':
            txt_name = 'index.text'
        with TemporaryFile(txt_name) as tf:
            TXTOutput.convert(self, oeb_book, tf, input_plugin, opts, log)
            shutil.copy(tf, os.path.join(tdir, txt_name))

        # Images: if the TXT writer tracked/renamed images, use its mapping
        # under images/; otherwise mirror the manifest's directory layout.
        for item in oeb_book.manifest:
            if item.media_type in OEB_IMAGES:
                if hasattr(self.writer, 'images'):
                    path = os.path.join(tdir, 'images')
                    if item.href in self.writer.images:
                        href = self.writer.images[item.href]
                    else:
                        # Image not referenced by the writer, skip it
                        continue
                else:
                    path = os.path.join(tdir, os.path.dirname(item.href))
                    href = os.path.basename(item.href)
                if not os.path.exists(path):
                    os.makedirs(path)
                with open(os.path.join(path, href), 'wb') as imgf:
                    imgf.write(item.data)

        # Metadata
        with open(os.path.join(tdir, 'metadata.opf'), 'wb') as mdataf:
            mdataf.write(etree.tostring(oeb_book.metadata.to_opf1()))

        # Close the archive explicitly so the zip central directory is
        # written out; the original left the ZipFile open and relied on
        # garbage collection to flush it.
        txtz = ZipFile(output_path, 'w')
        try:
            txtz.add_dir(tdir)
        finally:
            txtz.close()
def reinit_db(dbpath):
    '''
    Re-initialize the metadata database at ``dbpath`` by dumping it
    statement-by-statement into a fresh database, then atomically renaming
    the rebuilt file over the original. Statements that fail to dump or to
    restore are skipped with a message, which allows recovering from a
    partially corrupted database.
    '''
    from contextlib import closing
    from calibre import as_unicode
    from calibre.ptempfile import TemporaryFile
    from calibre.utils.filenames import atomic_rename
    # We have to use sqlite3 instead of apsw as apsw has no way to discard
    # problematic statements
    import sqlite3
    from calibre.library.sqlite import do_connect
    with TemporaryFile(suffix='_tmpdb.db', dir=os.path.dirname(dbpath)) as tmpdb:
        with closing(do_connect(dbpath)) as src, closing(do_connect(tmpdb)) as dest:
            dest.execute('create temporary table temp_sequence(id INTEGER PRIMARY KEY AUTOINCREMENT)')
            dest.commit()
            # Preserve the schema version across the rebuild
            uv = int(src.execute('PRAGMA user_version;').fetchone()[0])
            dump = src.iterdump()
            last_restore_error = None
            while True:
                try:
                    statement = next(dump)
                except StopIteration:
                    break
                except sqlite3.OperationalError as e:
                    prints('Failed to dump a line:', as_unicode(e))
                    # Nothing was dumped this iteration; without this
                    # continue, the previous statement would be executed a
                    # second time (or a NameError raised on the very first
                    # iteration).
                    continue
                if last_restore_error:
                    prints('Failed to restore a line:', last_restore_error)
                    last_restore_error = None
                try:
                    dest.execute(statement)
                except sqlite3.OperationalError as e:
                    last_restore_error = as_unicode(e)
                    # The dump produces an extra commit at the end, so
                    # only print this error if there are more
                    # statements to be restored
            dest.execute('PRAGMA user_version=%d;'%uv)
            dest.commit()
        atomic_rename(tmpdb, dbpath)
    prints('Database successfully re-initialized')
def convert_cbr_to_cbz(self):
    ''' Converts a rar or cbr-comic to a cbz-comic '''
    from calibre.utils.unrar import extract, comment

    with TemporaryDirectory('_cbr2cbz') as workdir:
        # Pull the book out of the library as a temporary rar/cbr file
        # and unpack its pages into the working directory.
        rar_path = self.db.format(self.book_id, self.format, as_path=True)
        extract(rar_path, workdir)
        archive_comment = comment(rar_path)
        delete_temp_file(rar_path)

        with TemporaryFile("comic.cbz") as cbz_path:
            # Repack the extracted pages as a zip archive.
            cbz = ZipFile(cbz_path, "w")
            cbz.add_dir(workdir)
            cbz.close()
            # Carry the rar archive comment over, if there was one.
            if archive_comment:
                writeZipComment(cbz_path, archive_comment)
            # Register the new cbz with calibre's library and remember
            # the new format for this instance.
            self.db.add_format(self.book_id, "cbz", cbz_path)
            self.format = "cbz"
def tweak(ebook_file):
    ''' Command line interface to the Tweak Book tool '''
    fmt = ebook_file.rpartition('.')[-1].lower()
    exploder, rebuilder = get_tools(fmt)
    if exploder is None:
        # Bug fix: the %s placeholder was never interpolated; show the
        # actual format the user asked for.
        prints('Cannot tweak %s files. Supported formats are: EPUB, HTMLZ, AZW3, MOBI' % fmt,
               file=sys.stderr)
        raise SystemExit(1)

    with TemporaryDirectory('_tweak_'+os.path.basename(ebook_file).rpartition('.')[0]) as tdir:
        try:
            opf = exploder(ebook_file, tdir, question=ask_cli_question)
        except WorkerError as e:
            prints('Failed to unpack', ebook_file)
            prints(e.orig_tb)
            raise SystemExit(1)
        except Error as e:
            prints(as_unicode(e), file=sys.stderr)
            raise SystemExit(1)

        if opf is None:
            # The question was answered with No
            return

        ed = os.environ.get('EDITOR', 'dummy')
        cmd = shlex.split(ed)
        # Detect vim-family editors, which get a zip of the exploded book
        # instead of an interactive prompt.
        isvim = bool([x for x in cmd[0].split('/') if x.endswith('vim')])

        proceed = False
        prints('Book extracted to', tdir)

        if not isvim:
            prints('Make your tweaks and once you are done,', __appname__,
                   'will rebuild', ebook_file, 'from', tdir)
            print()
            proceed = ask_cli_question('Rebuild ' + ebook_file + '?')
        else:
            base = os.path.basename(ebook_file)
            with TemporaryFile(base+'.zip') as zipf:
                with ZipFile(zipf, 'w') as zf:
                    zf.add_dir(tdir)
                try:
                    subprocess.check_call(cmd + [zipf])
                except Exception:
                    # Bug fix: narrowed from a bare except so that
                    # KeyboardInterrupt/SystemExit are not swallowed.
                    prints(ed, 'failed, aborting...')
                    raise SystemExit(1)
                # Replace the exploded tree with the editor's output
                with ZipFile(zipf, 'r') as zf:
                    shutil.rmtree(tdir)
                    os.mkdir(tdir)
                    zf.extractall(path=tdir)
                proceed = True

        if proceed:
            prints('Rebuilding', ebook_file, 'please wait ...')
            try:
                rebuilder(tdir, ebook_file)
            except WorkerError as e:
                prints('Failed to rebuild', ebook_file)
                prints(e.orig_tb)
                raise SystemExit(1)
            prints(ebook_file, 'successfully tweaked')
def test_legacy_adding_books(self):  # {{{
    'Test various adding/deleting books methods'
    from calibre.ebooks.metadata.book.base import Metadata
    from calibre.ptempfile import TemporaryFile
    # Two views over the same cloned library: the legacy wrapper and the
    # old implementation; ET compares the two call-for-call.
    legacy, old = self.init_legacy(self.cloned_library), self.init_old(
        self.cloned_library)
    mi = Metadata('Added Book0', authors=('Added Author', ))
    with TemporaryFile(suffix='.aff') as name:
        with open(name, 'wb') as f:
            f.write(b'xxx')
        # add_books: plain call, with return_ids, and with duplicates
        # disallowed
        T = partial(ET, 'add_books', ([name], ['AFF'], [mi]), old=old, legacy=legacy)
        T()(self)
        book_id = T(kwargs={'return_ids': True})(self)[1][0]
        self.assertEqual(legacy.new_api.formats(book_id), ('AFF', ))
        T(kwargs={'add_duplicates': False})(self)
        mi.title = 'Added Book1'
        mi.uuid = 'uuu'
        # import_book: a fresh uuid is generated unless preserve_uuid is
        # passed
        T = partial(ET, 'import_book', (mi, [name]), old=old, legacy=legacy)
        book_id = T()(self)
        self.assertNotEqual(legacy.uuid(book_id, index_is_id=True),
                            old.uuid(book_id, index_is_id=True))
        book_id = T(kwargs={'preserve_uuid': True})(self)
        self.assertEqual(legacy.uuid(book_id, index_is_id=True),
                         old.uuid(book_id, index_is_id=True))
        self.assertEqual(legacy.new_api.formats(book_id), ('AFF', ))
        # add_format: repeated adds and an explicit replace
        T = partial(ET, 'add_format', old=old, legacy=legacy)
        T((0, 'AFF', BytesIO(b'fffff')))(self)
        T((0, 'AFF', BytesIO(b'fffff')))(self)
        T((0, 'AFF', BytesIO(b'fffff')), {'replace': True})(self)
    with TemporaryFile(suffix='.opf') as name:
        with open(name, 'wb') as f:
            f.write(b'zzzz')
        # Importing an OPF adds no book format
        T = partial(ET, 'import_book', (mi, [name]), old=old, legacy=legacy)
        book_id = T()(self)
        self.assertFalse(legacy.new_api.formats(book_id))
        mi.title = 'Added Book2'
        # create_book_entry: plain, duplicates disallowed, and forced id
        T = partial(ET, 'create_book_entry', (mi, ), old=old, legacy=legacy)
        T()
        T({'add_duplicates': False})
        T({'force_id': 1000})
    with TemporaryFile(suffix='.txt') as name:
        with open(name, 'wb') as f:
            f.write(b'tttttt')
        # add_catalog: idempotent for the same title — the same book id
        # comes back on repeated calls
        bid = legacy.add_catalog(name, 'My Catalog')
        self.assertEqual(old.add_catalog(name, 'My Catalog'), bid)
        cache = legacy.new_api
        self.assertEqual(cache.formats(bid), ('TXT', ))
        self.assertEqual(cache.field_for('title', bid), 'My Catalog')
        self.assertEqual(cache.field_for('authors', bid), ('calibre', ))
        self.assertEqual(cache.field_for('tags', bid), (_('Catalog'), ))
        self.assertTrue(bid < legacy.add_catalog(name, 'Something else'))
        self.assertEqual(legacy.add_catalog(name, 'My Catalog'), bid)
        self.assertEqual(old.add_catalog(name, 'My Catalog'), bid)
        # add_news: tags come from the News tag, the title and custom_tags
        bid = legacy.add_news(name, {'title': 'Events', 'add_title_tag': True,
                                     'custom_tags': ('one', 'two')})
        self.assertEqual(cache.formats(bid), ('TXT', ))
        self.assertEqual(cache.field_for('authors', bid), ('calibre', ))
        self.assertEqual(cache.field_for('tags', bid), (_('News'), 'Events', 'one', 'two'))
    # Cover add/remove/set round-trips through the legacy API
    self.assertTrue(legacy.cover(1, index_is_id=True))
    origcov = legacy.cover(1, index_is_id=True)
    self.assertTrue(legacy.has_cover(1))
    legacy.remove_cover(1)
    self.assertFalse(legacy.has_cover(1))
    self.assertFalse(legacy.cover(1, index_is_id=True))
    legacy.set_cover(3, origcov)
    self.assertEqual(legacy.cover(3, index_is_id=True), origcov)
    self.assertTrue(legacy.has_cover(3))
    # Format removal and whole-book deletion
    self.assertTrue(legacy.format(1, 'FMT1', index_is_id=True))
    legacy.remove_format(1, 'FMT1', index_is_id=True)
    self.assertIsNone(legacy.format(1, 'FMT1', index_is_id=True))
    legacy.delete_book(1)
    old.delete_book(1)
    self.assertNotIn(1, legacy.all_ids())
    legacy.dump_metadata((2, 3))
    old.close()
def extract_content(self, output_dir):
    '''
    Write the text records of the PDB file as per-uid HTML files and its
    image records as JPEGs inside ``output_dir``, then run the result
    through calibre's HTML input plugin and return the generated OEB book.
    '''
    # Each text record is independent (unless the continuation
    # value is set in the previous record). Put each converted
    # text record into a separate file. We will reference the
    # home.html file as the first file and let the HTML input
    # plugin assemble the order based on hyperlinks.
    with CurrentDir(output_dir):
        for uid, num in self.uid_text_secion_number.items():
            self.log.debug('Writing record with uid: %s as %s.html' % (uid, uid))
            with open('%s.html' % uid, 'wb') as htmlf:
                html = u'<html><body>'
                section_header, section_data = self.sections[num]
                if section_header.type == DATATYPE_PHTML:
                    html += self.process_phtml(section_data.data,
                        section_data.header.paragraph_offsets)
                elif section_header.type == DATATYPE_PHTML_COMPRESSED:
                    # Compressed records are inflated, then decoded with the
                    # per-uid text encoding before being appended.
                    d = self.decompress_phtml(section_data.data)
                    html += self.process_phtml(d,
                        section_data.header.paragraph_offsets).decode(
                            self.get_text_uid_encoding(section_header.uid), 'replace')
                html += '</body></html>'
                htmlf.write(html.encode('utf-8'))

    # Images.
    # Cache the image sizes in case they are used by a composite image.
    image_sizes = {}
    if not os.path.exists(os.path.join(output_dir, 'images/')):
        os.makedirs(os.path.join(output_dir, 'images/'))
    with CurrentDir(os.path.join(output_dir, 'images/')):
        # Single images.
        for uid, num in self.uid_image_section_number.items():
            section_header, section_data = self.sections[num]
            if section_data:
                idata = None
                if section_header.type == DATATYPE_TBMP:
                    idata = section_data
                elif section_header.type == DATATYPE_TBMP_COMPRESSED:
                    # compression flag: 1 uses PalmDoc decompression,
                    # 2 uses zlib
                    if self.header_record.compression == 1:
                        idata = decompress_doc(section_data)
                    elif self.header_record.compression == 2:
                        idata = zlib.decompress(section_data)
                try:
                    # Round-trip the raw bitmap through a temporary .palm
                    # file so the image library can load it, then save it
                    # as a JPEG named after its uid.
                    with TemporaryFile(suffix='.palm') as itn:
                        with open(itn, 'wb') as itf:
                            itf.write(idata)
                        im = Image()
                        im.read(itn)
                        image_sizes[uid] = im.size
                        im.set_compression_quality(70)
                        im.save('%s.jpg' % uid)
                        self.log.debug('Wrote image with uid %s to images/%s.jpg' % (uid, uid))
                except Exception as e:
                    self.log.error('Failed to write image with uid %s: %s' % (uid, e))
            else:
                self.log.error('Failed to write image with uid %s: No data.' % uid)
        # Composite images.
        # We're going to use the already compressed .jpg images here.
        for uid, num in self.uid_composite_image_section_number.items():
            try:
                section_header, section_data = self.sections[num]
                # Get the final width and height.
                # Total width is the widest row; total height is the sum of
                # each row's tallest column.
                width = 0
                height = 0
                for row in section_data.layout:
                    row_width = 0
                    col_height = 0
                    for col in row:
                        if col not in image_sizes:
                            raise Exception('Image with uid: %s missing.' % col)
                        im = Image()
                        im.read('%s.jpg' % col)
                        w, h = im.size
                        row_width += w
                        if col_height < h:
                            col_height = h
                    if width < row_width:
                        width = row_width
                    height += col_height
                # Create a new image the total size of all image
                # parts. Put the parts into the new image.
                canvas = create_canvas(width, height)
                y_off = 0
                for row in section_data.layout:
                    x_off = 0
                    largest_height = 0
                    for col in row:
                        im = Image()
                        im.read('%s.jpg' % col)
                        canvas.compose(im, x_off, y_off)
                        w, h = im.size
                        x_off += w
                        if largest_height < h:
                            largest_height = h
                    y_off += largest_height
                canvas.set_compression_quality(70)
                canvas.save('%s.jpg' % uid)
                self.log.debug('Wrote composite image with uid %s to images/%s.jpg' % (uid, uid))
            except Exception as e:
                self.log.error('Failed to write composite image with uid %s: %s' % (uid, e))

    # Run the HTML through the html processing plugin.
    from calibre.customize.ui import plugin_for_input_format
    html_input = plugin_for_input_format('html')
    for opt in html_input.options:
        # Start from the plugin's recommended defaults for every option
        setattr(self.options, opt.option.name, opt.recommended_value)
    self.options.input_encoding = 'utf-8'
    # Temporarily disable pipeline debugging for the inner conversion
    odi = self.options.debug_pipeline
    self.options.debug_pipeline = None
    # Determine the home.html record uid. This should be set in the
    # reserved values in the metadata record. home.html is the first
    # text record (should have hyper link references to other records)
    # in the document.
    try:
        home_html = self.header_record.home_html
        if not home_html:
            # NOTE(review): indexing dict.items() is Python 2 only — this
            # line would need list() on Python 3.
            home_html = self.uid_text_secion_number.items()[0][0]
    except:
        raise Exception('Could not determine home.html')
    # Generate oeb from html conversion.
    oeb = html_input.convert(open('%s.html' % home_html, 'rb'),
        self.options, 'html', self.log, {})
    self.options.debug_pipeline = odi
    return oeb
def sync_booklists(self, booklists, end_session=True): ''' Update metadata on device. :param booklists: A tuple containing the result of calls to (:meth:`books(oncard=None)`, :meth:`books(oncard='carda')`, :meth`books(oncard='cardb')`). ''' if not self.bambook: return json_codec = JsonCodec() # Create stub virtual book for sync info with TemporaryDirectory() as tdir: snbcdir = os.path.join(tdir, 'snbc') snbfdir = os.path.join(tdir, 'snbf') os.mkdir(snbcdir) os.mkdir(snbfdir) f = open(os.path.join(snbfdir, 'book.snbf'), 'wb') f.write('''<book-snbf version="1.0"> <head> <name>calibre同步信息</name> <author>calibre</author> <language>ZH-CN</language> <rights/> <publisher>calibre</publisher> <generator>''' + __appname__ + ' ' + __version__ + '''</generator> <created/> <abstract></abstract> <cover/> </head> </book-snbf> ''') f.close() f = open(os.path.join(snbfdir, 'toc.snbf'), 'wb') f.write('''<toc-snbf> <head> <chapters>0</chapters> </head> <body> </body> </toc-snbf> ''') f.close() cache_name = os.path.join(snbcdir, self.METADATA_CACHE) with open(cache_name, 'wb') as f: json_codec.encode_to_file(f, booklists[0]) with TemporaryFile('.snb') as f: if self.bambook.PackageSNB(f, tdir): if not self.bambook.SendFile(f, self.METADATA_FILE_GUID): print "Upload failed" else: print "Package failed" # Clear the _new_book indication, as we are supposed to be done with # adding books at this point for blist in booklists: if blist is not None: for book in blist: book._new_book = False self.report_progress(1.0, _('Sending metadata to device...'))
def upload_books(self, files, names, on_card=None, end_session=True, metadata=None): ''' Upload a list of books to the device. If a file already exists on the device, it should be replaced. This method should raise a :class:`FreeSpaceError` if there is not enough free space on the device. The text of the FreeSpaceError must contain the word "card" if ``on_card`` is not None otherwise it must contain the word "memory". :param files: A list of paths and/or file-like objects. If they are paths and the paths point to temporary files, they may have an additional attribute, original_file_path pointing to the originals. They may have another optional attribute, deleted_after_upload which if True means that the file pointed to by original_file_path will be deleted after being uploaded to the device. :param names: A list of file names that the books should have once uploaded to the device. len(names) == len(files) :param metadata: If not None, it is a list of :class:`Metadata` objects. The idea is to use the metadata to determine where on the device to put the book. len(metadata) == len(files). Apart from the regular cover (path to cover), there may also be a thumbnail attribute, which should be used in preference. The thumbnail attribute is of the form (width, height, cover_data as jpeg). :return: A list of 3-element tuples. The list is meant to be passed to :meth:`add_books_to_metadata`. 
''' self.report_progress(0, _('Transferring books to device...')) paths = [] if self.bambook: for (i, f) in enumerate(files): self.report_progress((i + 1) / float(len(files)), _('Transferring books to device...')) if not hasattr(f, 'read'): # Handle PDF File if f[-3:].upper() == "PDF": # Package the PDF file with TemporaryDirectory() as tdir: snbcdir = os.path.join(tdir, 'snbc') snbfdir = os.path.join(tdir, 'snbf') os.mkdir(snbcdir) os.mkdir(snbfdir) tmpfile = open(os.path.join(snbfdir, 'book.snbf'), 'wb') tmpfile.write('''<book-snbf version="1.0"> <head> <name><![CDATA[''' + metadata[i].title + ''']]></name> <author><![CDATA[''' + ' '.join(metadata[i].authors) + ''']]></author> <language>ZH-CN</language> <rights/> <publisher>calibre</publisher> <generator>''' + __appname__ + ' ' + __version__ + '''</generator> <created/> <abstract></abstract> <cover/> </head> </book-snbf> ''') tmpfile.close() tmpfile = open(os.path.join(snbfdir, 'toc.snbf'), 'wb') tmpfile.write('''<toc-snbf> <head> <chapters>1</chapters> </head> <body> <chapter src="pdf1.pdf"><![CDATA[''' + metadata[i].title + ''']]></chapter> </body> </toc-snbf> ''') tmpfile.close() pdf_name = os.path.join(snbcdir, "pdf1.pdf") shutil.copyfile(f, pdf_name) with TemporaryFile('.snb') as snbfile: if self.bambook.PackageSNB( snbfile, tdir ) and self.bambook.VerifySNB(snbfile): guid = self.bambook.SendFile( snbfile, self.get_guid(metadata[i].uuid)) elif f[-3:].upper() == 'SNB': if self.bambook.VerifySNB(f): guid = self.bambook.SendFile( f, self.get_guid(metadata[i].uuid)) else: print "book invalid" if guid: paths.append(guid) else: print "Send fail" ret = zip(paths, cycle([on_card])) self.report_progress(1.0, _('Transferring books to device...')) return ret