def cover(self, path, as_file=False, as_image=False, as_path=False):
    """Return the cover for the book stored at *path* (relative to the
    library), or None if it is not readable.

    :param as_file: return an open SpooledTemporaryFile positioned at 0
    :param as_image: return a QImage loaded from the cover data
    :param as_path: return the path to a persistent temporary copy
    """
    cover_path = os.path.join(self.library_path, path, 'cover.jpg')
    if not os.access(cover_path, os.R_OK):
        return None
    try:
        src = lopen(cover_path, 'rb')
    except (IOError, OSError):
        # Retry once after a short pause — presumably to ride out a
        # transient lock on the file (e.g. by another process on windows)
        time.sleep(0.2)
        src = lopen(cover_path, 'rb')
    with src:
        if as_path:
            ptf = PersistentTemporaryFile('_dbcover.jpg')
            with ptf:
                shutil.copyfileobj(src, ptf)
            return ptf.name
        if as_file:
            ans = SpooledTemporaryFile(SPOOL_SIZE)
            shutil.copyfileobj(src, ans)
            ans.seek(0)
        else:
            ans = src.read()
        if as_image:
            from PyQt4.Qt import QImage
            img = QImage()
            img.loadFromData(ans)
            ans = img
    return ans
def write_metadata_cache(self, storage, bl):
    """Serialize the book list *bl* as JSON and store it on *storage* as
    the driver's 'metadata' file."""
    from calibre.devices.mtp.books import JSONCodec
    # Sanity check: the book list must belong to this storage device.
    # Should never trigger in practice.
    if bl.storage_id != storage.storage_id:
        return
    buf = SpooledTemporaryFile(10 * 1024 * 1024)
    JSONCodec().encode_to_file(buf, bl)
    nbytes = buf.tell()
    buf.seek(0)
    self.put_calibre_file(storage, 'metadata', buf, nbytes)
def open(self, name, spool_size=5 * 1024 * 1024):
    """Return a seekable file object containing the decompressed data of
    the zip member *name* (a filename or a LocalHeader)."""
    if isinstance(name, LocalHeader):
        name = name.filename
    offset, header = self._get_file_info(name)
    self.stream.seek(offset)
    out = SpooledTemporaryFile(max_size=spool_size)
    # Pick the copier matching the member's compression method
    copier = (copy_stored_file if header.compression_method == ZIP_STORED
              else copy_compressed_file)
    copier(self.stream, header.compressed_size, out)
    out.seek(0)
    return out
def get_mtp_file(self, f, stream=None, callback=None):
    """Fetch the file *f* from the MTP device.

    :param f: A device filesystem object; must not be a folder.
    :param stream: Optional destination file-like object. If None, a
        SpooledTemporaryFile is created and its name set to f.name.
    :param callback: Passed through to the low-level get_file call.
    :return: The destination stream, rewound to the start.
    :raises ValueError: if *f* is a folder.
    :raises DeviceError: if the device reports errors.
    """
    if f.is_folder:
        # Fixed typo in the error message ("if a folder" -> "is a folder")
        raise ValueError('%s is a folder'%(f.full_path,))
    set_name = stream is None
    if stream is None:
        stream = SpooledTemporaryFile(5*1024*1024, '_wpd_receive_file.dat')
    ok, errs = self.dev.get_file(f.object_id, stream, callback)
    if not ok:
        raise DeviceError('Failed to get file: %s with errors: %s'%(
            f.full_path, self.format_errorstack(errs)))
    stream.seek(0)
    if set_name:
        stream.name = f.name
    return stream
def safe_replace(self, name, datastream, extra_replacements=None, add_missing=False):
    '''
    Rewrite the underlying zip stream, replacing the member *name* with the
    contents of *datastream*.

    :param name: Name of the zip member to replace
    :param datastream: bytes or a file-like object with the replacement data
    :param extra_replacements: Optional mapping of additional member names
        to their replacement data (bytes or file-like objects)
    :param add_missing: If True, replacement names not already present in
        the zip are appended to it
    '''
    from calibre.utils.zipfile import ZipFile, ZipInfo
    # Fixed: extra_replacements used a mutable default argument ({});
    # use None so no dict instance is shared across calls.
    replacements = {name:datastream}
    if extra_replacements:
        replacements.update(extra_replacements)
    names = frozenset(list(replacements.keys()))
    found = set()

    def rbytes(name):
        # Normalize a replacement value to raw bytes
        r = replacements[name]
        if not isinstance(r, bytes):
            r = r.read()
        return r

    with SpooledTemporaryFile(max_size=100*1024*1024) as temp:
        ztemp = ZipFile(temp, 'w')
        for offset, header in itervalues(self.file_info):
            if header.filename in names:
                # Preserve the member's original compression method
                zi = ZipInfo(header.filename)
                zi.compress_type = header.compression_method
                ztemp.writestr(zi, rbytes(header.filename))
                found.add(header.filename)
            else:
                # Copy members that are not being replaced straight through
                ztemp.writestr(header.filename, self.read(
                    header.filename, spool_size=0))
        if add_missing:
            for name in names - found:
                ztemp.writestr(name, rbytes(name))
        ztemp.close()
        # Overwrite the original stream in place with the rebuilt archive
        zipstream = self.stream
        temp.seek(0)
        zipstream.seek(0)
        zipstream.truncate()
        shutil.copyfileobj(temp, zipstream)
        zipstream.flush()
def decompress(raw, outfile=None):
    '''
    Decompress the specified .xz data.

    :param raw: A bytestring or a file-like object open for reading
    :param outfile: A file-like object open for writing. The decompressed
        data is written into it. If not specified then a
        SpooledTemporaryFile is created and returned by this function.
    '''
    if isinstance(raw, bytes):
        raw = BytesIO(raw)
    outfile = outfile or SpooledTemporaryFile(50 * 1024 * 1024, '_xz_decompress')
    while True:
        # Decompress one complete xz stream
        read_stream(raw, outfile)
        pos = raw.tell()
        lookahead = raw.read(1024)
        if len(lookahead) < 20:
            break  # too little data left for another stream
        magic_at = lookahead.find(HEADER_MAGIC)
        if magic_at < 0:
            break  # no further concatenated stream
        # Another stream follows: rewind to just after the previous one and
        # validate any padding before its header (must be NULs, multiple of 4)
        raw.seek(pos)
        if magic_at:
            padding = raw.read(magic_at)
            if padding.lstrip(b'\0') or len(padding) % 4:
                raise InvalidXZ('Found trailing garbage between streams')
    return outfile
def commit(self, db, id_):
    """Apply the format additions and removals shown in the list widget to
    the database record *id_*. Returns True."""
    if not self.changed:
        return True
    added, kept, src_paths = set(), set(), {}
    for row in range(self.formats.count()):
        item = self.formats.item(row)
        ext, path = item.ext.lower(), item.path
        if 'unknown' in ext.lower():
            ext = None
        if path:
            # A path means the user added this format from a file
            added.add(ext)
            src_paths[ext] = path
        else:
            kept.add(ext)
    for ext in added:
        with SpooledTemporaryFile(SPOOL_SIZE) as spool:
            with open(src_paths[ext], 'rb') as src:
                shutil.copyfileobj(src, spool)
            spool.seek(0)
            db.add_format(id_, ext, spool, notify=False, index_is_id=True)
    dbfmts = db.formats(id_, index_is_id=True)
    in_db = {x.lower() for x in dbfmts.split(',')} if dbfmts else set()
    requested = added | kept
    # Remove formats that were present originally but are no longer listed
    for ext in in_db:
        if ext not in requested and ext in self.original_val:
            db.remove_format(id_, ext, notify=False, index_is_id=True)
    self.changed = False
    return True
def read_request_body(self, inheaders, request_content_length, chunked_read):
    """Set up reading of the HTTP request body.

    Dispatches to chunked or sized reading via set_state; when there is no
    body at all, responds immediately with an empty BytesIO.
    """
    if not chunked_read and request_content_length <= 0:
        # No body to read: respond immediately. Previously a spooled temp
        # file was created even on this path and then never used.
        self.prepare_response(inheaders, BytesIO())
        return
    # Only allocate the body buffer once we know a body is coming
    buf = SpooledTemporaryFile(prefix='rq-body-', max_size=DEFAULT_BUFFER_SIZE, dir=self.tdir)
    if chunked_read:
        # [0] is a mutable counter shared with read_chunk_length —
        # presumably the number of body bytes read so far; confirm there
        self.set_state(READ, self.read_chunk_length, inheaders, Accumulator(), buf, [0])
    else:
        self.set_state(READ, self.sized_read, inheaders, buf, request_content_length)
def get_mtp_file(self, f, stream=None, callback=None):
    """Fetch the file *f* from the WPD (MTP) device.

    :param f: A device filesystem object; must not be a folder.
    :param stream: Optional destination file-like object. If None, a
        SpooledTemporaryFile is created and its name set to f.name.
    :param callback: Passed through to the low-level get_file call.
    :return: The destination stream, rewound to the start.
    :raises ValueError: if *f* is a folder.
    :raises DeviceError: if the fetch fails.
    """
    if f.is_folder:
        # Fixed typo in the error message ("if a folder" -> "is a folder")
        raise ValueError('%s is a folder'%(f.full_path,))
    set_name = stream is None
    if stream is None:
        stream = SpooledTemporaryFile(5*1024*1024, '_wpd_receive_file.dat')
    try:
        try:
            self.dev.get_file(f.object_id, stream, callback)
        except self.wpd.WPDFileBusy:
            # Device reported busy: wait and retry once
            time.sleep(2)
            self.dev.get_file(f.object_id, stream, callback)
    except Exception as e:
        raise DeviceError('Failed to fetch the file %s with error: %s'%
            (f.full_path, as_unicode(e)))
    stream.seek(0)
    if set_name:
        stream.name = f.name
    return stream
def decompress(raw, outfile=None, bufsize=10 * 1024 * 1024):
    """Decompress LZMA data read from *raw* (bytes or a readable file-like
    object) into *outfile*, creating a SpooledTemporaryFile when no
    destination is given. Returns the destination."""
    if isinstance(raw, bytes):
        raw = BytesIO(raw)
    uncompressed_size, header = read_header(raw)
    outfile = outfile or SpooledTemporaryFile(50 * 1024 * 1024, '_lzma_decompress')
    lzma.decompress(raw.read, raw.seek, outfile.write, uncompressed_size,
                    header, bufsize)
    # Trim any excess output beyond the declared uncompressed size
    end = outfile.tell()
    if end > uncompressed_size:
        outfile.seek(uncompressed_size)
        outfile.truncate()
    return outfile
def cover(self, book_id, as_file=False, as_image=False, as_path=False):
    '''
    Return the cover image or None. By default, returns the cover as a
    bytestring.

    WARNING: Using as_path will copy the cover to a temp file and return
    the path to the temp file. You should delete the temp file when you are
    done with it.

    :param as_file: If True return the image as an open file object (a
                    SpooledTemporaryFile)
    :param as_image: If True return the image as a QImage object
    :param as_path: If True return the image as a path pointing to a
                    temporary file
    '''
    if as_file:
        data = SpooledTemporaryFile(SPOOL_SIZE)
        if not self.copy_cover_to(book_id, data):
            return
        data.seek(0)
    elif as_path:
        tf = PersistentTemporaryFile('_dbcover.jpg')
        with tf:
            if not self.copy_cover_to(book_id, tf):
                return
        data = tf.name
    else:
        buf = BytesIO()
        if not self.copy_cover_to(book_id, buf):
            return
        data = buf.getvalue()
    if as_image:
        from PyQt4.Qt import QImage
        img = QImage()
        img.loadFromData(data)
        data = img
    return data
# NOTE(review): fragment of a larger save-to-disk routine. The enclosing
# function definition and the body of the trailing ``except:`` lie outside
# this chunk, so the code is left byte-identical rather than restyled.
if opts.write_opf: opf = metadata_to_opf(mi) with open(base_path+'.opf', 'wb') as f: f.write(opf) mi.cover = ocover written = False for fmt in formats: global plugboard_save_to_disk_value, plugboard_any_format_value cpb = find_plugboard(plugboard_save_to_disk_value, fmt, plugboards) fp = format_map.get(fmt, None) if fp is None: continue stream = SpooledTemporaryFile(20*1024*1024, '_save_to_disk.'+(fmt or 'tmp')) with open(fp, 'rb') as f: shutil.copyfileobj(f, stream) stream.seek(0) written = True if opts.update_metadata: try: if cpb: newmi = mi.deepcopy_metadata() newmi.template_to_attribute(mi, cpb) else: newmi = mi if cover: newmi.cover_data = ('jpg', cover) set_metadata(stream, newmi, fmt) except:
def __init__(self, file_obj, max_size=50 * 1024 * 1024):
    """Initialize a spooled temporary file, keeping a reference to *file_obj*.

    NOTE(review): file_obj is stored but not otherwise used in this block —
    presumably kept so the underlying file object stays alive for this
    wrapper's lifetime; confirm against the callers.

    :param max_size: bytes held in memory before spooling to disk
    """
    self._file_obj = file_obj
    # Explicit base-class call (not super()) matches the file's style
    SpooledTemporaryFile.__init__(self, max_size)
def compress(raw, outfile=None, level=5, check_type='crc64'):
    '''
    Compress the specified data into a .xz stream (which can be written
    directly as an .xz file).

    :param raw: A bytestring or a file-like object open for reading
    :param outfile: A file like object open for writing. The .xz stream is
        written into it. If not specified then a SpooledTemporaryFile is
        created and returned by this function.
    :param level: An integer between 0 and 9 with 0 being fastest/worst
        compression and 9 being slowest/best compression
    :param check_type: The type of data integrity check to write into the
        output .xz stream. Should be one of: 'crc32', 'crc64', 'sha256',
        or None
    '''
    if isinstance(raw, bytes):
        raw = BytesIO(raw)
    # NOTE(review): the temp-file suffix '_xz_decompress' looks copy-pasted
    # from decompress(); cosmetic only, deliberately left unchanged here.
    outfile = outfile or SpooledTemporaryFile(50 * 1024 * 1024, '_xz_decompress')
    # Write stream header
    outfile.write(HEADER_MAGIC)
    # Map the friendly check name to the xz stream-flags check id;
    # anything unrecognized (including the default 'crc64') maps to 4
    check_type = {
        'crc': 1, 'crc32': 1, 'sha256': 0xa,
        None: 0, '': 0, 'none': 0, 'None': 0}.get(check_type, 4)
    stream_flags = as_bytes(0, check_type)
    outfile.write(stream_flags)
    outfile.write(pack(b'<I', crc32(stream_flags)))
    # Write block header
    filter_flags = encode_var_int(LZMA2_FILTER_ID) + encode_var_int(
        1) + lzma.preset_map[level]
    block_header = align(b'\0\0' + filter_flags)
    # First byte encodes the header size in 4-byte units, minus one
    bhs = ((4 + len(block_header)) // 4) - 1
    block_header = as_bytes(bhs) + block_header[1:]
    block_header += pack(b'<I', crc32(block_header))
    start = outfile.tell()
    outfile.write(block_header)
    # Write compressed data and check
    checker = {0: DummyChecker, 1: CRCChecker, 4: CRCChecker,
               0xa: Sha256Checker}[check_type](check_type)
    uncompressed_size = [0]  # list so the closure below can mutate it

    def read(n):
        # Feed the compressor while tracking input size and checksum
        ans = raw.read(n)
        if ans:
            uncompressed_size[0] += len(ans)
            checker(ans)
        return ans
    lzma.compress(read, outfile.write, None, level)
    unpadded_size = outfile.tell() - start
    pos = outfile.tell()
    if pos % 4:
        # Pad the block to a multiple of 4 bytes
        outfile.write(b'\0' * (4 - (pos % 4)))
    checker.finish()
    if check_type:
        cc = checker.code_as_bytes
        outfile.write(cc)
        unpadded_size += len(cc)
    # Write index
    index = b'\0' + encode_var_int(1)
    index += encode_var_int(unpadded_size) + encode_var_int(
        uncompressed_size[0])
    if len(index) % 4:
        # Pad the index to a multiple of 4 bytes
        index += b'\0' * (4 - len(index) % 4)
    outfile.write(index), outfile.write(pack(b'<I', crc32(index)))
    # Write stream footer
    backwards_size = pack(b'<I', ((len(index) + 4) // 4) - 1)
    outfile.write(pack(b'<I', crc32(backwards_size + stream_flags)))
    outfile.write(backwards_size), outfile.write(stream_flags), outfile.write(
        FOOTER_MAGIC)
    # NOTE(review): the visible chunk ends here without returning outfile,
    # although the docstring says it is returned — possibly truncated in
    # this view; confirm against the full source.
def do_save_book_to_disk(id_, mi, cover, plugboards, format_map, root, opts, length):
    """Write one book's selected formats (and optionally its cover and OPF)
    under *root*.

    Returns ``(failed, id_, title)`` where *failed* is True when no format
    file was actually written.
    """
    have = [x.lower().strip() for x in format_map.keys()]
    # as_local_time converts the stored timestamps for the on-disk copy
    if mi.pubdate:
        mi.pubdate = as_local_time(mi.pubdate)
    if mi.timestamp:
        mi.timestamp = as_local_time(mi.timestamp)
    if opts.formats == 'all':
        wanted = have
    else:
        wanted = [x.lower().strip() for x in opts.formats.split(',')]
    to_save = set(have) & set(wanted)
    if not to_save:
        return True, id_, mi.title
    base_path = os.path.join(root, *get_path_components(opts, mi, id_, length))
    base_name = os.path.basename(base_path)
    dirpath = os.path.dirname(base_path)
    # Don't test for existence first as the test could fail but another
    # worker process could create the directory before the makedirs call
    try:
        os.makedirs(dirpath)
    except BaseException:
        if not os.path.exists(dirpath):
            raise
    ocover = mi.cover
    if opts.save_cover and cover:
        with open(base_path + '.jpg', 'wb') as f:
            f.write(cover)
        mi.cover = base_name + '.jpg'
    else:
        mi.cover = None
    if opts.write_opf:
        from calibre.ebooks.metadata.opf2 import metadata_to_opf
        with open(base_path + '.opf', 'wb') as f:
            f.write(metadata_to_opf(mi))
    mi.cover = ocover
    written = False
    for fmt in to_save:
        src = format_map.get(fmt, None)
        if src is None:
            continue
        spool = SpooledTemporaryFile(20 * 1024 * 1024,
                                     '_save_to_disk.' + (fmt or 'tmp'))
        with open(src, 'rb') as f:
            shutil.copyfileobj(f, spool)
        spool.seek(0)
        written = True
        if opts.update_metadata:
            update_metadata(mi, fmt, spool, plugboards, cover)
            spool.seek(0)
        with open(base_path + '.' + str(fmt), 'wb') as f:
            shutil.copyfileobj(spool, f)
    return not written, id_, mi.title
def do_save_book_to_disk(id_, mi, cover, plugboards, format_map, root, opts, length):
    """Save one book's selected formats, plus optional cover/OPF files,
    under *root*. Returns ``(failed, id_, title)``; *failed* is True when
    nothing was written."""
    available = {x.lower().strip() for x in format_map.keys()}
    if mi.pubdate:
        mi.pubdate = as_local_time(mi.pubdate)
    if mi.timestamp:
        mi.timestamp = as_local_time(mi.timestamp)
    asked = (available if opts.formats == 'all' else
             {x.lower().strip() for x in opts.formats.split(',')})
    formats = available & asked
    if not formats:
        return True, id_, mi.title
    components = get_path_components(opts, mi, id_, length)
    base_path = os.path.join(root, *components)
    base_name, dirpath = os.path.basename(base_path), os.path.dirname(base_path)
    try:
        os.makedirs(dirpath)
    except BaseException:
        # Another worker may have created the directory concurrently;
        # only propagate if it really is missing
        if not os.path.exists(dirpath):
            raise
    saved_cover = mi.cover
    mi.cover = None
    if opts.save_cover and cover:
        with open(base_path + '.jpg', 'wb') as out:
            out.write(cover)
        mi.cover = base_name + '.jpg'
    if opts.write_opf:
        from calibre.ebooks.metadata.opf2 import metadata_to_opf
        opf = metadata_to_opf(mi)
        with open(base_path + '.opf', 'wb') as out:
            out.write(opf)
    mi.cover = saved_cover
    written = False
    for fmt in formats:
        source_path = format_map.get(fmt, None)
        if source_path is None:
            continue
        tmp = SpooledTemporaryFile(20*1024*1024, '_save_to_disk.' + (fmt or 'tmp'))
        with open(source_path, 'rb') as src:
            shutil.copyfileobj(src, tmp)
        tmp.seek(0)
        written = True
        if opts.update_metadata:
            update_metadata(mi, fmt, tmp, plugboards, cover)
            tmp.seek(0)
        fmt_path = base_path + '.' + str(fmt)
        with open(fmt_path, 'wb') as out:
            shutil.copyfileobj(tmp, out)
    return not written, id_, mi.title
def format(self, book_id, fmt, as_file=False, as_path=False, preserve_filename=False):
    '''
    Return the ebook format as a bytestring or `None` if the format doesn't
    exist, or we don't have permission to write to the ebook file.

    :param as_file: If True the ebook format is returned as a file object. Note
                    that the file object is a SpooledTemporaryFile, so if what you
                    want to do is copy the format to another file, use
                    :method:`copy_format_to` instead for performance.
    :param as_path: Copies the format file to a temp file and returns the path to
                    the temp file
    :param preserve_filename: If True and returning a path the filename is
                    the same as that used in the library. Note that using
                    this means that repeated calls yield the same
                    temp file (which is re-created each time)
    '''
    ext = ('.'+fmt.lower()) if fmt else ''
    if as_path:
        if preserve_filename:
            with self.read_lock:
                try:
                    fname = self.fields['formats'].format_fname(book_id, fmt)
                except Exception:
                    # Fixed: was a bare except (also caught SystemExit/
                    # KeyboardInterrupt). No such format for this book.
                    return None
                fname += ext
            bd = base_dir()
            d = os.path.join(bd, 'format_abspath')
            try:
                os.makedirs(d)
            except OSError:
                # Directory already exists; any other failure will surface
                # as a clearer error from copy_format_to below
                pass
            ret = os.path.join(d, fname)
            try:
                self.copy_format_to(book_id, fmt, ret)
            except NoSuchFormat:
                return None
        else:
            with PersistentTemporaryFile(ext) as pt:
                try:
                    self.copy_format_to(book_id, fmt, pt)
                except NoSuchFormat:
                    return None
            ret = pt.name
    elif as_file:
        with self.read_lock:
            try:
                fname = self.fields['formats'].format_fname(book_id, fmt)
            except Exception:
                # Fixed: was a bare except
                return None
            fname += ext
        ret = SpooledTemporaryFile(SPOOL_SIZE)
        try:
            self.copy_format_to(book_id, fmt, ret)
        except NoSuchFormat:
            return None
        ret.seek(0)
        # Various bits of code try to use the name as the default
        # title when reading metadata, so set it
        ret.name = fname
    else:
        buf = BytesIO()
        try:
            self.copy_format_to(book_id, fmt, buf)
        except NoSuchFormat:
            return None
        ret = buf.getvalue()
    return ret
# NOTE(review): fragment of a larger save-to-disk routine. The enclosing
# function definition and the body of the trailing ``except:`` lie outside
# this chunk, so the code is left byte-identical rather than restyled.
if opts.write_opf: opf = metadata_to_opf(mi) with open(base_path + '.opf', 'wb') as f: f.write(opf) mi.cover = ocover written = False for fmt in formats: global plugboard_save_to_disk_value, plugboard_any_format_value cpb = find_plugboard(plugboard_save_to_disk_value, fmt, plugboards) fp = format_map.get(fmt, None) if fp is None: continue stream = SpooledTemporaryFile(20 * 1024 * 1024, '_save_to_disk.' + (fmt or 'tmp')) with open(fp, 'rb') as f: shutil.copyfileobj(f, stream) stream.seek(0) written = True if opts.update_metadata: try: if cpb: newmi = mi.deepcopy_metadata() newmi.template_to_attribute(mi, cpb) else: newmi = mi if cover: newmi.cover_data = ('jpg', cover) set_metadata(stream, newmi, fmt) except:
def format(self, book_id, fmt, as_file=False, as_path=False, preserve_filename=False):
    '''
    Return the ebook format as a bytestring or `None` if the format doesn't
    exist, or we don't have permission to write to the ebook file.

    :param as_file: If True the ebook format is returned as a file object. Note
                    that the file object is a SpooledTemporaryFile, so if what you
                    want to do is copy the format to another file, use
                    :method:`copy_format_to` instead for performance.
    :param as_path: Copies the format file to a temp file and returns the path to
                    the temp file
    :param preserve_filename: If True and returning a path the filename is
                    the same as that used in the library. Note that using
                    this means that repeated calls yield the same
                    temp file (which is re-created each time)
    '''
    ext = ('.' + fmt.lower()) if fmt else ''
    if as_path:
        if preserve_filename:
            with self.read_lock:
                try:
                    fname = self.fields['formats'].format_fname(
                        book_id, fmt)
                except Exception:
                    # Fixed: was a bare except (also caught SystemExit/
                    # KeyboardInterrupt). No such format for this book.
                    return None
                fname += ext
            bd = base_dir()
            d = os.path.join(bd, 'format_abspath')
            try:
                os.makedirs(d)
            except OSError:
                # Directory already exists; any other failure will surface
                # as a clearer error from copy_format_to below
                pass
            ret = os.path.join(d, fname)
            try:
                self.copy_format_to(book_id, fmt, ret)
            except NoSuchFormat:
                return None
        else:
            with PersistentTemporaryFile(ext) as pt:
                try:
                    self.copy_format_to(book_id, fmt, pt)
                except NoSuchFormat:
                    return None
            ret = pt.name
    elif as_file:
        with self.read_lock:
            try:
                fname = self.fields['formats'].format_fname(book_id, fmt)
            except Exception:
                # Fixed: was a bare except
                return None
            fname += ext
        ret = SpooledTemporaryFile(SPOOL_SIZE)
        try:
            self.copy_format_to(book_id, fmt, ret)
        except NoSuchFormat:
            return None
        ret.seek(0)
        # Various bits of code try to use the name as the default
        # title when reading metadata, so set it
        ret.name = fname
    else:
        buf = BytesIO()
        try:
            self.copy_format_to(book_id, fmt, buf)
        except NoSuchFormat:
            return None
        ret = buf.getvalue()
    return ret