def get(ctx, rd, what, book_id, library_id):
    # Serve a single per-book resource selected by ``what``: a thumbnail,
    # the cover, OPF metadata, JSON metadata, or a named format file.
    # ``book_id`` may carry trailing thumbnail geometry as "<id>_<w>_<h>".
    book_id, rest = book_id.partition('_')[::2]
    try:
        book_id = int(book_id)
    except Exception:
        raise HTTPNotFound('Book with id %r does not exist' % book_id)
    db = get_db(ctx, rd, library_id)
    if db is None:
        raise HTTPNotFound('Library %r not found' % library_id)
    with db.safe_read_lock:
        if not ctx.has_id(rd, db, book_id):
            raise BookNotFound(book_id, db)
        library_id = db.server_library_id  # in case library_id was None
        if what == 'thumb':
            # Thumbnail geometry: ?sz= query wins, else the _w_h suffix,
            # else the 60x80 default. "full" means no scaling at all.
            sz = rd.query.get('sz')
            w, h = 60, 80
            if sz is None:
                if rest:
                    try:
                        w, h = map(int, rest.split('_'))
                    except Exception:
                        pass
            elif sz == 'full':
                w = h = None
            elif 'x' in sz:
                try:
                    w, h = map(int, sz.partition('x')[::2])
                except Exception:
                    pass
            else:
                try:
                    w = h = int(sz)
                except Exception:
                    pass
            return cover(ctx, rd, library_id, db, book_id, width=w, height=h)
        elif what == 'cover':
            return cover(ctx, rd, library_id, db, book_id)
        elif what == 'opf':
            mi = db.get_metadata(book_id, get_cover=False)
            rd.outheaders[
                'Content-Type'] = 'application/oebps-package+xml; charset=UTF-8'
            rd.outheaders['Last-Modified'] = http_date(
                timestampfromdt(mi.last_modified))
            return metadata_to_opf(mi)
        elif what == 'json':
            from calibre.srv.ajax import book_to_json
            data, last_modified = book_to_json(ctx, rd, db, book_id)
            rd.outheaders['Last-Modified'] = http_date(
                timestampfromdt(last_modified))
            return json(ctx, rd, get, data)
        else:
            # Anything else is treated as a format name, e.g. "epub"
            try:
                return book_fmt(ctx, rd, library_id, db, book_id, what.lower())
            except NoSuchFormat:
                raise HTTPNotFound('No %s format for the book %r' % (
                    what.lower(), book_id))
def get(ctx, rd, what, book_id, library_id):
    # Dispatch a per-book resource request: thumbnail, cover, OPF,
    # JSON metadata, or a named format file.
    book_id, thumb_geometry = book_id.partition('_')[::2]
    try:
        book_id = int(book_id)
    except Exception:
        raise HTTPNotFound('Book with id %r does not exist' % book_id)
    db = get_db(ctx, rd, library_id)
    if db is None:
        raise HTTPNotFound('Library %r not found' % library_id)

    def thumb_size():
        # Resolve requested thumbnail dimensions, defaulting to 60x80.
        width, height = 60, 80
        sz = rd.query.get('sz')
        if sz is None:
            if thumb_geometry:
                try:
                    width, height = map(int, thumb_geometry.split('_'))
                except Exception:
                    pass
        elif sz == 'full':
            width = height = None
        elif 'x' in sz:
            try:
                width, height = map(int, sz.partition('x')[::2])
            except Exception:
                pass
        else:
            try:
                width = height = int(sz)
            except Exception:
                pass
        return width, height

    with db.safe_read_lock:
        if not ctx.has_id(rd, db, book_id):
            raise BookNotFound(book_id, db)
        library_id = db.server_library_id  # in case library_id was None
        if what == 'thumb':
            w, h = thumb_size()
            return cover(ctx, rd, library_id, db, book_id, width=w, height=h)
        if what == 'cover':
            return cover(ctx, rd, library_id, db, book_id)
        if what == 'opf':
            mi = db.get_metadata(book_id, get_cover=False)
            rd.outheaders['Content-Type'] = 'application/oebps-package+xml; charset=UTF-8'
            rd.outheaders['Last-Modified'] = http_date(timestampfromdt(mi.last_modified))
            return metadata_to_opf(mi)
        if what == 'json':
            from calibre.srv.ajax import book_to_json
            data, last_modified = book_to_json(ctx, rd, db, book_id)
            rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
            return json(ctx, rd, get, data)
        # Fall through: treat ``what`` as a format name
        try:
            return book_fmt(ctx, rd, library_id, db, book_id, what.lower())
        except NoSuchFormat:
            raise HTTPNotFound('No %s format for the book %r' % (what.lower(), book_id))
def simple_response(self, status_code, msg="", close_after_response=True, extra_headers=None):
    # Send a minimal, self-contained HTTP response: status line, a few
    # headers and an optional plain-text body.
    if self.response_protocol is HTTP1:
        # HTTP/1.0 has no 413/414/303 codes
        status_code = {
            httplib.REQUEST_ENTITY_TOO_LARGE: httplib.BAD_REQUEST,
            httplib.REQUEST_URI_TOO_LONG: httplib.BAD_REQUEST,
            httplib.SEE_OTHER: httplib.FOUND,
        }.get(status_code, status_code)
    self.close_after_response = close_after_response
    msg = msg.encode("utf-8")
    # A TRACE response echoes the request, hence text/http
    ct = "http" if self.method == "TRACE" else "plain"
    buf = [
        "%s %d %s" % (self.response_protocol, status_code, httplib.responses[status_code]),
        "Content-Length: %s" % len(msg),
        "Content-Type: text/%s; charset=UTF-8" % ct,
        "Date: " + http_date(),
    ]
    if self.close_after_response and self.response_protocol is HTTP11:
        buf.append("Connection: close")
    if extra_headers is not None:
        for h, v in extra_headers.iteritems():
            buf.append("%s: %s" % (h, v))
    buf.append("")
    buf = [(x + "\r\n").encode("ascii") for x in buf]
    # HEAD responses must not carry a body
    if self.method != "HEAD":
        buf.append(msg)
    self.response_ready(BytesIO(b"".join(buf)))
def opds(ctx, rd):
    # Top-level OPDS feed: fixed sort entries plus one entry per visible
    # category. Search parse errors surface as HTTP 500.
    rc = RequestContext(ctx, rd)
    db = rc.db
    try:
        categories = rc.get_categories(report_parse_errors=True)
    except ParseException as p:
        raise HTTPInternalServerError(p.msg)
    category_meta = db.field_metadata

    def display_key(field):
        # Sort categories by display name when metadata has one
        try:
            return category_meta[field]['name'].lower()
        except KeyError:
            return field

    cats = [
        (_('Newest'), _('Date'), 'Onewest'),
        (_('Title'), _('Title'), 'Otitle'),
    ]
    fm = rc.db.field_metadata
    for category in sorted(categories, key=lambda x: sort_key(display_key(x))):
        if fm.is_ignorable_field(category) and not rc.ctx.is_field_displayable(category):
            continue
        if not categories[category] or category in ('formats', 'identifiers'):
            continue
        meta = category_meta.get(category, None)
        if meta is None:
            continue
        cats.append((meta['name'], meta['name'], 'N' + category))
    last_modified = db.last_modified()
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return TopLevel(last_modified, cats, rc).root
def opds(ctx, rd):
    """Root OPDS catalog listing the sort orders and browsable categories."""
    rc = RequestContext(ctx, rd)
    db = rc.db
    categories = rc.get_categories()
    category_meta = db.field_metadata

    def sort_name(field):
        try:
            return category_meta[field]["name"].lower()
        except KeyError:
            return field

    cats = [(_("Newest"), _("Date"), "Onewest"), (_("Title"), _("Title"), "Otitle")]
    for category in sorted(categories, key=lambda c: sort_key(sort_name(c))):
        if not categories[category]:
            continue
        if category in ("formats", "identifiers"):
            continue
        meta = category_meta.get(category, None)
        if meta is not None:
            cats.append((meta["name"], meta["name"], "N" + category))
    last_modified = db.last_modified()
    rd.outheaders["Last-Modified"] = http_date(timestampfromdt(last_modified))
    return TopLevel(last_modified, cats, rc).root
def get_acquisition_feed(rc, ids, offset, page_url, up_url, id_,
                         sort_by='title', ascending=True, feed_title=None):
    # Build one page of an OPDS acquisition feed for the given book ids,
    # sanitizing the sort field name before sorting.
    if not ids:
        raise HTTPNotFound('No books found')
    with rc.db.safe_read_lock:
        sort_by = sanitize_sort_field_name(rc.db.field_metadata, sort_by)
        sorted_ids = rc.db.multisort([(sort_by, ascending)], ids)
        page_size = rc.opts.max_opds_items
        offsets = Offsets(offset, page_size, len(sorted_ids))
        page = sorted_ids[offsets.offset:offsets.offset + page_size]
        lm = rc.last_modified()
        rc.outheaders['Last-Modified'] = http_date(timestampfromdt(lm))
        feed = AcquisitionFeed(id_, lm, rc, page, offsets, page_url, up_url,
                               title=feed_title)
        return feed.root
def opds(ctx, rd):
    # Serve the top-level OPDS navigation feed.
    rc = RequestContext(ctx, rd)
    db = rc.db
    categories = rc.get_categories()
    category_meta = db.field_metadata
    cats = [
        (_('Newest'), _('Date'), 'Onewest'),
        (_('Title'), _('Title'), 'Otitle'),
    ]

    def getter(x):
        # Prefer the human-readable name for ordering, if present
        try:
            return category_meta[x]['name'].lower()
        except KeyError:
            return x

    skipped = ('formats', 'identifiers')
    for category in sorted(categories, key=lambda x: sort_key(getter(x))):
        if not categories[category] or category in skipped:
            continue
        meta = category_meta.get(category, None)
        if meta is None:
            continue
        cats.append((meta['name'], meta['name'], 'N' + category))
    last_modified = db.last_modified()
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return TopLevel(last_modified, cats, rc).root
def send_range_not_satisfiable(self, content_length):
    # Reply with 416, advertising the actual resource size in Content-Range.
    status_line = '%s %d %s' % (
        self.response_protocol,
        httplib.REQUESTED_RANGE_NOT_SATISFIABLE,
        httplib.responses[httplib.REQUESTED_RANGE_NOT_SATISFIABLE])
    headers = [
        status_line,
        "Date: " + http_date(),
        "Content-Range: bytes */%d" % content_length,
    ]
    self.response_ready(header_list_to_file(headers))
def mobile(ctx, rd):
    # Render the /mobile HTML index: a paginated, sortable, searchable
    # listing of the books in the selected library.
    db, library_id, library_map, default_library = get_library_data(ctx, rd)
    try:
        start = max(1, int(rd.query.get('start', 1)))
    except ValueError:
        raise HTTPBadRequest('start is not an integer')
    try:
        num = max(0, int(rd.query.get('num', 25)))
    except ValueError:
        raise HTTPBadRequest('num is not an integer')
    search = rd.query.get('search') or ''
    with db.safe_read_lock:
        book_ids = ctx.search(rd, db, search)
        total = len(book_ids)
        ascending = rd.query.get('order', '').lower().strip() == 'ascending'
        sort_by = sanitize_sort_field_name(db.field_metadata, rd.query.get('sort') or 'date')
        try:
            book_ids = db.multisort([(sort_by, ascending)], book_ids)
        except Exception:
            # Unknown or unsortable field: fall back to sorting by date
            sort_by = 'date'
            book_ids = db.multisort([(sort_by, ascending)], book_ids)
        books = [db.get_metadata(book_id) for book_id in book_ids[(start-1):(start-1)+num]]
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(db.last_modified()))
    order = 'ascending' if ascending else 'descending'
    # Query dict uses byte keys/values for urlencode (Python 2)
    q = {b'search':search.encode('utf-8'), b'order':bytes(order), b'sort':sort_by.encode('utf-8'), b'num':bytes(num), 'library_id':library_id}
    url_base = ctx.url_for('/mobile') + '?' + urlencode(q)
    # Offer links to the other libraries, excluding the current one
    lm = {k:v for k, v in library_map.iteritems() if k != library_id}
    return build_index(books, num, search, sort_by, order, start, total, url_base, db.field_metadata, ctx, lm, library_id)
def send_range_not_satisfiable(self, content_length):
    """Send an HTTP 416 response carrying the resource's total size."""
    code = httplib.REQUESTED_RANGE_NOT_SATISFIABLE
    buf = [
        '%s %d %s' % (self.response_protocol, code, httplib.responses[code]),
        "Date: " + http_date(),
        "Content-Range: bytes */%d" % content_length,
    ]
    self.response_ready(header_list_to_file(buf))
def opds(ctx, rd):
    """Generate the top-level OPDS feed, surfacing search parse errors as 500s."""
    rc = RequestContext(ctx, rd)
    db = rc.db
    try:
        categories = rc.get_categories(report_parse_errors=True)
    except ParseException as p:
        raise HTTPInternalServerError(p.msg)
    category_meta = db.field_metadata
    fm = rc.db.field_metadata

    def getter(x):
        try:
            return category_meta[x]['name'].lower()
        except KeyError:
            return x

    def is_shown(category):
        # Hidden when the field is ignorable and not configured as displayable
        if fm.is_ignorable_field(category) and not rc.ctx.is_field_displayable(category):
            return False
        return bool(categories[category]) and category not in ('formats', 'identifiers')

    cats = [
        (_('Newest'), _('Date'), 'Onewest'),
        (_('Title'), _('Title'), 'Otitle'),
    ]
    for category in sorted(categories, key=lambda x: sort_key(getter(x))):
        if not is_shown(category):
            continue
        meta = category_meta.get(category, None)
        if meta is None:
            continue
        cats.append((meta['name'], meta['name'], 'N' + category))
    last_modified = db.last_modified()
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return TopLevel(last_modified, cats, rc).root
def simple_response(self, status_code, msg='', close_after_response=True, extra_headers=None):
    # Send a short plain-text HTTP response and record it in the access log.
    if self.response_protocol is HTTP1:
        # HTTP/1.0 has no 413/414/303 codes
        status_code = {
            httplib.REQUEST_ENTITY_TOO_LARGE:httplib.BAD_REQUEST,
            httplib.REQUEST_URI_TOO_LONG:httplib.BAD_REQUEST,
            httplib.SEE_OTHER:httplib.FOUND
        }.get(status_code, status_code)
    self.close_after_response = close_after_response
    msg = msg.encode('utf-8')
    # A TRACE response echoes the request, hence text/http
    ct = 'http' if self.method == 'TRACE' else 'plain'
    buf = [
        '%s %d %s' % (self.response_protocol, status_code, httplib.responses[status_code]),
        "Content-Length: %s" % len(msg),
        "Content-Type: text/%s; charset=UTF-8" % ct,
        "Date: " + http_date(),
    ]
    if self.close_after_response and self.response_protocol is HTTP11:
        buf.append("Connection: close")
    if extra_headers is not None:
        for h, v in extra_headers.iteritems():
            buf.append('%s: %s' % (h, v))
    buf.append('')
    buf = [(x + '\r\n').encode('ascii') for x in buf]
    # HEAD responses must not carry a body
    if self.method != 'HEAD':
        buf.append(msg)
    response_data = b''.join(buf)
    self.log_access(status_code=status_code, response_size=len(response_data))
    self.response_ready(ReadOnlyFileBuffer(response_data))
def mobile(ctx, rd):
    # Render the /mobile HTML index: paginated, sortable, searchable book
    # listing. This variant threads ``rd`` through to build_index.
    db, library_id, library_map, default_library = get_library_data(ctx, rd)
    try:
        start = max(1, int(rd.query.get('start', 1)))
    except ValueError:
        raise HTTPBadRequest('start is not an integer')
    try:
        num = max(0, int(rd.query.get('num', 25)))
    except ValueError:
        raise HTTPBadRequest('num is not an integer')
    search = rd.query.get('search') or ''
    with db.safe_read_lock:
        book_ids = ctx.search(rd, db, search)
        total = len(book_ids)
        ascending = rd.query.get('order', '').lower().strip() == 'ascending'
        sort_by = sanitize_sort_field_name(db.field_metadata, rd.query.get('sort') or 'date')
        try:
            book_ids = db.multisort([(sort_by, ascending)], book_ids)
        except Exception:
            # Unknown or unsortable field: fall back to sorting by date
            sort_by = 'date'
            book_ids = db.multisort([(sort_by, ascending)], book_ids)
        books = [db.get_metadata(book_id) for book_id in book_ids[(start-1):(start-1)+num]]
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(db.last_modified()))
    order = 'ascending' if ascending else 'descending'
    # Query dict uses byte keys/values for urlencode (Python 2)
    q = {b'search':search.encode('utf-8'), b'order':bytes(order), b'sort':sort_by.encode('utf-8'), b'num':bytes(num), 'library_id':library_id}
    url_base = ctx.url_for('/mobile') + '?' + urlencode(q)
    # Offer links to the other libraries, excluding the current one
    lm = {k:v for k, v in iteritems(library_map) if k != library_id}
    return build_index(rd, books, num, search, sort_by, order, start, total, url_base, db.field_metadata, ctx, lm, library_id)
def simple_response(self, status_code, msg='', close_after_response=True, extra_headers=None):
    # Send a short, self-contained plain-text HTTP response.
    if self.response_protocol is HTTP1:
        # HTTP/1.0 has no 413/414/303 codes
        status_code = {
            httplib.REQUEST_ENTITY_TOO_LARGE: httplib.BAD_REQUEST,
            httplib.REQUEST_URI_TOO_LONG: httplib.BAD_REQUEST,
            httplib.SEE_OTHER: httplib.FOUND
        }.get(status_code, status_code)
    self.close_after_response = close_after_response
    msg = msg.encode('utf-8')
    # A TRACE response echoes the request, hence text/http
    ct = 'http' if self.method == 'TRACE' else 'plain'
    buf = [
        '%s %d %s' % (self.response_protocol, status_code, httplib.responses[status_code]),
        "Content-Length: %s" % len(msg),
        "Content-Type: text/%s; charset=UTF-8" % ct,
        "Date: " + http_date(),
    ]
    if self.close_after_response and self.response_protocol is HTTP11:
        buf.append("Connection: close")
    if extra_headers is not None:
        for h, v in extra_headers.iteritems():
            buf.append('%s: %s' % (h, v))
    buf.append('')
    buf = [(x + '\r\n').encode('ascii') for x in buf]
    # HEAD responses must not carry a body
    if self.method != 'HEAD':
        buf.append(msg)
    self.response_ready(ReadOnlyFileBuffer(b''.join(buf)))
def opds(ctx, rd):
    # Top-level OPDS catalog: fixed sort entries followed by the categories.
    rc = RequestContext(ctx, rd)
    db = rc.db
    categories = rc.get_categories()
    category_meta = db.field_metadata

    def getter(x):
        try:
            return category_meta[x]['name'].lower()
        except KeyError:
            return x

    cats = [
        (_('Newest'), _('Date'), 'Onewest'),
        (_('Title'), _('Title'), 'Otitle'),
    ]
    ordered = sorted(categories, key=lambda x: sort_key(getter(x)))
    for category in ordered:
        empty = len(categories[category]) == 0
        if empty or category in ('formats', 'identifiers'):
            continue
        meta = category_meta.get(category)
        if meta is None:
            continue
        cats.append((meta['name'], meta['name'], 'N' + category))
    last_modified = db.last_modified()
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return TopLevel(last_modified, cats, rc).root
def get_navcatalog(request_context, which, page_url, up_url, offset=0):
    # Serve the OPDS feed for a single category. Categories small enough
    # (per max_opds_ungrouped_items) are listed flat; larger ones are
    # grouped by the first letter of each item's sort name.
    categories = request_context.get_categories()
    if which not in categories:
        raise HTTPNotFound('Category %r not found' % which)
    items = categories[which]
    updated = request_context.last_modified()
    category_meta = request_context.db.field_metadata
    meta = category_meta.get(which, {})
    category_name = meta.get('name', which)
    feed_title = default_feed_title + ' :: ' + _('By %s') % category_name
    id_ = 'calibre-category-feed:' + which
    MAX_ITEMS = request_context.opts.max_opds_ungrouped_items
    if MAX_ITEMS > 0 and len(items) <= MAX_ITEMS:
        # Few enough items: emit them all as a flat, paginated feed
        max_items = request_context.opts.max_opds_items
        offsets = Offsets(offset, max_items, len(items))
        items = list(items)[offsets.offset:offsets.offset + max_items]
        ans = CategoryFeed(items, which, id_, updated, request_context, offsets, page_url, up_url, title=feed_title)
    else:
        # Too many items: present first-letter groups with item counts
        Group = namedtuple('Group', 'text count')
        starts = set()
        for x in items:
            val = getattr(x, 'sort', x.name)
            if not val:
                val = 'A'  # items without a sort name group under 'A'
            starts.add(val[0].upper())
        category_groups = OrderedDict()
        for x in sorted(starts, key=sort_key):
            category_groups[x] = len(
                [y for y in items if getattr(y, 'sort', y.name).startswith(x)])
        items = [Group(x, y) for x, y in category_groups.items()]
        max_items = request_context.opts.max_opds_items
        offsets = Offsets(offset, max_items, len(items))
        items = items[offsets.offset:offsets.offset + max_items]
        ans = CategoryGroupFeed(items, which, id_, updated, request_context, offsets, page_url, up_url, title=feed_title)
    request_context.outheaders['Last-Modified'] = http_date(
        timestampfromdt(updated))
    return ans.root
def dispatch(self, data):
    # Route the request to its endpoint, run it, then apply the
    # endpoint's cache-control policy to the response headers.
    endpoint_, args = self.find_route(data.path)
    if data.method not in endpoint_.methods:
        raise HTTPSimpleResponse(http_client.METHOD_NOT_ALLOWED)
    self.read_cookies(data)
    if endpoint_.auth_required and self.auth_controller is not None:
        self.auth_controller(data, endpoint_)
    if endpoint_.ok_code is not None:
        data.status_code = endpoint_.ok_code
    self.init_session(endpoint_, data)
    if endpoint_.needs_db_write:
        self.ctx.check_for_write_access(data)
    ans = endpoint_(self.ctx, data, *args)
    self.finalize_session(endpoint_, data, ans)
    outheaders = data.outheaders
    pp = endpoint_.postprocess
    if pp is not None:
        ans = pp(self.ctx, data, endpoint_, ans)
    cc = endpoint_.cache_control
    # cc may be: False (leave alone), None/'no-cache', a number of hours,
    # or a (cache-type, hours) pair. Only applied when the endpoint did
    # not set Cache-Control itself.
    if cc is not False and 'Cache-Control' not in data.outheaders:
        if cc is None or cc == 'no-cache':
            outheaders['Expires'] = http_date(
                10000.0)  # A date in the past
            outheaders['Cache-Control'] = 'no-cache, must-revalidate'
            outheaders['Pragma'] = 'no-cache'
        elif isinstance(cc, numbers.Number):
            cc = int(60 * 60 * cc)
            outheaders['Cache-Control'] = 'public, max-age=%d' % cc
            if cc == 0:
                cc -= 100000  # force Expires into the past
            outheaders['Expires'] = http_date(cc + time.time())
        else:
            ctype, max_age = cc
            max_age = int(60 * 60 * max_age)
            outheaders['Cache-Control'] = '%s, max-age=%d' % (ctype, max_age)
            if max_age == 0:
                max_age -= 100000  # force Expires into the past
            outheaders['Expires'] = http_date(max_age + time.time())
    return ans
def send_not_modified(self, etag=None):
    # Answer a conditional request with 304 and no body.
    headers = [
        '%s %d %s' % (self.response_protocol, httplib.NOT_MODIFIED,
                      httplib.responses[httplib.NOT_MODIFIED]),
        "Content-Length: 0",
        "Date: " + http_date(),
    ]
    if etag is not None:
        headers.append('ETag: ' + etag)
    self.response_ready(header_list_to_file(headers))
def send_range_not_satisfiable(self, content_length):
    # 416 response; recorded in the access log with its header size.
    status = httplib.REQUESTED_RANGE_NOT_SATISFIABLE
    headers = [
        '%s %d %s' % (self.response_protocol, status, httplib.responses[status]),
        "Date: " + http_date(),
        "Content-Range: bytes */%d" % content_length,
    ]
    response_data = header_list_to_file(headers)
    self.log_access(status_code=status, response_size=response_data.sz)
    self.response_ready(response_data)
def send_not_modified(self, etag=None):
    """Reply 304 Not Modified, echoing the ETag when one is known."""
    nm = httplib.NOT_MODIFIED
    buf = ['%s %d %s' % (self.response_protocol, nm, httplib.responses[nm])]
    buf.append("Content-Length: 0")
    buf.append("Date: " + http_date())
    if etag is not None:
        buf.append('ETag: ' + etag)
    self.response_ready(header_list_to_file(buf))
def job_done(self, ok, result):
    # Worker completion callback: on failure send the error response (or
    # re-raise), on success assemble status line + headers + cookies.
    if not ok:
        etype, e, tb = result
        if isinstance(e, HTTPSimpleResponse):
            eh = {}
            if e.location:
                eh['Location'] = e.location
            if e.authenticate:
                eh['WWW-Authenticate'] = e.authenticate
            if e.log:
                self.log.warn(e.log)
            return self.simple_response(
                e.http_code, msg=e.message or '',
                close_after_response=e.close_connection, extra_headers=eh)
        # Re-raise unexpected errors with the original traceback (Python 2)
        raise etype, e, tb

    data, output = result
    output = self.finalize_output(output, data, self.method is HTTP1)
    if output is None:
        return
    outheaders = data.outheaders
    outheaders.set('Date', http_date(), replace_all=True)
    outheaders.set('Server', 'calibre %s' % __version__, replace_all=True)
    keep_alive = not self.close_after_response and self.opts.timeout > 0
    if keep_alive:
        outheaders.set('Keep-Alive', 'timeout=%d' % int(self.opts.timeout))
    if 'Connection' not in outheaders:
        if self.response_protocol is HTTP11:
            # HTTP/1.1 defaults to keep-alive, so only announce closing
            if self.close_after_response:
                outheaders.set('Connection', 'close')
        else:
            # HTTP/1.0 defaults to closing, so only announce keep-alive
            if not self.close_after_response:
                outheaders.set('Connection', 'Keep-Alive')
    ct = outheaders.get('Content-Type', '')
    if ct.startswith('text/') and 'charset=' not in ct:
        outheaders.set('Content-Type', ct + '; charset=UTF-8')
    buf = [
        HTTP11 + (' %d ' % data.status_code) + httplib.responses[data.status_code]
    ]
    for header, value in sorted(outheaders.iteritems(), key=itemgetter(0)):
        buf.append('%s: %s' % (header, value))
    for morsel in data.outcookie.itervalues():
        morsel['version'] = '1'
        x = morsel.output()
        if isinstance(x, bytes):
            x = x.decode('ascii')
        buf.append(x)
    buf.append('')
    self.response_ready(ReadOnlyFileBuffer(b''.join(
        (x + '\r\n').encode('ascii') for x in buf)), output=output)
def send_range_not_satisfiable(self, content_length):
    """Send a logged 416 (Requested Range Not Satisfiable) response."""
    rns = httplib.REQUESTED_RANGE_NOT_SATISFIABLE
    response_data = header_list_to_file([
        '%s %d %s' % (self.response_protocol, rns, httplib.responses[rns]),
        "Date: " + http_date(),
        "Content-Range: bytes */%d" % content_length,
    ])
    self.log_access(status_code=rns, response_size=response_data.sz)
    self.response_ready(response_data)
def dispatch(self, data):
    # Find the endpoint for the request path, authenticate/authorize,
    # run it, and apply its cache-control policy to the response.
    endpoint_, args = self.find_route(data.path)
    if data.method not in endpoint_.methods:
        raise HTTPSimpleResponse(http_client.METHOD_NOT_ALLOWED)
    self.read_cookies(data)
    if endpoint_.auth_required and self.auth_controller is not None:
        self.auth_controller(data, endpoint_)
    if endpoint_.ok_code is not None:
        data.status_code = endpoint_.ok_code
    self.init_session(endpoint_, data)
    if endpoint_.needs_db_write:
        self.ctx.check_for_write_access(data)
    ans = endpoint_(self.ctx, data, *args)
    self.finalize_session(endpoint_, data, ans)
    outheaders = data.outheaders
    pp = endpoint_.postprocess
    if pp is not None:
        ans = pp(self.ctx, data, endpoint_, ans)
    cc = endpoint_.cache_control
    # cc: False = leave alone; None/'no-cache' = disable caching; a number
    # = public max-age in hours; a pair = (cache-type, hours). Skipped if
    # the endpoint already set Cache-Control.
    if cc is not False and 'Cache-Control' not in data.outheaders:
        if cc is None or cc == 'no-cache':
            outheaders['Expires'] = http_date(10000.0)  # A date in the past
            outheaders['Cache-Control'] = 'no-cache, must-revalidate'
            outheaders['Pragma'] = 'no-cache'
        elif isinstance(cc, numbers.Number):
            cc = int(60 * 60 * cc)
            outheaders['Cache-Control'] = 'public, max-age=%d' % cc
            if cc == 0:
                cc -= 100000  # push Expires into the past
            outheaders['Expires'] = http_date(cc + time.time())
        else:
            ctype, max_age = cc
            max_age = int(60 * 60 * max_age)
            outheaders['Cache-Control'] = '%s, max-age=%d' % (ctype, max_age)
            if max_age == 0:
                max_age -= 100000  # push Expires into the past
            outheaders['Expires'] = http_date(max_age + time.time())
    return ans
def get(ctx, rd, what, book_id, library_id):
    # Serve a single book resource (thumbnail, cover, OPF, JSON metadata
    # or a format file) identified by ``what``.
    db = ctx.get_library(library_id)
    if db is None:
        raise HTTPNotFound("Library %r not found" % library_id)
    with db.safe_read_lock:
        if book_id not in ctx.allowed_book_ids(rd, db):
            raise HTTPNotFound("Book with id %r does not exist" % book_id)
        library_id = db.server_library_id  # in case library_id was None
        if what == "thumb":
            # Thumbnail geometry from ?sz=: "full" disables scaling,
            # "WxH" gives both dimensions, a bare integer gives a square.
            sz = rd.query.get("sz")
            w, h = 60, 80
            if sz is None:
                pass
            elif sz == "full":
                w = h = None
            elif "x" in sz:
                try:
                    w, h = map(int, sz.partition("x")[::2])
                except Exception:
                    pass
            else:
                try:
                    w = h = int(sz)
                except Exception:
                    pass
            return cover(ctx, rd, library_id, db, book_id, width=w, height=h)
        elif what == "cover":
            return cover(ctx, rd, library_id, db, book_id)
        elif what == "opf":
            mi = db.get_metadata(book_id, get_cover=False)
            rd.outheaders["Content-Type"] = "application/oebps-package+xml; charset=UTF-8"
            rd.outheaders["Last-Modified"] = http_date(timestampfromdt(mi.last_modified))
            return metadata_to_opf(mi)
        elif what == "json":
            from calibre.srv.ajax import book_to_json
            data, last_modified = book_to_json(ctx, rd, db, book_id)
            rd.outheaders["Last-Modified"] = http_date(timestampfromdt(last_modified))
            return json(ctx, rd, get, data)
        else:
            # ``what`` names a format, e.g. "epub"
            try:
                return book_fmt(ctx, rd, library_id, db, book_id, what.lower())
            except NoSuchFormat:
                # %s (not %r) for the format name, matching the error text
                # produced by the sibling implementations of this handler
                raise HTTPNotFound("No %s format for the book %r" % (what.lower(), book_id))
def job_done(self, ok, result):
    # Worker completion callback: send the error response on failure,
    # otherwise build the response head and log the access.
    if not ok:
        etype, e, tb = result
        if isinstance(e, HTTPSimpleResponse):
            eh = {}
            if e.location:
                eh['Location'] = e.location
            if e.authenticate:
                eh['WWW-Authenticate'] = e.authenticate
            if e.log:
                self.log.warn(e.log)
            return self.simple_response(e.http_code, msg=e.message or '',
                                        close_after_response=e.close_connection,
                                        extra_headers=eh)
        # Re-raise unexpected errors with the original traceback (Python 2)
        raise etype, e, tb

    data, output = result
    output = self.finalize_output(output, data, self.method is HTTP1)
    if output is None:
        return
    outheaders = data.outheaders
    outheaders.set('Date', http_date(), replace_all=True)
    outheaders.set('Server', 'calibre %s' % __version__, replace_all=True)
    keep_alive = not self.close_after_response and self.opts.timeout > 0
    if keep_alive:
        outheaders.set('Keep-Alive', 'timeout=%d' % int(self.opts.timeout))
    if 'Connection' not in outheaders:
        if self.response_protocol is HTTP11:
            # HTTP/1.1 defaults to keep-alive, so only announce closing
            if self.close_after_response:
                outheaders.set('Connection', 'close')
        else:
            # HTTP/1.0 defaults to closing, so only announce keep-alive
            if not self.close_after_response:
                outheaders.set('Connection', 'Keep-Alive')
    ct = outheaders.get('Content-Type', '')
    if ct.startswith('text/') and 'charset=' not in ct:
        outheaders.set('Content-Type', ct + '; charset=UTF-8')
    buf = [HTTP11 + (' %d ' % data.status_code) + httplib.responses[data.status_code]]
    for header, value in sorted(outheaders.iteritems(), key=itemgetter(0)):
        buf.append('%s: %s' % (header, value))
    for morsel in data.outcookie.itervalues():
        morsel['version'] = '1'
        x = morsel.output()
        if isinstance(x, bytes):
            x = x.decode('ascii')
        buf.append(x)
    buf.append('')
    response_data = ReadOnlyFileBuffer(b''.join((x + '\r\n').encode('ascii') for x in buf))
    if self.access_log is not None:
        # Logged size = declared body size (Content-Length) + header bytes
        sz = outheaders.get('Content-Length')
        if sz is not None:
            sz = int(sz) + response_data.sz
        self.log_access(status_code=data.status_code, response_size=sz, username=data.username)
    self.response_ready(response_data, output=output)
def send_not_modified(self, etag=None):
    # 304 Not Modified; the access log records only the header bytes.
    nm = httplib.NOT_MODIFIED
    headers = [
        '%s %d %s' % (self.response_protocol, nm, httplib.responses[nm]),
        "Content-Length: 0",
        "Date: " + http_date(),
    ]
    if etag is not None:
        headers.append('ETag: ' + etag)
    response_data = header_list_to_file(headers)
    self.log_access(status_code=nm, response_size=response_data.sz)
    self.response_ready(response_data)
def books(ctx, rd, library_id):
    '''
    Return the metadata for the books as a JSON dictionary.

    Query parameters: ?ids=all&category_urls=true&id_is_uuid=false&device_for_template=None

    If category_urls is true the returned dictionary also contains a
    mapping of category (field) names to URLs that return the list of books in the given
    category.

    If id_is_uuid is true then the book_id is assumed to be a book uuid instead.
    '''
    db = get_db(ctx, rd, library_id)
    with db.safe_read_lock:
        id_is_uuid = rd.query.get('id_is_uuid', 'false')
        ids = rd.query.get('ids')
        if ids is None or ids == 'all':
            ids = db.all_book_ids()
        else:
            ids = ids.split(',')
            if id_is_uuid == 'true':
                ids = {db.lookup_by_uuid(x) for x in ids}
                ids.discard(None)  # unknown uuids resolve to None
            else:
                try:
                    ids = {int(x) for x in ids}
                except Exception:
                    raise HTTPNotFound(
                        'ids must a comma separated list of integers')
        last_modified = None
        category_urls = rd.query.get('category_urls', 'true').lower() == 'true'
        device_compatible = rd.query.get('device_compatible', 'false').lower() == 'true'
        device_for_template = rd.query.get('device_for_template', None)
        ans = {}
        allowed_book_ids = ctx.allowed_book_ids(rd, db)
        for book_id in ids:
            if book_id not in allowed_book_ids:
                # Books outside the user's allowed set are reported as null
                ans[book_id] = None
                continue
            data, lm = book_to_json(ctx, rd, db, book_id,
                                    get_category_urls=category_urls,
                                    device_compatible=device_compatible,
                                    device_for_template=device_for_template)
            last_modified = lm if last_modified is None else max(lm, last_modified)
            ans[book_id] = data
    if last_modified is not None:
        rd.outheaders['Last-Modified'] = http_date(
            timestampfromdt(last_modified))
    return ans
def send_not_modified(self, etag=None):
    """Answer with 304 Not Modified and log the response size."""
    headers = [
        '%s %d %s' % (self.response_protocol, httplib.NOT_MODIFIED,
                      httplib.responses[httplib.NOT_MODIFIED]),
        "Content-Length: 0",
        "Date: " + http_date(),
    ]
    if etag is not None:
        headers.append('ETag: ' + etag)
    body = header_list_to_file(headers)
    self.log_access(status_code=httplib.NOT_MODIFIED, response_size=body.sz)
    self.response_ready(body)
def get_acquisition_feed(rc, ids, offset, page_url, up_url, id_,
                         sort_by='title', ascending=True, feed_title=None):
    """Return one page of an OPDS acquisition feed for ``ids``."""
    if not ids:
        raise HTTPNotFound('No books found')
    with rc.db.safe_read_lock:
        ordered = rc.db.multisort([(sort_by, ascending)], ids)
        per_page = rc.opts.max_opds_items
        offsets = Offsets(offset, per_page, len(ordered))
        window = ordered[offsets.offset:offsets.offset + per_page]
        lm = rc.last_modified()
        rc.outheaders['Last-Modified'] = http_date(timestampfromdt(lm))
        return AcquisitionFeed(id_, lm, rc, window, offsets, page_url, up_url,
                               title=feed_title).root
def opds_categorygroup(ctx, rd, category, which):
    # OPDS feed for one first-letter group within a large category.
    # ``category`` and ``which`` arrive hex-encoded in the URL.
    try:
        offset = int(rd.query.get('offset', 0))
    except Exception:
        raise HTTPNotFound('Not found')
    if not which or not category:
        raise HTTPNotFound('Not found')
    rc = RequestContext(ctx, rd)
    categories = rc.get_categories()
    page_url = rc.url_for('/opds/categorygroup', category=category, which=which)
    category = from_hex_unicode(category)
    if category not in categories:
        raise HTTPNotFound('Category %r not found' % which)
    category_meta = rc.db.field_metadata
    meta = category_meta.get(category, {})
    category_name = meta.get('name', which)
    which = from_hex_unicode(which)
    feed_title = default_feed_title + ' :: ' + (_('By {0} :: {1}').format(
        category_name, which))
    owhich = as_hex_unicode('N' + which)
    up_url = rc.url_for('/opds/navcatalog', which=owhich)
    items = categories[category]

    def belongs(x, which):
        # An item belongs to the group when its sort name starts with it
        return getattr(x, 'sort', x.name).lower().startswith(which.lower())

    items = [x for x in items if belongs(x, which)]
    if not items:
        raise HTTPNotFound('No items in group %r:%r' % (category, which))
    updated = rc.last_modified()
    id_ = 'calibre-category-group-feed:' + category + ':' + which
    max_items = rc.opts.max_opds_items
    offsets = Offsets(offset, max_items, len(items))
    items = list(items)[offsets.offset:offsets.offset + max_items]
    rc.outheaders['Last-Modified'] = http_date(timestampfromdt(updated))
    return CategoryFeed(items, category, id_, updated, rc, offsets, page_url,
                        up_url, title=feed_title).root
def books(ctx, rd, library_id):
    """
    Return the metadata for the books as a JSON dictionary.

    Query parameters: ?ids=all&category_urls=true&id_is_uuid=false&device_for_template=None

    If category_urls is true the returned dictionary also contains a
    mapping of category (field) names to URLs that return the list of books in the given
    category.

    If id_is_uuid is true then the book_id is assumed to be a book uuid instead.
    """
    db = get_db(ctx, library_id)
    with db.safe_read_lock:
        id_is_uuid = rd.query.get("id_is_uuid", "false")
        ids = rd.query.get("ids")
        if ids is None or ids == "all":
            ids = db.all_book_ids()
        else:
            ids = ids.split(",")
            if id_is_uuid == "true":
                ids = {db.lookup_by_uuid(x) for x in ids}
                ids.discard(None)  # unknown uuids resolve to None
            else:
                try:
                    ids = {int(x) for x in ids}
                except Exception:
                    raise HTTPNotFound("ids must a comma separated list of integers")
        last_modified = None
        category_urls = rd.query.get("category_urls", "true").lower() == "true"
        device_compatible = rd.query.get("device_compatible", "false").lower() == "true"
        device_for_template = rd.query.get("device_for_template", None)
        ans = {}
        restricted_to = ctx.allowed_book_ids(rd, db)
        for book_id in ids:
            if book_id not in restricted_to:
                # Disallowed books are reported as null entries
                ans[book_id] = None
                continue
            data, lm = book_to_json(
                ctx,
                rd,
                db,
                book_id,
                get_category_urls=category_urls,
                device_compatible=device_compatible,
                device_for_template=device_for_template,
            )
            last_modified = lm if last_modified is None else max(lm, last_modified)
            ans[book_id] = data
    if last_modified is not None:
        rd.outheaders["Last-Modified"] = http_date(timestampfromdt(last_modified))
    return ans
def dispatch(self, data):
    # Find the endpoint for the request path, run it, and apply its
    # cache-control policy to the outgoing headers.
    endpoint_, args = self.find_route(data.path)
    if data.method not in endpoint_.methods:
        raise HTTPSimpleResponse(httplib.METHOD_NOT_ALLOWED)
    self.read_cookies(data)
    if endpoint_.auth_required and self.auth_controller is not None:
        self.auth_controller(data, endpoint_)
    self.init_session(endpoint_, data)
    ans = endpoint_(self.ctx, data, *args)
    self.finalize_session(endpoint_, data, ans)
    outheaders = data.outheaders
    pp = endpoint_.postprocess
    if pp is not None:
        ans = pp(self.ctx, data, endpoint_, ans)
    cc = endpoint_.cache_control
    # cc: False = leave alone; None/"no-cache" = disable caching; a number
    # = public max-age in hours; a pair = (cache-type, hours).
    if cc is not False and "Cache-Control" not in data.outheaders:
        if cc is None or cc == "no-cache":
            outheaders["Expires"] = http_date(10000.0)  # A date in the past
            outheaders["Cache-Control"] = "no-cache, must-revalidate"
            outheaders["Pragma"] = "no-cache"
        elif isinstance(cc, numbers.Number):
            cc = int(60 * 60 * cc)
            outheaders["Cache-Control"] = "public, max-age=%d" % cc
            if cc == 0:
                cc -= 100000  # push Expires into the past
            outheaders["Expires"] = http_date(cc + time.time())
        else:
            ctype, max_age = cc
            max_age = int(60 * 60 * max_age)
            outheaders["Cache-Control"] = "%s, max-age=%d" % (ctype, max_age)
            if max_age == 0:
                max_age -= 100000  # push Expires into the past
            outheaders["Expires"] = http_date(max_age + time.time())
    return ans
def job_done(self, ok, result):
    # Callback run when the request-handler job finishes. Failures that are
    # HTTPSimpleResponse become a simple response (with Location /
    # WWW-Authenticate headers when present); other exceptions re-raise.
    # Successes are serialized into a status line, sorted headers and
    # cookies, then handed to response_ready() for writing.
    if not ok:
        etype, e, tb = result
        if isinstance(e, HTTPSimpleResponse):
            eh = {}
            if e.location:
                eh["Location"] = e.location
            if e.authenticate:
                eh["WWW-Authenticate"] = e.authenticate
            if e.log:
                self.log.warn(e.log)
            return self.simple_response(
                e.http_code, msg=e.message or "",
                close_after_response=e.close_connection, extra_headers=eh
            )
        # Python 2 three-expression raise: re-raise with original traceback
        raise etype, e, tb
    data, output = result
    output = self.finalize_output(output, data, self.method is HTTP1)
    if output is None:
        return
    outheaders = data.outheaders
    outheaders.set("Date", http_date(), replace_all=True)
    outheaders.set("Server", "calibre %s" % __version__, replace_all=True)
    keep_alive = not self.close_after_response and self.opts.timeout > 0
    if keep_alive:
        outheaders.set("Keep-Alive", "timeout=%d" % int(self.opts.timeout))
    if "Connection" not in outheaders:
        if self.response_protocol is HTTP11:
            # HTTP/1.1 defaults to keep-alive; only announce closing
            if self.close_after_response:
                outheaders.set("Connection", "close")
        else:
            # Older protocols need an explicit Keep-Alive to stay open
            if not self.close_after_response:
                outheaders.set("Connection", "Keep-Alive")
    ct = outheaders.get("Content-Type", "")
    if ct.startswith("text/") and "charset=" not in ct:
        # Default text responses to UTF-8
        outheaders.set("Content-Type", ct + "; charset=UTF-8")
    buf = [HTTP11 + (" %d " % data.status_code) + httplib.responses[data.status_code]]
    for header, value in sorted(outheaders.iteritems(), key=itemgetter(0)):
        buf.append("%s: %s" % (header, value))
    for morsel in data.outcookie.itervalues():
        morsel["version"] = "1"
        x = morsel.output()
        if isinstance(x, bytes):
            x = x.decode("ascii")
        buf.append(x)
    buf.append("")
    self.response_ready(BytesIO(b"".join((x + "\r\n").encode("ascii") for x in buf)), output=output)
def books(ctx, rd, library_id):
    '''
    Return the metadata for the books as a JSON dictionary.

    Query parameters: ?ids=all&category_urls=true&id_is_uuid=false&device_for_template=None

    If category_urls is true the returned dictionary also contains a mapping
    of category (field) names to URLs that return the list of books in the
    given category.

    If id_is_uuid is true then the book_id is assumed to be a book uuid instead.
    '''
    db = ctx.get_library(library_id)
    if db is None:
        raise HTTPNotFound('Library %r not found' % library_id)
    with db.safe_read_lock:
        id_is_uuid = rd.query.get('id_is_uuid', 'false')
        ids = rd.query.get('ids')
        if ids is None or ids == 'all':
            ids = db.all_book_ids()
        else:
            ids = ids.split(',')
            if id_is_uuid == 'true':
                # Map uuids to numeric book ids; unknown uuids map to None
                # and are silently dropped
                ids = {db.lookup_by_uuid(x) for x in ids}
                ids.discard(None)
            else:
                try:
                    ids = {int(x) for x in ids}
                except Exception:
                    # Fixed grammar in the error message ('must a' -> 'must be a')
                    raise HTTPNotFound('ids must be a comma separated list of integers')
        last_modified = None
        category_urls = rd.query.get('category_urls', 'true').lower() == 'true'
        device_compatible = rd.query.get('device_compatible', 'false').lower() == 'true'
        device_for_template = rd.query.get('device_for_template', None)
        ans = {}
        restricted_to = ctx.restrict_to_ids(db, rd)
        for book_id in ids:
            if book_id not in restricted_to:
                # Requested id is not visible to this user: return null
                # instead of leaking metadata
                ans[book_id] = None
                continue
            data, lm = book_to_json(
                ctx, rd, db, book_id, get_category_urls=category_urls,
                device_compatible=device_compatible,
                device_for_template=device_for_template)
            # Track the newest modification time over all returned books
            last_modified = lm if last_modified is None else max(lm, last_modified)
            ans[book_id] = data
        if last_modified is not None:
            rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return ans
def get_navcatalog(request_context, which, page_url, up_url, offset=0):
    # Build the OPDS feed for one navigation category. Small categories are
    # listed directly; large ones are presented as one group per first letter.
    categories = request_context.get_categories()
    if which not in categories:
        raise HTTPNotFound('Category %r not found' % which)

    items = categories[which]
    updated = request_context.last_modified()
    field_meta = request_context.db.field_metadata.get(which, {})
    display_name = field_meta.get('name', which)
    feed_title = default_feed_title + ' :: ' + _('By %s') % display_name
    id_ = 'calibre-category-feed:' + which

    ungrouped_limit = request_context.opts.max_opds_ungrouped_items
    page_size = request_context.opts.max_opds_items
    if 0 < ungrouped_limit and len(items) <= ungrouped_limit:
        # Few enough entries: paginate and emit them directly
        offsets = Offsets(offset, page_size, len(items))
        visible = list(items)[offsets.offset:offsets.offset + page_size]
        ans = CategoryFeed(visible, which, id_, updated, request_context,
                           offsets, page_url, up_url, title=feed_title)
    else:
        # Too many entries: collapse into per-initial-letter groups.
        # Note: letters are collected upper-cased, but counting matches the
        # raw (case-sensitive) sort key, mirroring the original behaviour.
        Group = namedtuple('Group', 'text count')
        starts = set()
        for entry in items:
            label = getattr(entry, 'sort', entry.name)
            if not label:
                label = 'A'
            starts.add(label[0].upper())
        category_groups = OrderedDict(
            (letter,
             sum(1 for y in items if getattr(y, 'sort', y.name).startswith(letter)))
            for letter in sorted(starts, key=sort_key))
        groups = [Group(letter, count) for letter, count in category_groups.items()]
        offsets = Offsets(offset, page_size, len(groups))
        groups = groups[offsets.offset:offsets.offset + page_size]
        ans = CategoryGroupFeed(groups, which, id_, updated, request_context,
                                offsets, page_url, up_url, title=feed_title)

    request_context.outheaders['Last-Modified'] = http_date(timestampfromdt(updated))
    return ans.root
def opds_categorygroup(ctx, rd, category, which):
    # OPDS feed listing the members of one first-letter group of a category.
    # Both `category` and `which` arrive hex-encoded in the URL.
    try:
        offset = int(rd.query.get("offset", 0))
    except Exception:
        raise HTTPNotFound("Not found")
    if not which or not category:
        raise HTTPNotFound("Not found")

    rc = RequestContext(ctx, rd)
    categories = rc.get_categories()
    page_url = rc.url_for("/opds/categorygroup", category=category, which=which)
    category = unhexlify(category)
    if category not in categories:
        raise HTTPNotFound("Category %r not found" % which)

    meta = rc.db.field_metadata.get(category, {})
    category_name = meta.get("name", which)
    which = unhexlify(which)
    feed_title = default_feed_title + " :: " + (_("By {0} :: {1}").format(category_name, which))
    up_url = rc.url_for("/opds/navcatalog", which=hexlify("N" + which))

    # Keep only items whose sort key starts with the requested letter(s)
    prefix = which.lower()
    items = [x for x in categories[category]
             if getattr(x, "sort", x.name).lower().startswith(prefix)]
    if not items:
        raise HTTPNotFound("No items in group %r:%r" % (category, which))

    updated = rc.last_modified()
    id_ = "calibre-category-group-feed:" + category + ":" + which
    max_items = rc.opts.max_opds_items
    offsets = Offsets(offset, max_items, len(items))
    items = list(items)[offsets.offset:offsets.offset + max_items]
    rc.outheaders["Last-Modified"] = http_date(timestampfromdt(updated))
    return CategoryFeed(items, category, id_, updated, rc, offsets,
                        page_url, up_url, title=feed_title).root
def prepare_response(self, inheaders, request_body_file):
    # Run the request handler and assemble the full HTTP response (status
    # line + sorted headers) for it. TRACE requests are echoed back directly.
    if self.method == 'TRACE':
        msg = force_unicode(self.request_line, 'utf-8') + '\n' + inheaders.pretty()
        return self.simple_response(httplib.OK, msg, close_after_response=False)
    request_body_file.seek(0)
    outheaders = MultiDict()
    data = RequestData(
        self.method, self.path, self.query, inheaders, request_body_file,
        outheaders, self.response_protocol, self.static_cache, self.opts,
        self.remote_addr, self.remote_port
    )
    try:
        output = self.request_handler(data)
    except HTTP404 as e:
        return self.simple_response(httplib.NOT_FOUND, msg=e.message or '', close_after_response=False)
    output = self.finalize_output(output, data, self.method is HTTP1)
    if output is None:
        # finalize_output signalled there is nothing to send from here
        return
    outheaders.set('Date', http_date(), replace_all=True)
    outheaders.set('Server', 'calibre %s' % __version__, replace_all=True)
    keep_alive = not self.close_after_response and self.opts.timeout > 0
    if keep_alive:
        outheaders.set('Keep-Alive', 'timeout=%d' % int(self.opts.timeout))
    if 'Connection' not in outheaders:
        if self.response_protocol is HTTP11:
            # HTTP/1.1 defaults to keep-alive; only announce closing
            if self.close_after_response:
                outheaders.set('Connection', 'close')
        else:
            # Older protocols need an explicit Keep-Alive to stay open
            if not self.close_after_response:
                outheaders.set('Connection', 'Keep-Alive')
    ct = outheaders.get('Content-Type', '')
    if ct.startswith('text/') and 'charset=' not in ct:
        # Default text responses to UTF-8
        outheaders.set('Content-Type', ct + '; charset=UTF-8')
    buf = [HTTP11 + (' %d ' % data.status_code) + httplib.responses[data.status_code]]
    for header, value in sorted(outheaders.iteritems(), key=itemgetter(0)):
        buf.append('%s: %s' % (header, value))
    buf.append('')
    self.response_ready(BytesIO(b''.join((x + '\r\n').encode('ascii') for x in buf)), output=output)
def book(ctx, rd, book_id, library_id):
    """
    Return the metadata of the book as a JSON dictionary.

    Query parameters: ?category_urls=true&id_is_uuid=false&device_for_template=None

    If category_urls is true the returned dictionary also contains a mapping
    of category (field) names to URLs that return the list of books in the
    given category.

    If id_is_uuid is true then the book_id is assumed to be a book uuid instead.
    """
    db = get_db(ctx, library_id)
    with db.safe_read_lock:
        requested = book_id
        if rd.query.get("id_is_uuid", "false") == "true":
            book_id = db.lookup_by_uuid(book_id)
        else:
            try:
                book_id = int(book_id)
                if not db.has_id(book_id):
                    book_id = None
            except Exception:
                book_id = None
        # Treat missing books and books hidden from this user identically
        if book_id is None or book_id not in ctx.allowed_book_ids(rd, db):
            raise HTTPNotFound("Book with id %r does not exist" % requested)
        want_category_urls = rd.query.get("category_urls", "true").lower() == "true"
        want_device_compatible = rd.query.get("device_compatible", "false").lower() == "true"
        template_device = rd.query.get("device_for_template", None)
        data, last_modified = book_to_json(
            ctx, rd, db, book_id,
            get_category_urls=want_category_urls,
            device_compatible=want_device_compatible,
            device_for_template=template_device,
        )
    rd.outheaders["Last-Modified"] = http_date(timestampfromdt(last_modified))
    return data
def simple_response(self, status_code, msg='', close_after_response=True, extra_headers=None):
    '''Send a minimal text/plain response with the given status code.

    :param status_code: an httplib status code constant
    :param msg: the response body, encoded as UTF-8 (omitted for HEAD)
    :param close_after_response: whether to close the connection afterwards
    :param extra_headers: optional mapping of additional response headers.
        Bug fix: job_done() calls this method with extra_headers=eh (to pass
        Location/WWW-Authenticate), but the parameter was missing from the
        signature, causing a TypeError. Added as a backward-compatible
        keyword argument.
    '''
    if self.response_protocol is HTTP1 and status_code in (httplib.REQUEST_ENTITY_TOO_LARGE, httplib.REQUEST_URI_TOO_LONG):
        # HTTP/1.0 has no 413/414 codes
        status_code = httplib.BAD_REQUEST
    self.close_after_response = close_after_response
    msg = msg.encode('utf-8')
    # TRACE responses echo the request, so they are labelled text/http
    ct = 'http' if self.method == 'TRACE' else 'plain'
    buf = [
        '%s %d %s' % (self.response_protocol, status_code, httplib.responses[status_code]),
        "Content-Length: %s" % len(msg),
        "Content-Type: text/%s; charset=UTF-8" % ct,
        "Date: " + http_date(),
    ]
    if self.close_after_response and self.response_protocol is HTTP11:
        buf.append("Connection: close")
    if extra_headers:
        for header, value in extra_headers.items():
            buf.append('%s: %s' % (header, value))
    buf.append('')
    buf = [(x + '\r\n').encode('ascii') for x in buf]
    if self.method != 'HEAD':
        buf.append(msg)
    self.response_ready(BytesIO(b''.join(buf)))
def opds_categorygroup(ctx, rd, category, which):
    # OPDS feed listing the members of one first-letter group of a category.
    # Both `category` and `which` arrive hex-encoded in the URL.
    try:
        offset = int(rd.query.get('offset', 0))
    except Exception:
        raise HTTPNotFound('Not found')
    if not which or not category:
        raise HTTPNotFound('Not found')

    rc = RequestContext(ctx, rd)
    categories = rc.get_categories()
    page_url = rc.url_for('/opds/categorygroup', category=category, which=which)
    category = from_hex_unicode(category)
    if category not in categories:
        raise HTTPNotFound('Category %r not found' % which)

    meta = rc.db.field_metadata.get(category, {})
    category_name = meta.get('name', which)
    which = from_hex_unicode(which)
    feed_title = default_feed_title + ' :: ' + (_('By {0} :: {1}').format(category_name, which))
    up_url = rc.url_for('/opds/navcatalog', which=as_hex_unicode('N' + which))

    # Keep only items whose sort key starts with the requested letter(s)
    prefix = which.lower()
    items = [x for x in categories[category]
             if getattr(x, 'sort', x.name).lower().startswith(prefix)]
    if not items:
        raise HTTPNotFound('No items in group %r:%r' % (category, which))

    updated = rc.last_modified()
    id_ = 'calibre-category-group-feed:' + category + ':' + which
    max_items = rc.opts.max_opds_items
    offsets = Offsets(offset, max_items, len(items))
    items = list(items)[offsets.offset:offsets.offset + max_items]
    rc.outheaders['Last-Modified'] = http_date(timestampfromdt(updated))
    return CategoryFeed(items, category, id_, updated, rc, offsets,
                        page_url, up_url, title=feed_title).root
def book(ctx, rd, book_id, library_id):
    '''
    Return the metadata of the book as a JSON dictionary.

    Query parameters: ?category_urls=true&id_is_uuid=false&device_for_template=None

    If category_urls is true the returned dictionary also contains a mapping
    of category (field) names to URLs that return the list of books in the
    given category.

    If id_is_uuid is true then the book_id is assumed to be a book uuid instead.
    '''
    db = get_db(ctx, rd, library_id)
    with db.safe_read_lock:
        requested = book_id
        if rd.query.get('id_is_uuid', 'false') == 'true':
            book_id = db.lookup_by_uuid(book_id)
        else:
            try:
                book_id = int(book_id)
                if not db.has_id(book_id):
                    book_id = None
            except Exception:
                book_id = None
        # Treat missing books and books hidden from this user identically
        if book_id is None or not ctx.has_id(rd, db, book_id):
            raise BookNotFound(requested, db)
        want_category_urls = rd.query.get('category_urls', 'true').lower() == 'true'
        want_device_compatible = rd.query.get('device_compatible', 'false').lower() == 'true'
        template_device = rd.query.get('device_for_template', None)
        data, last_modified = book_to_json(
            ctx, rd, db, book_id,
            get_category_urls=want_category_urls,
            device_compatible=want_device_compatible,
            device_for_template=template_device)
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return data
def book(ctx, rd, book_id, library_id):
    '''
    Return the metadata of the book as a JSON dictionary.

    Query parameters: ?category_urls=true&id_is_uuid=false&device_for_template=None

    If category_urls is true the returned dictionary also contains a mapping
    of category (field) names to URLs that return the list of books in the
    given category.

    If id_is_uuid is true then the book_id is assumed to be a book uuid instead.
    '''
    db = ctx.get_library(library_id)
    if db is None:
        raise HTTPNotFound('Library %r not found' % library_id)
    with db.safe_read_lock:
        requested = book_id
        if rd.query.get('id_is_uuid', 'false') == 'true':
            book_id = db.lookup_by_uuid(book_id)
        else:
            try:
                book_id = int(book_id)
                if not db.has_id(book_id):
                    book_id = None
            except Exception:
                book_id = None
        if book_id is None:
            raise HTTPNotFound('Book with id %r does not exist' % requested)
        want_category_urls = rd.query.get('category_urls', 'true').lower() == 'true'
        want_device_compatible = rd.query.get('device_compatible', 'false').lower() == 'true'
        template_device = rd.query.get('device_for_template', None)
        data, last_modified = book_to_json(
            ctx, rd, db, book_id,
            get_category_urls=want_category_urls,
            device_compatible=want_device_compatible,
            device_for_template=template_device)
    rd.outheaders['Last-Modified'] = http_date(timestampfromdt(last_modified))
    return data
def job_done(self, ok, result):
    # Callback run when the request-handler job finishes. Failures that are
    # HTTPSimpleResponse become a simple response (passing Location when
    # present); other exceptions re-raise. Successes are serialized into a
    # status line and sorted headers, then handed to response_ready().
    if not ok:
        etype, e, tb = result
        if isinstance(e, HTTPSimpleResponse):
            eh = {}
            if e.location:
                eh['Location'] = e.location
            return self.simple_response(e.http_code, msg=e.message or '', close_after_response=e.close_connection, extra_headers=eh)
        # Python 2 three-expression raise: re-raise with original traceback
        raise etype, e, tb
    data, output = result
    output = self.finalize_output(output, data, self.method is HTTP1)
    if output is None:
        return
    outheaders = data.outheaders
    outheaders.set('Date', http_date(), replace_all=True)
    outheaders.set('Server', 'calibre %s' % __version__, replace_all=True)
    keep_alive = not self.close_after_response and self.opts.timeout > 0
    if keep_alive:
        outheaders.set('Keep-Alive', 'timeout=%d' % int(self.opts.timeout))
    if 'Connection' not in outheaders:
        if self.response_protocol is HTTP11:
            # HTTP/1.1 defaults to keep-alive; only announce closing
            if self.close_after_response:
                outheaders.set('Connection', 'close')
        else:
            # Older protocols need an explicit Keep-Alive to stay open
            if not self.close_after_response:
                outheaders.set('Connection', 'Keep-Alive')
    ct = outheaders.get('Content-Type', '')
    if ct.startswith('text/') and 'charset=' not in ct:
        # Default text responses to UTF-8
        outheaders.set('Content-Type', ct + '; charset=UTF-8')
    buf = [HTTP11 + (' %d ' % data.status_code) + httplib.responses[data.status_code]]
    for header, value in sorted(outheaders.iteritems(), key=itemgetter(0)):
        buf.append('%s: %s' % (header, value))
    buf.append('')
    self.response_ready(BytesIO(b''.join((x + '\r\n').encode('ascii') for x in buf)), output=output)