def cdb_add_book(ctx, rd, job_id, add_duplicates, filename, library_id):
    '''
    Add a file as a new book. The file contents must be in the body of the request.

    The response will also have the title/authors/languages read from the
    metadata of the file/filename. It will contain a `book_id` field specifying
    the id of the newly added book, or if add_duplicates is not specified and a
    duplicate was found, no book_id will be present. It will also return the
    value of `job_id` as the `id` field and `filename` as the `filename` field.
    '''
    db = get_db(ctx, rd, library_id)
    if ctx.restriction_for(rd, db):
        raise HTTPForbidden('Cannot use the add book interface with a user who has per library restrictions')
    if not filename:
        raise HTTPBadRequest('An empty filename is not allowed')
    sfilename = sanitize_file_name_unicode(filename)
    # The format is derived from the (sanitized) filename extension.
    fmt = os.path.splitext(sfilename)[1]
    fmt = fmt[1:] if fmt else None
    if not fmt:
        # Fixed grammar of user-facing error message ("An filename" -> "A filename")
        raise HTTPBadRequest('A filename with no extension is not allowed')
    if isinstance(rd.request_body_file, BytesIO):
        # An in-memory BytesIO body here means no file data was uploaded.
        raise HTTPBadRequest('A request body containing the file data must be specified')
    add_duplicates = add_duplicates in ('y', '1')
    path = os.path.join(rd.tdir, sfilename)
    rd.request_body_file.name = path
    # Rewind before reading metadata, and again before handing the stream to add_books.
    rd.request_body_file.seek(0)
    mi = get_metadata(rd.request_body_file, stream_type=fmt, use_libprs_metadata=True)
    rd.request_body_file.seek(0)
    ids, duplicates = db.add_books([(mi, {fmt: rd.request_body_file})], add_duplicates=add_duplicates)
    ans = {'title': mi.title, 'authors': mi.authors, 'languages': mi.languages, 'filename': filename, 'id': job_id}
    if ids:
        ans['book_id'] = ids[0]
        books_added(ids)
    return ans
def empty(db, notify_changes, is_remote, args):
    '''Add an empty book record: metadata only, no format files.'''
    mi = args[0]
    book_ids, dupes = db.add_books([(mi, {})])
    if is_remote:
        # Tell remote listeners about the new records.
        notify_changes(books_added(book_ids))
        db.dump_metadata()
    return book_ids, bool(dupes)
def format_group(db, notify_changes, is_remote, args):
    '''
    Add a single book built from a group of format files, with an optional
    client-supplied cover. Returns (title, ids, has_duplicates); title is None
    when no usable metadata could be read.
    '''
    formats, add_duplicates, cover_data = args
    with add_ctx(), TemporaryDirectory(
            'add-multiple') as tdir, run_import_plugins_before_metadata(tdir):
        if is_remote:
            # Remote callers send (name, bytes) pairs; write them to disk first.
            paths = []
            for name, data in formats:
                with lopen(os.path.join(tdir, os.path.basename(name)), 'wb') as f:
                    f.write(data)
                paths.append(f.name)
        else:
            paths = list(formats)
        paths = run_import_plugins(paths)
        mi = metadata_from_formats(paths)
        if mi.title is None:
            return None, set(), False
        # Use the client-supplied cover only when the formats provided none.
        # BUGFIX: parenthesized the condition — the original
        # `cover_data and not mi.cover_data or not mi.cover_data[1]` parsed as
        # `(cover_data and not mi.cover_data) or (not mi.cover_data[1])`,
        # which raised TypeError when mi.cover_data was None and cover_data
        # was falsy, and wrongly overwrote an existing cover otherwise.
        if cover_data and (not mi.cover_data or not mi.cover_data[1]):
            mi.cover_data = 'jpeg', cover_data
        ids, dups = db.add_books(
            [(mi, create_format_map(paths))], add_duplicates=add_duplicates, run_hooks=False)
        if is_remote:
            notify_changes(books_added(ids))
            db.dump_metadata()
        return mi.title, ids, bool(dups)
def add_books(db, notify_changes, is_remote, args):
    '''
    Add multiple (metadata, format_map) books at once.

    Returns (ids, duplicates) where duplicates is a list of
    (title, [format names]) pairs for books that were not added.
    '''
    books, kwargs = args
    if is_remote:
        # Remote format data arrives serialized; wrap each entry in a stream.
        # BUGFIX: dict.iteritems()/itervalues() are Python 2 only and raise
        # AttributeError on Python 3 (this file already uses `nonlocal`, so it
        # is Python 3); use .items()/.values().
        books = [(mi, {k: to_stream(v) for k, v in fmt_map.items()})
                 for mi, fmt_map in books]
    ids, duplicates = db.add_books(books, **kwargs)
    if is_remote:
        notify_changes(books_added(ids))
        db.dump_metadata()
    return ids, [(mi.title, [getattr(x, 'name', '<stream>') for x in format_map.values()])
                 for mi, format_map in duplicates]
def book(db, notify_changes, is_remote, args):
    '''Add one book from pre-read metadata plus a single format's data.'''
    data, fmt, mi, add_duplicates = args
    if is_remote:
        # Remote data arrives serialized; turn it back into a file-like stream.
        data = to_stream(data)
    fmt_map = {fmt: data}
    new_ids, dupes = db.add_books([(mi, fmt_map)], add_duplicates=add_duplicates)
    if is_remote:
        notify_changes(books_added(new_ids))
        db.dump_metadata()
    return new_ids, bool(dupes)
def book(db, notify_changes, is_remote, args):
    '''
    Add a single book from one file, reading metadata from it and applying any
    caller-supplied metadata overrides. Returns (ids, has_duplicates, title).
    '''
    (data, fname, fmt, add_duplicates, otitle, oauthors, oisbn, otags, oseries,
     oseries_index, ocover, oidentifiers, olanguages) = args
    with add_ctx(), TemporaryDirectory(
            'add-single') as tdir, run_import_plugins_before_metadata(tdir):
        if is_remote:
            # Remote callers send the file contents; persist them so plugins can run.
            with lopen(os.path.join(tdir, fname), 'wb') as f:
                f.write(data[1])
            path = f.name
        else:
            path = data
        path = run_import_plugins([path])[0]
        # Plugins may change the file, so re-derive the format from the final path.
        fmt = os.path.splitext(path)[1]
        fmt = (fmt[1:] if fmt else None) or 'unknown'
        with lopen(path, 'rb') as stream:
            mi = get_metadata(stream, stream_type=fmt, use_libprs_metadata=True)
        if not mi.title:
            mi.title = os.path.splitext(os.path.basename(path))[0]
        if not mi.authors:
            mi.authors = [_('Unknown')]
        if oidentifiers:
            ids = mi.get_identifiers()
            ids.update(oidentifiers)
            mi.set_identifiers(ids)
        # IMPROVED: explicit field->value pairs instead of the fragile
        # locals()['o' + x] lookup, which silently breaks if a local is renamed.
        overrides = (('title', otitle), ('authors', oauthors), ('isbn', oisbn),
                     ('tags', otags), ('series', oseries), ('languages', olanguages))
        for field, val in overrides:
            if val:
                setattr(mi, field, val)
        if oseries:
            mi.series_index = oseries_index
        if ocover:
            mi.cover = None
            mi.cover_data = ocover
        ids, duplicates = db.add_books(
            [(mi, {fmt: path})], add_duplicates=add_duplicates, run_hooks=False)
        if is_remote:
            notify_changes(books_added(ids))
            db.dump_metadata()
        return ids, bool(duplicates), mi.title
def format_group(db, notify_changes, is_remote, args):
    '''
    Add a single book built from a group of format files.
    Returns (title, ids, has_duplicates); title is None when no usable
    metadata could be read from the formats.
    '''
    formats, add_duplicates = args
    with add_ctx(), TemporaryDirectory('add-multiple') as tdir, run_import_plugins_before_metadata(tdir):
        if is_remote:
            # Remote callers send (name, bytes) pairs; write them to disk first.
            # FIX: take the basename of the client-supplied name (as the sibling
            # format_group implementation does) so a name containing path
            # separators cannot point outside tdir or into a missing subdirectory.
            paths = []
            for name, data in formats:
                with lopen(os.path.join(tdir, os.path.basename(name)), 'wb') as f:
                    f.write(data)
                paths.append(f.name)
        else:
            paths = list(formats)
        paths = run_import_plugins(paths)
        mi = metadata_from_formats(paths)
        if mi.title is None:
            return None, set(), False
        ids, dups = db.add_books(
            [(mi, create_format_map(paths))], add_duplicates=add_duplicates, run_hooks=False)
        if is_remote:
            notify_changes(books_added(ids))
            db.dump_metadata()
        return mi.title, ids, bool(dups)
def book(db, notify_changes, is_remote, args):
    '''
    Add a single book from one file, reading metadata from it and applying any
    caller-supplied metadata overrides. Returns (ids, has_duplicates, title).
    '''
    (data, fname, fmt, add_duplicates, otitle, oauthors, oisbn, otags, oseries,
     oseries_index, ocover, oidentifiers, olanguages) = args
    with add_ctx(), TemporaryDirectory('add-single') as tdir, run_import_plugins_before_metadata(tdir):
        if is_remote:
            # Remote callers send the file contents; persist them so plugins can run.
            with lopen(os.path.join(tdir, fname), 'wb') as f:
                f.write(data[1])
            path = f.name
        else:
            path = data
        path = run_import_plugins([path])[0]
        # Plugins may change the file, so re-derive the format from the final path.
        fmt = os.path.splitext(path)[1]
        fmt = (fmt[1:] if fmt else None) or 'unknown'
        with lopen(path, 'rb') as stream:
            mi = get_metadata(stream, stream_type=fmt, use_libprs_metadata=True)
        if not mi.title:
            mi.title = os.path.splitext(os.path.basename(path))[0]
        if not mi.authors:
            mi.authors = [_('Unknown')]
        if oidentifiers:
            ids = mi.get_identifiers()
            ids.update(oidentifiers)
            mi.set_identifiers(ids)
        # IMPROVED: explicit field->value pairs instead of the fragile
        # locals()['o' + x] lookup, which silently breaks if a local is renamed.
        overrides = (('title', otitle), ('authors', oauthors), ('isbn', oisbn),
                     ('tags', otags), ('series', oseries), ('languages', olanguages))
        for field, val in overrides:
            if val:
                setattr(mi, field, val)
        if oseries:
            mi.series_index = oseries_index
        if ocover:
            mi.cover = None
            mi.cover_data = ocover
        ids, duplicates = db.add_books(
            [(mi, {fmt: path})], add_duplicates=add_duplicates, run_hooks=False)
        if is_remote:
            notify_changes(books_added(ids))
            db.dump_metadata()
        return ids, bool(duplicates), mi.title
def do_adding(db, request_id, notify_changes, is_remote, mi, format_map, add_duplicates, oautomerge):
    # Add one book (metadata `mi` + its `format_map` of fmt -> file/stream),
    # honoring the automerge policy `oautomerge` for books that look identical
    # to an existing record. Returns (added_ids, updated_ids, duplicates).
    identical_book_list, added_ids, updated_ids = set(), set(), set()
    duplicates = []  # list of (mi, format_map) pairs that were NOT added
    identical_books_data = None

    def add_format(book_id, fmt):
        # Attach one format to an existing book; replace=True overwrites any
        # format of the same type already on the record.
        db.add_format(book_id, fmt, format_map[fmt], replace=True, run_hooks=False)
        updated_ids.add(book_id)

    def add_book():
        # Create a brand-new record; add_duplicates=True because duplicate
        # policy has already been decided by the caller of this helper.
        nonlocal added_ids
        added_ids_, duplicates_ = db.add_books([(mi, format_map)], add_duplicates=True, run_hooks=False)
        added_ids |= set(added_ids_)
        duplicates.extend(duplicates_)

    # Only compute the (expensive) identical-books lookup when it can matter:
    # when automerge is enabled, or when duplicates must be rejected.
    if oautomerge != 'disabled' or not add_duplicates:
        identical_books_data = cached_identical_book_data(db, request_id)
        identical_book_list = find_identical_books(mi, identical_books_data)
    if oautomerge != 'disabled':
        if identical_book_list:
            needs_add = False
            duplicated_formats = set()
            for book_id in identical_book_list:
                # Compare format sets case-insensitively; input_formats maps
                # the upper-cased name back to the caller's original spelling.
                book_formats = {q.upper() for q in db.formats(book_id)}
                input_formats = {q.upper(): q for q in format_map}
                common_formats = book_formats & set(input_formats)
                if not common_formats:
                    # No overlap: every incoming format can be merged in.
                    for x in input_formats:
                        add_format(book_id, input_formats[x])
                else:
                    # Formats the existing book does not yet have are always added.
                    new_formats = set(input_formats) - book_formats
                    if new_formats:
                        for x in new_formats:
                            add_format(book_id, input_formats[x])
                    if oautomerge == 'overwrite':
                        # Policy: replace the existing copies of shared formats.
                        for x in common_formats:
                            add_format(book_id, input_formats[x])
                    elif oautomerge == 'ignore':
                        # Policy: keep existing copies; report incoming ones as dups.
                        for x in common_formats:
                            duplicated_formats.add(input_formats[x])
                    elif oautomerge == 'new_record':
                        # Policy: conflicting formats force a separate new record.
                        needs_add = True
            if needs_add:
                add_book()
            if duplicated_formats:
                duplicates.append(
                    (mi, {x: format_map[x] for x in duplicated_formats}))
        else:
            add_book()
    else:
        # Automerge disabled: identical books are reported as duplicates
        # (add_duplicates was False, or the lookup was skipped entirely).
        if identical_book_list:
            duplicates.append((mi, format_map))
        else:
            add_book()
    if added_ids and identical_books_data is not None:
        # Keep the cached identical-books index in sync with the new records.
        for book_id in added_ids:
            db.update_data_for_find_identical_books(book_id, identical_books_data)
    if is_remote:
        notify_changes(books_added(added_ids))
        if updated_ids:
            notify_changes(
                formats_added(
                    {book_id: tuple(format_map) for book_id in updated_ids}))
        # NOTE(review): placement of dump_metadata() reconstructed from a
        # flattened source — confirm it is meant to run only for remote calls.
        db.dump_metadata()
    return added_ids, updated_ids, duplicates