Example #1
def implementation(db, notify_changes, action, *args):
    is_remote = notify_changes is not None
    if action == 'field_metadata':
        return db.field_metadata
    if action == 'opf':
        book_id, mi = args
        with db.write_lock:
            if not db.has_id(book_id):
                return
            changed_ids = db.set_metadata(book_id, mi, force_changes=True, allow_case_change=False)
            if is_remote:
                notify_changes(metadata(changed_ids))
            return db.get_metadata(book_id)
    if action == 'fields':
        book_id, fvals = args
        with db.write_lock:
            if not db.has_id(book_id):
                return
            mi = db.get_metadata(book_id)
            for field, val in fvals:
                if field.endswith('_index'):
                    sname = mi.get(field[:-6])
                    if sname:
                        mi.set(field[:-6], sname, extra=val)
                        if field == 'series_index':
                            mi.series_index = val  # extra has no effect for the builtin series field
                elif field == 'cover':
                    if is_remote:
                        mi.cover_data = None, val[1]
                    else:
                        mi.cover = val
                        read_cover(mi)
                else:
                    mi.set(field, val)
            changed_ids = db.set_metadata(book_id, mi, force_changes=True, allow_case_change=True)
            if is_remote:
                notify_changes(metadata(changed_ids))
            return db.get_metadata(book_id)
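The 'fields' action above expects its payload as a list of (field, value) pairs, with series positions travelling as separate *_index entries. A minimal sketch of such a payload, assuming illustrative field names; the dbctx.run call is shown only as a comment since it needs a live calibredb context:

# Illustrative payload for the 'fields' action; the (field, value) pair shape
# comes from the loop over fvals above.
fvals = [
    ('title', 'A New Title'),
    ('series', 'Some Series'),
    ('series_index', 2.0),  # applied via mi.set(..., extra=val) and mi.series_index
]
# A caller would forward it through the command machinery, roughly:
# final_mi = dbctx.run('set_metadata', 'fields', book_id, fvals)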
Example #2
def do_add_empty(dbctx, title, authors, isbn, tags, series, series_index,
                 cover, identifiers, languages):
    mi = MetaInformation(None)
    if title is not None:
        mi.title = title
    if authors:
        mi.authors = authors
    if identifiers:
        mi.set_identifiers(identifiers)
    if isbn:
        mi.isbn = isbn
    if tags:
        mi.tags = tags
    if series:
        mi.series, mi.series_index = series, series_index
    if cover:
        mi.cover = cover
    if languages:
        mi.languages = languages
    ids, duplicates = dbctx.run('add', 'empty', read_cover(mi))
    prints(_('Added book ids: %s') % ','.join(map(str, ids)))
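For reference, mi.set_identifiers() above takes a plain mapping from identifier scheme to value; a small illustrative dict (the schemes and values are only examples):

# Hypothetical identifiers mapping as consumed by mi.set_identifiers() above.
identifiers = {'isbn': '9780306406157', 'goodreads': '12345'}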
Example #3
def do_add_empty(
    dbctx, title, authors, isbn, tags, series, series_index, cover, identifiers,
    languages
):
    mi = MetaInformation(None)
    if title is not None:
        mi.title = title
    if authors:
        mi.authors = authors
    if identifiers:
        mi.set_identifiers(identifiers)
    if isbn:
        mi.isbn = isbn
    if tags:
        mi.tags = tags
    if series:
        mi.series, mi.series_index = series, series_index
    if cover:
        mi.cover = cover
    if languages:
        mi.languages = languages
    ids, duplicates = dbctx.run('add', 'empty', read_cover(mi))
    prints(_('Added book ids: %s') % ','.join(map(str, ids)))
Example #4
def main(opts, args, dbctx):
    if opts.list_fields:
        ans = get_fields(dbctx)
        prints('%-40s' % _('Title'), _('Field name'), '\n')
        for key, m in ans:
            prints('%-40s' % m['name'], key)
        return 0

    def verify_int(x):
        try:
            int(x)
            return True
        except:
            return False

    if len(args) < 1 or not verify_int(args[0]):
        raise SystemExit(
            _('You must specify a record id as the '
              'first argument'))
    if len(args) < 2 and not opts.field:
        raise SystemExit(_('You must specify either a field or an OPF file'))
    book_id = int(args[0])

    if len(args) > 1:
        opf = os.path.abspath(args[1])
        if not os.path.exists(opf):
            raise SystemExit(_('The OPF file %s does not exist') % opf)
        with lopen(opf, 'rb') as stream:
            mi = get_metadata(stream)[0]
        if mi.cover:
            mi.cover = os.path.join(os.path.dirname(opf),
                                    os.path.relpath(mi.cover, getcwd()))
        final_mi = dbctx.run('set_metadata', 'opf', book_id, read_cover(mi))
        if not final_mi:
            raise SystemExit(
                _('No book with id: %s in the database') % book_id)

    if opts.field:
        fields = {k: v for k, v in get_fields(dbctx)}
        fields['title_sort'] = fields['sort']
        vals = {}
        for x in opts.field:
            field, val = x.partition(':')[::2]
            if field == 'sort':
                field = 'title_sort'
            if field not in fields:
                raise SystemExit(_('%s is not a known field' % field))
            if field == 'cover':
                val = dbctx.path(os.path.abspath(os.path.expanduser(val)))
            else:
                val = field_from_string(field, val, fields[field])
            vals[field] = val
        fvals = []
        for field, val in sorted(  # ensure series_index fields are set last
                iteritems(vals),
                key=lambda k: 1 if k[0].endswith('_index') else 0):
            if field.endswith('_index'):
                try:
                    val = float(val)
                except Exception:
                    raise SystemExit(
                        'The value %r is not a valid series index' % val)
            fvals.append((field, val))

        final_mi = dbctx.run('set_metadata', 'fields', book_id, fvals)
        if not final_mi:
            raise SystemExit(
                _('No book with id: %s in the database') % book_id)

    prints(unicode_type(final_mi))
    return 0
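The --field arguments above are split with str.partition, which only breaks on the first colon, so values may themselves contain colons. A standalone illustration of the same split:

# The 'field:value' split used in the loop over opts.field above.
field, val = 'series_index:2.5'.partition(':')[::2]
# field == 'series_index', val == '2.5'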
Example #5
def main(opts, args, dbctx):
    if opts.list_fields:
        ans = get_fields(dbctx)
        prints('%-40s' % _('Title'), _('Field name'), '\n')
        for key, m in ans:
            prints('%-40s' % m['name'], key)
        return 0

    def verify_int(x):
        try:
            int(x)
            return True
        except:
            return False

    if len(args) < 1 or not verify_int(args[0]):
        raise SystemExit(_(
            'You must specify a record id as the '
            'first argument'
        ))
    if len(args) < 2 and not opts.field:
        raise SystemExit(_('You must specify either a field or an opf file'))
    book_id = int(args[0])

    if len(args) > 1:
        opf = os.path.abspath(args[1])
        if not os.path.exists(opf):
            raise SystemExit(_('The OPF file %s does not exist') % opf)
        with lopen(opf, 'rb') as stream:
            mi = get_metadata(stream)[0]
        if mi.cover:
            mi.cover = os.path.join(os.path.dirname(opf), os.path.relpath(mi.cover, os.getcwdu()))
        final_mi = dbctx.run('set_metadata', 'opf', book_id, read_cover(mi))
        if not final_mi:
            raise SystemExit(_('No book with id: %s in the database') % book_id)

    if opts.field:
        fields = {k: v for k, v in get_fields(dbctx)}
        fields['title_sort'] = fields['sort']
        vals = {}
        for x in opts.field:
            field, val = x.partition(':')[::2]
            if field == 'sort':
                field = 'title_sort'
            if field not in fields:
                raise SystemExit(_('%s is not a known field' % field))
            if field == 'cover':
                val = dbctx.path(os.path.abspath(os.path.expanduser(val)))
            else:
                val = field_from_string(field, val, fields[field])
            vals[field] = val
        fvals = []
        for field, val in sorted(  # ensure series_index fields are set last
                iteritems(vals), key=lambda k: 1 if k[0].endswith('_index') else 0):
            if field.endswith('_index'):
                try:
                    val = float(val)
                except Exception:
                    raise SystemExit('The value %r is not a valid series index' % val)
            fvals.append((field, val))

        final_mi = dbctx.run('set_metadata', 'fields', book_id, fvals)
        if not final_mi:
            raise SystemExit(_('No book with id: %s in the database') % book_id)

    prints(unicode_type(final_mi))
    return 0
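The sort key in both versions of main() pushes *_index fields to the end, so a series name is always applied before its index. A minimal illustration, using dict.items() in place of the polyglot iteritems helper:

vals = {'series_index': '2.0', 'series': 'Some Series', 'title': 'A Title'}
ordered = sorted(vals.items(), key=lambda k: 1 if k[0].endswith('_index') else 0)
# ordered == [('series', 'Some Series'), ('title', 'A Title'), ('series_index', '2.0')]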
Example #6
def do_add(dbctx, paths, one_book_per_directory, recurse, add_duplicates,
           otitle, oauthors, oisbn, otags, oseries, oseries_index, ocover,
           oidentifiers, olanguages, compiled_rules):
    with add_ctx():
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids = [], set()
        for book in files:
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            ids, dups, book_title = dbctx.run(
                'add', 'book', dbctx.path(book), os.path.basename(book), fmt,
                add_duplicates, otitle, oauthors, oisbn, otags, oseries,
                oseries_index,
                read_cover(ocover) if ocover else None, oidentifiers,
                olanguages)
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((book_title, book))

        dir_dups = []
        scanner = cdb_recursive_find if recurse else cdb_find_in_dir
        for dpath in dirs:
            for formats in scanner(dpath, one_book_per_directory,
                                   compiled_rules):
                book_title, ids, dups = dbctx.run(
                    'add', 'format_group', tuple(map(dbctx.path, formats)),
                    add_duplicates)
                if book_title is not None:
                    added_ids |= set(ids)
                    if dups:
                        dir_dups.append((book_title, formats))

        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(_('The following books were not added as '
                     'they already exist in the database '
                     '(see --duplicates option):'),
                   file=sys.stderr)
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

        if added_ids:
            prints(
                _('Added book ids: %s') %
                (', '.join(map(type(u''), added_ids))))
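The per-file loop above derives the format from the file extension and silently skips files that have none; the same two lines in isolation:

import os

fmt = os.path.splitext('/tmp/A Good Book.epub')[1]
fmt = fmt[1:] if fmt else None
# fmt == 'epub'; for a path with no extension, fmt becomes None and the file is skipped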
Example #7
def add_books(self, books, **kwargs):
    books = [
        (read_cover(mi), {k: self.dbctx.path(v) for k, v in fmt_map.iteritems()})
        for mi, fmt_map in books
    ]
    return self.dbctx.run('add', 'add_books', books, kwargs)
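Judging from the comprehension, add_books() expects a list of (metadata, format-map) pairs, the map keyed by format name and valued by a file path. A hypothetical payload, reusing MetaInformation from the earlier examples; the format names and paths are made up:

# Assumed shape of the `books` argument to add_books().
mi = MetaInformation('An Example Title')
books = [(mi, {'EPUB': '/tmp/example.epub', 'PDF': '/tmp/example.pdf'})]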
Example #8
def do_add(
    dbctx, paths, one_book_per_directory, recurse, add_duplicates, otitle, oauthors,
    oisbn, otags, oseries, oseries_index, ocover, oidentifiers, olanguages,
    compiled_rules
):
    orig = sys.stdout
    try:
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids = [], set()
        for book in files:
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            with lopen(book, 'rb') as stream:
                mi = get_metadata(stream, stream_type=fmt, use_libprs_metadata=True)
            if not mi.title:
                mi.title = os.path.splitext(os.path.basename(book))[0]
            if not mi.authors:
                mi.authors = [_('Unknown')]
            if oidentifiers:
                ids = mi.get_identifiers()
                ids.update(oidentifiers)
                mi.set_identifiers(ids)
            for x in ('title', 'authors', 'isbn', 'tags', 'series', 'languages'):
                val = locals()['o' + x]
                if val:
                    setattr(mi, x, val)
            if oseries:
                mi.series_index = oseries_index
            if ocover:
                mi.cover = ocover
                mi.cover_data = (None, None)

            ids, dups = dbctx.run(
                'add', 'book', dbctx.path(book), fmt, read_cover(mi), add_duplicates
            )
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((mi.title, book))

        dir_dups = []
        dbproxy = DBProxy(dbctx)

        for dpath in dirs:
            if recurse:
                dups = recursive_import(
                    dbproxy,
                    dpath,
                    single_book_per_directory=one_book_per_directory,
                    added_ids=added_ids,
                    compiled_rules=compiled_rules,
                    add_duplicates=add_duplicates
                ) or []
            else:
                func = import_book_directory if one_book_per_directory else import_book_directory_multiple
                dups = func(
                    dbproxy,
                    dpath,
                    added_ids=added_ids,
                    compiled_rules=compiled_rules,
                    add_duplicates=add_duplicates
                ) or []
            dir_dups.extend(dups)

        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(
                _(
                    'The following books were not added as '
                    'they already exist in the database '
                    '(see --duplicates option):'
                ),
                file=sys.stderr
            )
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

        if added_ids:
            prints(_('Added book ids: %s') % (', '.join(map(type(u''), added_ids))))
    finally:
        sys.stdout = orig
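The locals() lookup above maps each metadata option to its o-prefixed parameter without a long if/elif chain. A self-contained sketch of the same pattern, with hypothetical option names:

def apply_overrides(otitle=None, otags=None):
    # Same trick as in do_add: look up the o-prefixed local by name.
    overrides = {}
    for x in ('title', 'tags'):
        val = locals()['o' + x]
        if val:
            overrides[x] = val
    return overrides

# apply_overrides(otitle='New Title') -> {'title': 'New Title'}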
Example #9
def do_add(
    dbctx, paths, one_book_per_directory, recurse, add_duplicates, otitle, oauthors,
    oisbn, otags, oseries, oseries_index, ocover, oidentifiers, olanguages,
    compiled_rules
):
    with add_ctx():
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids = [], set()
        for book in files:
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            ids, dups, book_title = dbctx.run(
                'add', 'book', dbctx.path(book), os.path.basename(book), fmt, add_duplicates,
                otitle, oauthors, oisbn, otags, oseries, oseries_index, read_cover(ocover) if ocover else None,
                oidentifiers, olanguages
            )
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((book_title, book))

        dir_dups = []
        scanner = cdb_recursive_find if recurse else cdb_find_in_dir
        for dpath in dirs:
            for formats in scanner(dpath, one_book_per_directory, compiled_rules):
                book_title, ids, dups = dbctx.run(
                        'add', 'format_group', tuple(map(dbctx.path, formats)), add_duplicates)
                if book_title is not None:
                    added_ids |= set(ids)
                    if dups:
                        dir_dups.append((book_title, formats))

        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(
                _(
                    'The following books were not added as '
                    'they already exist in the database '
                    '(see --duplicates option):'
                ),
                file=sys.stderr
            )
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

        if added_ids:
            prints(_('Added book ids: %s') % (', '.join(map(type(u''), added_ids))))
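In this newer variant the grouping logic lives behind the 'format_group' action: each group of files returned by the scanner is passed through dbctx.path() and the run() call reports back (book_title, ids, dups). A hypothetical invocation, with dbctx assumed to be a live calibredb context:

# formats as produced by cdb_find_in_dir / cdb_recursive_find for one book.
formats = ('/library/novel/novel.epub', '/library/novel/novel.mobi')
# book_title, ids, dups = dbctx.run('add', 'format_group', formats, False)
# The caller above only records ids when book_title is not None, i.e. when
# the group was actually added.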