Example #1
0
 def recursive_import(self,
                      root,
                      single_book_per_directory=True,
                      callback=None,
                      added_ids=None):
     """Recursively import books found under *root*.

     Thin wrapper: forwards this object (as the database handle) and all
     options to the module-level recursive_import() helper and returns
     its result unchanged.
     """
     options = dict(
         single_book_per_directory=single_book_per_directory,
         callback=callback,
         added_ids=added_ids,
     )
     return recursive_import(self, root, **options)
Example #2
0
 def recursive_import(self,
                      root,
                      single_book_per_directory=True,
                      callback=None,
                      added_ids=None,
                      compiled_rules=()):
     """Recursively import books found under *root*.

     Thin wrapper: forwards this object (as the database handle) and all
     options, including *compiled_rules*, to the module-level
     recursive_import() helper and returns its result unchanged.
     """
     options = dict(
         single_book_per_directory=single_book_per_directory,
         callback=callback,
         added_ids=added_ids,
         compiled_rules=compiled_rules,
     )
     return recursive_import(self, root, **options)
Example #3
0
File: cmd_add.py — Project: rakyi/calibre
def do_add(
    dbctx, paths, one_book_per_directory, recurse, add_duplicates, otitle, oauthors,
    oisbn, otags, oseries, oseries_index, ocover, oidentifiers, olanguages,
    compiled_rules
):
    """Add the books at *paths* (files and/or directories) to the database.

    Single files have their metadata read, overridden by any of the o*
    command-line options, and are added via the db 'add' command.
    Directories are imported with recursive_import() when *recurse* is
    true, otherwise with one of the import_book_directory* helpers.
    Books skipped as duplicates are reported at the end, and the ids of
    newly added books are printed.
    """
    orig = sys.stdout
    try:
        # Split the incoming paths into existing files and directories,
        # warning about anything that does not exist.
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids = [], set()
        for book in files:
            # The book format comes from the file extension; files with
            # no extension cannot be added and are skipped silently.
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            with lopen(book, 'rb') as stream:
                mi = get_metadata(stream, stream_type=fmt, use_libprs_metadata=True)
            # Fall back to filename/Unknown when the file carries no
            # title/author metadata.
            if not mi.title:
                mi.title = os.path.splitext(os.path.basename(book))[0]
            if not mi.authors:
                mi.authors = [_('Unknown')]
            if oidentifiers:
                ids = mi.get_identifiers()
                ids.update(oidentifiers)
                mi.set_identifiers(ids)
            # Apply command-line metadata overrides.  An explicit mapping
            # replaces the previous locals()['o' + x] lookup, which is
            # fragile: the locals() mapping is not guaranteed to be usable
            # this way and the indirection hides which names are read.
            for field, val in (
                ('title', otitle), ('authors', oauthors), ('isbn', oisbn),
                ('tags', otags), ('series', oseries), ('languages', olanguages),
            ):
                if val:
                    setattr(mi, field, val)
            if oseries:
                mi.series_index = oseries_index
            if ocover:
                mi.cover = ocover
                mi.cover_data = (None, None)

            ids, dups = dbctx.run(
                'add', 'book', dbctx.path(book), fmt, read_cover(mi), add_duplicates
            )
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((mi.title, book))

        dir_dups = []
        dbproxy = DBProxy(dbctx)

        for dpath in dirs:
            if recurse:
                dups = recursive_import(
                    dbproxy,
                    dpath,
                    single_book_per_directory=one_book_per_directory,
                    added_ids=added_ids,
                    compiled_rules=compiled_rules,
                    add_duplicates=add_duplicates
                ) or []
            else:
                func = import_book_directory if one_book_per_directory else import_book_directory_multiple
                dups = func(
                    dbproxy,
                    dpath,
                    added_ids=added_ids,
                    compiled_rules=compiled_rules,
                    add_duplicates=add_duplicates
                ) or []
            dir_dups.extend(dups)

        # Switch to the real stdout for reporting; presumably the import
        # helpers above may have redirected sys.stdout — TODO confirm.
        # The caller's original stream is restored in the finally block.
        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(
                _(
                    'The following books were not added as '
                    'they already exist in the database '
                    '(see --duplicates option):'
                ),
                file=sys.stderr
            )
            # NOTE(review): titles go to stderr while format paths go to
            # stdout — kept as-is to preserve existing output streams.
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

        if added_ids:
            prints(_('Added book ids: %s') % (', '.join(map(type(u''), added_ids))))
    finally:
        sys.stdout = orig
Example #4
0
def do_add(
    dbctx, paths, one_book_per_directory, recurse, add_duplicates, otitle, oauthors,
    oisbn, otags, oseries, oseries_index, ocover, oidentifiers, olanguages,
    compiled_rules
):
    """Add the books at *paths* (files and/or directories) to the database.

    Single files have their metadata read, overridden by any of the o*
    command-line options, and are added via the db 'add' command.
    Directories are imported with recursive_import() when *recurse* is
    true, otherwise with one of the import_book_directory* helpers.
    Books skipped as duplicates are reported at the end, and the ids of
    newly added books are printed.
    """
    orig = sys.stdout
    try:
        # Split the incoming paths into existing files and directories,
        # warning about anything that does not exist.
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids = [], set()
        for book in files:
            # The book format comes from the file extension; files with
            # no extension cannot be added and are skipped silently.
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            with lopen(book, 'rb') as stream:
                mi = get_metadata(stream, stream_type=fmt, use_libprs_metadata=True)
            # Fall back to filename/Unknown when the file carries no
            # title/author metadata.
            if not mi.title:
                mi.title = os.path.splitext(os.path.basename(book))[0]
            if not mi.authors:
                mi.authors = [_('Unknown')]
            if oidentifiers:
                ids = mi.get_identifiers()
                ids.update(oidentifiers)
                mi.set_identifiers(ids)
            # Apply command-line metadata overrides.  An explicit mapping
            # replaces the previous locals()['o' + x] lookup, which is
            # fragile: the locals() mapping is not guaranteed to be usable
            # this way and the indirection hides which names are read.
            for field, val in (
                ('title', otitle), ('authors', oauthors), ('isbn', oisbn),
                ('tags', otags), ('series', oseries), ('languages', olanguages),
            ):
                if val:
                    setattr(mi, field, val)
            if oseries:
                mi.series_index = oseries_index
            if ocover:
                mi.cover = ocover
                mi.cover_data = (None, None)

            ids, dups = dbctx.run(
                'add', 'book', dbctx.path(book), fmt, read_cover(mi), add_duplicates
            )
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((mi.title, book))

        dir_dups = []
        dbproxy = DBProxy(dbctx)

        for dpath in dirs:
            if recurse:
                dups = recursive_import(
                    dbproxy,
                    dpath,
                    single_book_per_directory=one_book_per_directory,
                    added_ids=added_ids,
                    compiled_rules=compiled_rules,
                    add_duplicates=add_duplicates
                ) or []
            else:
                func = import_book_directory if one_book_per_directory else import_book_directory_multiple
                dups = func(
                    dbproxy,
                    dpath,
                    added_ids=added_ids,
                    compiled_rules=compiled_rules,
                    add_duplicates=add_duplicates
                ) or []
            dir_dups.extend(dups)

        # Switch to the real stdout for reporting; presumably the import
        # helpers above may have redirected sys.stdout — TODO confirm.
        # The caller's original stream is restored in the finally block.
        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(
                _(
                    'The following books were not added as '
                    'they already exist in the database '
                    '(see --duplicates option):'
                ),
                file=sys.stderr
            )
            # NOTE(review): titles go to stderr while format paths go to
            # stdout — kept as-is to preserve existing output streams.
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

        if added_ids:
            prints(_('Added book ids: %s') % (', '.join(map(type(u''), added_ids))))
    finally:
        sys.stdout = orig