Example #1
def do_add(
    dbctx, paths, one_book_per_directory, recurse, add_duplicates, otitle, oauthors,
    oisbn, otags, oseries, oseries_index, ocover, oidentifiers, olanguages,
    compiled_rules
):
    with add_ctx():
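        # Split the supplied paths into plain files and directories,
        # warning about any path that does not exist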
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

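        # Add each standalone file as its own book; the format comes from the
        # file extension (files without an extension are skipped)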
        file_duplicates, added_ids = [], set()
        for book in files:
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            ids, dups, book_title = dbctx.run(
                'add', 'book', dbctx.path(book), os.path.basename(book), fmt,
                add_duplicates, otitle, oauthors, oisbn, otags, oseries,
                oseries_index, serialize_cover(ocover) if ocover else None,
                oidentifiers, olanguages
            )
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((book_title, book))

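        # Scan directories for per-book groups of formats (recursively if
        # requested); if a group contains an OPF file, try to pull cover data
        # from its metadata or its referenced cover file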
        dir_dups = []
        scanner = cdb_recursive_find if recurse else cdb_find_in_dir
        for dpath in dirs:
            for formats in scanner(dpath, one_book_per_directory, compiled_rules):
                cover_data = None
                for fmt in formats:
                    if fmt.lower().endswith('.opf'):
                        with lopen(fmt, 'rb') as f:
                            mi = get_metadata(f, stream_type='opf')
                            if mi.cover_data and mi.cover_data[1]:
                                cover_data = mi.cover_data[1]
                            elif mi.cover:
                                try:
                                    with lopen(mi.cover, 'rb') as f:
                                        cover_data = f.read()
                                except EnvironmentError:
                                    pass

                book_title, ids, dups = dbctx.run(
                    'add', 'format_group', tuple(map(dbctx.path, formats)),
                    add_duplicates, cover_data)
                if book_title is not None:
                    added_ids |= set(ids)
                    if dups:
                        dir_dups.append((book_title, formats))

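        # Restore the real stdout before printing the summary
        # (earlier steps may have redirected it)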
        sys.stdout = sys.__stdout__

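        # Report books that were skipped as duplicates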
        if dir_dups or file_duplicates:
            prints(
                _(
                    'The following books were not added as '
                    'they already exist in the database '
                    '(see --duplicates option):'
                ),
                file=sys.stderr
            )
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

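        # Finally, report the ids of the newly added books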
        if added_ids:
            prints(_('Added book ids: %s') % (', '.join(map(unicode_type, added_ids))))
Example #2
def do_add(dbctx, paths, one_book_per_directory, recurse, add_duplicates,
           otitle, oauthors, oisbn, otags, oseries, oseries_index, ocover,
           oidentifiers, olanguages, compiled_rules, oautomerge):
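    # Unlike Example 1, this variant supports automerge: a single request id
    # is generated for the whole invocation and passed to every backend call,
    # and ids of books that were automerged are tracked separately from
    # newly added ones.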
    request_id = uuid4()
    with add_ctx():
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids, merged_ids = [], set(), set()
        for book in files:
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            aids, mids, dups, book_title = dbctx.run(
                'add', 'book', dbctx.path(book), os.path.basename(book), fmt,
                add_duplicates, otitle, oauthors, oisbn, otags, oseries,
                oseries_index,
                serialize_cover(ocover) if ocover else None, oidentifiers,
                olanguages, oautomerge, request_id)
            added_ids |= set(aids)
            merged_ids |= set(mids)

            if dups:
                file_duplicates.append((book_title, book))

        dir_dups = []
        scanner = cdb_recursive_find if recurse else cdb_find_in_dir
        for dpath in dirs:
            for formats in scanner(dpath, one_book_per_directory,
                                   compiled_rules):
                cover_data = None
                for fmt in formats:
                    if fmt.lower().endswith('.opf'):
                        with lopen(fmt, 'rb') as f:
                            mi = get_metadata(f, stream_type='opf')
                            if mi.cover_data and mi.cover_data[1]:
                                cover_data = mi.cover_data[1]
                            elif mi.cover:
                                try:
                                    with lopen(mi.cover, 'rb') as f:
                                        cover_data = f.read()
                                except OSError:
                                    pass

                book_title, ids, mids, dups = dbctx.run(
                    'add', 'format_group', tuple(map(dbctx.path, formats)),
                    add_duplicates, oautomerge, request_id, cover_data)
                if book_title is not None:
                    added_ids |= set(ids)
                    merged_ids |= set(mids)
                    if dups:
                        dir_dups.append((book_title, formats))

        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(_('The following books were not added as '
                     'they already exist in the database '
                     '(see --duplicates option or --automerge option):'),
                   file=sys.stderr)
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

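        # Report newly added ids and, if automerge kicked in, merged ids too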
        if added_ids:
            prints(_('Added book ids: %s') % (', '.join(map(str, added_ids))))
        if merged_ids:
            prints(
                _('Merged book ids: %s') % (', '.join(map(str, merged_ids))))
Example #3
def do_add(dbctx, paths, one_book_per_directory, recurse, add_duplicates,
           otitle, oauthors, oisbn, otags, oseries, oseries_index, ocover,
           oidentifiers, olanguages, compiled_rules):
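    # A simpler variant: no automerge support, and directory format groups
    # are added without extracting cover data from OPF files.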
    with add_ctx():
        files, dirs = [], []
        for path in paths:
            path = os.path.abspath(path)
            if os.path.isdir(path):
                dirs.append(path)
            else:
                if os.path.exists(path):
                    files.append(path)
                else:
                    prints(path, 'not found')

        file_duplicates, added_ids = [], set()
        for book in files:
            fmt = os.path.splitext(book)[1]
            fmt = fmt[1:] if fmt else None
            if not fmt:
                continue
            ids, dups, book_title = dbctx.run(
                'add', 'book', dbctx.path(book), os.path.basename(book), fmt,
                add_duplicates, otitle, oauthors, oisbn, otags, oseries,
                oseries_index,
                serialize_cover(ocover) if ocover else None, oidentifiers,
                olanguages)
            added_ids |= set(ids)
            if dups:
                file_duplicates.append((book_title, book))

        dir_dups = []
        scanner = cdb_recursive_find if recurse else cdb_find_in_dir
        for dpath in dirs:
            for formats in scanner(dpath, one_book_per_directory,
                                   compiled_rules):
                book_title, ids, dups = dbctx.run(
                    'add', 'format_group', tuple(map(dbctx.path, formats)),
                    add_duplicates)
                if book_title is not None:
                    added_ids |= set(ids)
                    if dups:
                        dir_dups.append((book_title, formats))

        sys.stdout = sys.__stdout__

        if dir_dups or file_duplicates:
            prints(_('The following books were not added as '
                     'they already exist in the database '
                     '(see --duplicates option):'),
                   file=sys.stderr)
            for title, formats in dir_dups:
                prints(' ', title, file=sys.stderr)
                for path in formats:
                    prints('   ', path)
            if file_duplicates:
                for title, path in file_duplicates:
                    prints(' ', title, file=sys.stderr)
                    prints('   ', path)

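        # type(u'') resolves to unicode on Python 2 and str on Python 3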
        if added_ids:
            prints(
                _('Added book ids: %s') %
                (', '.join(map(type(u''), added_ids))))