Ejemplo n.º 1
0
def add_by_webpage(url, work=None, user=None):
    """Scrape one web page and load its editions and ebooks.

    Args:
        url: the page to scrape.
        work: optional existing work to attach the first edition to; after an
            edition is loaded, subsequent editions reuse that edition's work.
        user: passed through to ``load_ebooks`` (attribution of the add).

    Returns:
        The last edition loaded from the page, or None if the page yielded
        no editions.
    """
    edition = None
    scraper = get_scraper(url)
    loader = BasePandataLoader(url)
    pandata = Pandata()
    pandata.metadata = scraper.metadata
    for metadata in pandata.get_edition_list():
        edition = loader.load_from_pandata(metadata, work)
        # chain further editions onto the same work once the first is created
        work = edition.work
    loader.load_ebooks(pandata, edition, user=user)
    # NOTE: the former `edition if edition else None` was redundant —
    # edition is already None when no editions were loaded.
    return edition
Ejemplo n.º 2
0
def add_by_sitemap(url, maxnum=None):
    """Scrape a sitemap and load editions for every book it lists.

    Args:
        url: the sitemap URL to crawl.
        maxnum: optional cap on the number of books scraped.

    Returns:
        List of the editions that were successfully loaded.
    """
    # scrape_sitemap yields bookdata objects with .base and .metadata —
    # exactly what add_from_bookdatas consumes. Delegate to it instead of
    # duplicating the per-book loading loop here.
    return add_from_bookdatas(scrape_sitemap(url, maxnum=maxnum))
Ejemplo n.º 3
0
def add_from_bookdatas(bookdatas):
    """Load editions from an iterable of scraper bookdata objects.

    For each bookdata, its editions are loaded through a fresh
    BasePandataLoader (all editions of one bookdata share a work), then its
    ebooks are loaded. Returns the list of editions that were created.
    """
    added = []
    for data in bookdatas:
        loader = BasePandataLoader(data.base)
        pandata = Pandata()
        pandata.metadata = data.metadata
        work = None
        edition = None
        for edition_metadata in pandata.get_edition_list():
            edition = loader.load_from_pandata(edition_metadata, work)
            # reuse the first edition's work for the rest of this bookdata
            work = edition.work
        loader.load_ebooks(pandata, edition)
        if edition:
            added.append(edition)
    return added