def archive():
    """Move files from local disk to tar files and update the paths in the db.

    Scans the ``cover`` table for rows with ``archived=False``, writes each
    row's four image sizes into tar files via ``TarManager``, records the
    new tar member names back on the row, and deletes the local files.
    Rows whose image files are missing on disk are skipped and left
    unarchived.
    """
    tar_manager = TarManager()
    _db = db.getdb()
    try:
        covers = _db.select('cover', where='archived=$f', order='id', vars={'f': False})
        for cover in covers:
            # Zero-padded id used to build the tar member names.
            cover_id = "%010d" % cover.id
            print('archiving', cover)

            files = {
                'filename': web.storage(name=cover_id + '.jpg', filename=cover.filename),
                'filename_s': web.storage(name=cover_id + '-S.jpg', filename=cover.filename_s),
                'filename_m': web.storage(name=cover_id + '-M.jpg', filename=cover.filename_m),
                'filename_l': web.storage(name=cover_id + '-L.jpg', filename=cover.filename_l),
            }

            # Required until coverstore is completely migrated to new code.
            ensure_thumbnail_created(cover.id, find_image_path(cover.filename))

            for d in files.values():
                # filename may be None/empty; path then stays falsy.
                d.path = d.filename and os.path.join(config.data_root, "localdisk", d.filename)

            if any(d.path is None or not os.path.exists(d.path) for d in files.values()):
                print("Missing image file for %010d" % cover.id, file=web.debug)
                continue

            # created may come back from the db as a string; normalize it.
            if isinstance(cover.created, str):
                from infogami.infobase import utils
                cover.created = utils.parse_datetime(cover.created)

            timestamp = time.mktime(cover.created.timetuple())
            for d in files.values():
                # Open in binary mode and close the handle once archived
                # (the original leaked the file object and used text mode).
                with open(d.path, 'rb') as f:
                    d.newname = tar_manager.add_file(d.name, f, timestamp)

            _db.update('cover',
                where="id=$cover.id",
                archived=True,
                filename=files['filename'].newname,
                filename_s=files['filename_s'].newname,
                filename_m=files['filename_m'].newname,
                filename_l=files['filename_l'].newname,
                vars=locals())

            # Local copies are redundant once the tar names are recorded.
            for d in files.values():
                print('removing', d.path)
                os.remove(d.path)
    finally:
        tar_manager.close()
def datetime_from_isoformat(expiry):
    """Parse an ISO-format timestamp into a datetime object.

    A ``None`` input is passed straight through, so callers can treat a
    missing expiry as "no expiry" without special-casing.
    """
    return None if expiry is None else parse_datetime(expiry)
def archive():
    """Move files from local disk to tar files and update the paths in the db.

    Scans the ``cover`` table for rows with ``archived=False``, writes each
    row's four image sizes into tar files via ``TarManager``, records the
    new tar member names back on the row, and deletes the local files.
    Rows whose image files are missing on disk are skipped and left
    unarchived.
    """
    tar_manager = TarManager()
    _db = db.getdb()
    try:
        covers = _db.select('cover', where='archived=$f', order='id', vars={'f': False})
        for cover in covers:
            # Zero-padded id used to build the tar member names.
            cover_id = "%010d" % cover.id
            print('archiving', cover)

            files = {
                'filename': web.storage(name=cover_id + '.jpg', filename=cover.filename),
                'filename_s': web.storage(name=cover_id + '-S.jpg', filename=cover.filename_s),
                'filename_m': web.storage(name=cover_id + '-M.jpg', filename=cover.filename_m),
                'filename_l': web.storage(name=cover_id + '-L.jpg', filename=cover.filename_l),
            }

            for d in files.values():
                # filename may be None/empty; path then stays falsy.
                d.path = d.filename and os.path.join(config.data_root, "localdisk", d.filename)

            if any(d.path is None or not os.path.exists(d.path) for d in files.values()):
                print("Missing image file for %010d" % cover.id, file=web.debug)
                continue

            # created may come back from the db as a string; normalize it.
            if isinstance(cover.created, six.string_types):
                from infogami.infobase import utils
                cover.created = utils.parse_datetime(cover.created)

            timestamp = time.mktime(cover.created.timetuple())
            for d in files.values():
                # Open in binary mode and close the handle once archived
                # (the original used text mode — wrong for JPEG data under
                # Python 3 — and leaked the file object).
                with open(d.path, 'rb') as f:
                    d.newname = tar_manager.add_file(d.name, f, timestamp)

            _db.update('cover',
                where="id=$cover.id",
                archived=True,
                filename=files['filename'].newname,
                filename_s=files['filename_s'].newname,
                filename_m=files['filename_m'].newname,
                filename_l=files['filename_l'].newname,
                vars=locals())

            # Local copies are redundant once the tar names are recorded.
            for d in files.values():
                print('removing', d.path)
                os.remove(d.path)
    finally:
        tar_manager.close()
def __init__(self, book):
    """Build an OPDS <entry> element describing a single edition.

    NOTE(review): relies on helpers from the enclosing class
    (create_root, add, add_list, add_author, add_category,
    create_rel_link, add_acquisition_links, add_rel_links) and the
    class attributes dcterms/rdvocab/bibo/xsi, none of which are
    visible in this chunk.
    """
    self.root = self.create_root('entry')
    bookID = book.key
    # The entry's atom id is the OPDS URL of this edition.
    atomID = 'https://openlibrary.org' + bookID + '.opds'
    title = book.title
    if book.subtitle:
        title += " " + book.subtitle
    updated = parse_datetime(book.last_modified).strftime('%Y-%m-%dT%H:%M:%SZ')
    # Prefer author/subject metadata from the first work, falling back
    # to the edition itself when the edition has no works.
    work = book.works and book.works[0]
    if work:
        authors = work.get_authors()
        subjects = work.get_subjects()
    else:
        authors = book.get_authors()
        subjects = book.get_subjects()
    if book.pagination:
        pages = book.pagination
    else:
        pages = book.number_of_pages
    # The collection and inlibrary check is copied from databarWork.html.
    collection = set()
    meta_fields = book.get_ia_meta_fields()
    if meta_fields:
        collection = meta_fields.get('collection', [])
        # NOTE(review): contrib is never read in this method — confirm
        # whether it is dead code or intended for a later use.
        contrib = meta_fields.get('contributor')
    coverLarge = book.get_cover_url('L')
    coverThumb = book.get_cover_url('S')
    self.add('id', atomID)
    self.create_rel_link(None, 'self', atomID)
    self.create_rel_link(None, 'alternate', 'https://openlibrary.org'+book.url(), 'text/html')
    self.add('title', title)
    self.add('updated', updated)
    for a in authors:
        self.add_author(a.name, 'https://openlibrary.org'+a.url())
    # Bibliographic fields in Dublin Core / RDA / bibo vocabularies.
    self.add_list(self.dcterms + 'publisher', book.publishers)
    self.add_list(self.rdvocab + 'placeOfPublication', book.publish_places)
    self.add_list(self.dcterms + 'issued', book.publish_date)
    self.add_list(self.dcterms + 'extent', pages)
    self.add_list(self.rdvocab + 'dimensions', book.physical_dimensions)
    self.add_list(self.bibo + 'edition', book.edition_name)
    # Subject names are slugified (lowercase, spaces -> underscores,
    # commas dropped) to form the category term path.
    for subject in subjects:
        self.add_category('/subjects/'+subject.lower().replace(' ', '_').replace(',',''), subject)
    self.add_list('summary', book.description)
    self.add_list(self.rdvocab + 'note', book.notes)
    for lang in book.languages:
        self.add_list(self.dcterms + 'language', lang.code)
    # Identifiers: OL key, archive.org id, ISBN-10/13, OCLC, LCCN.
    self.add_list(self.dcterms + 'identifier', book.key, 'https://openlibrary.org', {self.xsi+'type':'dcterms:URI'})
    self.add_list(self.dcterms + 'identifier', book.ocaid, 'https://archive.org/details/',
                  {self.xsi+'type':'dcterms:URI'})
    self.add_list(self.dcterms + 'identifier', book.isbn_10, 'urn:ISBN:', {self.xsi+'type':'dcterms:ISBN'})
    self.add_list(self.dcterms + 'identifier', book.isbn_13, 'urn:ISBN:', {self.xsi+'type':'dcterms:ISBN'})
    self.add_list(self.bibo + 'oclcnum', book.oclc_numbers)
    self.add_list(self.bibo + 'lccn', book.lccn)
    # OPDS image links are only emitted when the cover URLs exist.
    if coverLarge:
        self.create_rel_link(None, 'http://opds-spec.org/image', coverLarge, 'image/jpeg')
    if coverThumb:
        self.create_rel_link(None, 'http://opds-spec.org/image/thumbnail', coverThumb, 'image/jpeg')
    self.add_acquisition_links(book, collection)
    self.add_rel_links(book, work)