def refresh(self, type, slug):
    if type not in ["album", "artist"]:
        raise ValueError("Invalid type %s supplied" % type)

    entity = None

    if type == "album":
        entity = library_dao.get_album_by_slug(slug)
    elif type == "artist":
        entity = library_dao.get_artist_by_slug(slug)

    if entity is not None:
        # drop the cached cover file and the stored blobs so the next
        # get_cover() call resizes or fetches a fresh image
        if entity.cover_path is not None and os.path.exists(entity.cover_path):
            os.remove(entity.cover_path)

        entity.cover_path = None
        entity.cover_hash = None
        entity.cover = None
        entity.cover_large = None

        get_database().commit()

        # notify connected clients that the cover changed
        if type == "album":
            ws.emit_all("covers.album.update", entity.id)
        elif type == "artist":
            ws.emit_all("covers.artist.update", entity.id)
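# --- Hypothetical usage sketch (assumption, not part of the original module) ---
# Forcing a cover to be rebuilt: refresh() deletes the cached file and blobs and
# emits a websocket update, after which the next get_cover() call either resizes
# the entity's cover_path again or queues a background fetch. The function name
# and the `covers` argument (an instance of the class defining these methods)
# are illustrative assumptions.
def rebuild_cover_example(covers, type, slug):
    covers.refresh(type, slug)
    return covers.get_cover(type, slug, size="large")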
def get_cover(self, type, slug, size="default"):
    if type not in ["album", "artist"]:
        raise ValueError("Invalid type %s supplied" % type)

    entity = None

    if type == "album":
        entity = library_dao.get_album_by_slug(slug)

        if entity is None:
            raise ValueError("Entity not found")

        remotes.update_album(entity)

        # no cover file on disk yet, queue a background fetch instead of blocking
        if entity.cover_path is None or not os.path.exists(entity.cover_path):
            try:
                cherrypy.engine.bgtask.put_unique(self.fetch_album_cover, 15, entity.id)
            except NonUniqueQueueError:
                pass
    elif type == "artist":
        entity = library_dao.get_artist_by_slug(slug)

        if entity is None:
            raise ValueError("Entity not found")

        remotes.update_artist(entity)

        if entity.cover_path is None or not os.path.exists(entity.cover_path):
            try:
                cherrypy.engine.bgtask.put_unique(self.fetch_artist_cover, 15, entity.id)
            except NonUniqueQueueError:
                pass

    if entity is None:
        raise ValueError("Entity not found")

    if entity.cover_path is not None:
        if entity.cover is None:
            # resize the original cover into the default and large variants,
            # then cache the resulting bytes (and a hash of them) on the entity
            cover_ext = os.path.splitext(entity.cover_path)[1].decode("utf8")

            temp_cover = self._mktemp(cover_ext).encode("utf8")
            temp_cover_large = self._mktemp(cover_ext).encode("utf8")

            cover = image_service.resize(
                entity.cover_path, temp_cover, Covers.DEFAULT_WIDTH,
                Covers.DEFAULT_HEIGHT, Covers.DEFAULT_GRAVITY
            )

            large_offset = self._get_image_offset(
                Covers.LARGE_WIDTH, Covers.LARGE_HEIGHT, Covers.LARGE_GRAVITY
            )

            cover_large = image_service.resize(
                entity.cover_path, temp_cover_large, Covers.LARGE_WIDTH,
                Covers.LARGE_HEIGHT, Covers.LARGE_GRAVITY, large_offset,
            )

            if cover and cover_large:
                import mmh3

                with open(temp_cover, "rb") as file:
                    entity.cover = file.read()

                entity.cover_hash = base64.b64encode(mmh3.hash_bytes(entity.cover))

                with open(temp_cover_large, "rb") as file:
                    entity.cover_large = file.read()

                os.remove(temp_cover)
                os.remove(temp_cover_large)

                get_database().commit()

        return self.guess_mime(entity), entity.cover_large if size == "large" else entity.cover

    return None, None
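# --- Hypothetical usage sketch (assumption, not part of the original module) ---
# Shows one way the (mimetype, data) tuple returned by get_cover() could be
# served over HTTP with CherryPy (already imported by this module): set
# Content-Type from the guessed mime type and return the raw image bytes.
# The controller name, its constructor and the 404 fallback are illustrative
# assumptions; cherrypy.expose, cherrypy.HTTPError and cherrypy.response.headers
# are standard CherryPy APIs.
class CoverExampleController:
    def __init__(self, covers):
        # `covers` is assumed to be an instance of the class defining get_cover()
        self.covers = covers

    @cherrypy.expose
    def default(self, type, slug, size="default"):
        mimetype, data = self.covers.get_cover(type, slug, size)

        if data is None:
            # no cover available yet; a background fetch may have been queued
            raise cherrypy.HTTPError(404)

        cherrypy.response.headers["Content-Type"] = mimetype
        return data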