def switch_to_next(self):
    """Advance to the next scraper, wrapping around to the first one.

    Fixes from the original: the local was named `id` (shadowing the
    builtin) and validity was tested with `in range(0, n)`, which on
    Python 2 builds a list and does an O(n) membership scan.
    """
    next_id = self.__cur_scaper_id + 1
    if 0 <= next_id < self._num_scrapers:
        self.__cur_scaper_id = next_id
    else:
        # Past the last scraper (or no scrapers at all): wrap to the first.
        self.__cur_scaper_id = 0
    log("manager.switch_to_next id=%d" % self.__cur_scaper_id)
def switch_to_previous(self):
    """Step back to the previous scraper, wrapping around to the last one.

    Fixes from the original: the local was named `id` (shadowing the
    builtin) and validity was tested with `in range(0, n)`, which on
    Python 2 builds a list and does an O(n) membership scan.
    """
    prev_id = self.__cur_scaper_id - 1
    if 0 <= prev_id < self._num_scrapers:
        self.__cur_scaper_id = prev_id
    else:
        # Before the first scraper: wrap to the last one.
        self.__cur_scaper_id = self._num_scrapers - 1
    log("manager.switch_to_previous id=%d" % self.__cur_scaper_id)
def decode_params():
    """Parse the plugin's query string (sys.argv[2]) into a flat dict.

    parse_qs yields lists of values; only the first value of each key
    is kept.
    """
    parsed = urlparse.parse_qs(sys.argv[2][1:])
    params = dict((key, values[0]) for key, values in parsed.iteritems())
    log('plugin.decode_params got params=%s' % params)
    return params
def __write_cache(self):
    """Persist the in-memory cache dict to CACHE_PATH/cache.json.

    Creates CACHE_PATH on first use. Fix from the original: the file
    handle returned by `open()` was never closed (leaked until GC);
    a `with` block now closes it deterministically.
    """
    log("manager.__write_cache started")
    if not os.path.isdir(CACHE_PATH):
        os.makedirs(CACHE_PATH)
    cache_file = os.path.join(CACHE_PATH, "cache.json")
    with open(cache_file, "w") as f:
        json.dump(self.__cache, f, indent=1)
    log("manager.__write_cache finished")
def get_scrapers():
    """Return a list of {'title', 'pic', 'id'} dicts, one per scraper."""
    log('plugin.get_scrapers started')
    manager = get_scraper_manager()
    result = []
    for scraper in manager.get_scrapers():
        result.append({'title': scraper['title'],
                       'pic': '',
                       'id': scraper['id']})
    return result
def get_photos(self, album_url):
    """Return the photos of `album_url`, serving from cache when fresh.

    A cache miss (or stale entry) triggers a live scrape via the current
    scraper, and the result is written back to the cache.
    """
    cache_key = "photos_%d_%s" % (self.__cur_scaper_id, album_url)
    photos = self.__get_cache(cache_key, MAX_AGE_PHOTOS)
    if not photos:
        photos = self.__current_scraper()._get_photos(album_url)
        self.__set_cache(cache_key, photos)
    log("manager.get_photos got %d items" % len(photos))
    return photos
def get_albums(self):
    """Return the current scraper's albums, serving from cache when fresh.

    A cache miss (or stale entry) triggers a live scrape, and the result
    is written back to the cache.
    """
    cache_key = "albums_%d" % self.__cur_scaper_id
    albums = self.__get_cache(cache_key, MAX_AGE_ALBUMS)
    if not albums:
        albums = self.__current_scraper()._get_albums()
        self.__set_cache(cache_key, albums)
    log("manager.get_albums got %d items" % len(albums))
    return albums
def get_photos(scraper_id, album_url):
    """Fetch the album's photos via the given scraper as plain dicts."""
    log('plugin.get_photos started with scraper_id=%s, album_url=%s'
        % (scraper_id, album_url))
    manager = get_scraper_manager()
    manager.switch_to_given_id(scraper_id)
    return [{'title': p['title'], 'pic': p['pic']}
            for p in manager.get_photos(album_url)]
def get_albums(scraper_id):
    """Fetch the given scraper's albums as plain dicts.

    The album's URL is exposed under the key 'id' for use as a plugin
    query parameter.
    """
    log('plugin.get_albums started with scraper_id=%s' % scraper_id)
    manager = get_scraper_manager()
    manager.switch_to_given_id(scraper_id)
    result = []
    for album in manager.get_albums():
        result.append({'title': album['title'],
                       'pic': album['pic'],
                       'id': album['album_url']})
    return result
def show_photos(scraper_id, album_url):
    """Add one non-folder directory item per photo of the album."""
    log('plugin.show_photos started with scraper_id=%s, album_url=%s'
        % (scraper_id, album_url))
    handle = int(sys.argv[1])
    for photo in get_photos(scraper_id, album_url):
        item = xbmcgui.ListItem(photo['title'],
                                iconImage='DefaultImage.png',
                                thumbnailImage=photo['pic'])
        item.setInfo(type='image', infoLabels={'Title': photo['title']})
        # The item URL is the picture itself, so the slideshow can use it.
        xbmcplugin.addDirectoryItem(handle=handle, url=photo['pic'],
                                    listitem=item, isFolder=False)
def showPhotos(self):
    """Fill the main image control with the photos of the selected album."""
    log('script.showPhotos started')
    # Remember where we came from so showAlbums can restore the selection.
    self.last_seen_album_id = int(self.getProperty('album_id'))
    self.current_mode = 'photos'
    window = xbmcgui.Window(xbmcgui.getCurrentWindowId())
    window.setProperty('Category', 'Photo')
    album_url = self.getProperty('album_url')
    self.getControl(self.CONTROL_MAIN_IMAGE).reset()
    self.addItems(self.ScraperManager.get_photos(album_url))
    log('script.showPhotos finished')
def addItems(self, items):
    """Append a ListItem per dict in `items` to the main image control.

    Optional keys 'album_url' / 'album_id' become window properties on
    the item. Fixes from the original: `'k' in d.keys()` materialized
    the key list on every check (Python 2); `getControl` is now hoisted
    out of the loop instead of being looked up per item.
    """
    log('script.addItems started')
    control = self.getControl(self.CONTROL_MAIN_IMAGE)
    for item in items:
        li = xbmcgui.ListItem(label=item['title'],
                              label2=item['description'],
                              iconImage=item['pic'])
        li.setProperty('album', self.ScraperManager.scraper_title)
        if 'album_url' in item:
            li.setProperty('album_url', item['album_url'])
        if 'album_id' in item:
            li.setProperty('album_id', str(item['album_id']))
        control.addItem(li)
def showAlbums(self, switch_to_album_id=0):
    """Fill the main image control with albums; optionally preselect one.

    `switch_to_album_id` restores the previously viewed album when
    returning from the photo view (0 selects nothing).
    """
    log('script.showAlbums started with switch to album_id: %s'
        % switch_to_album_id)
    self.current_mode = 'albums'
    window = xbmcgui.Window(xbmcgui.getCurrentWindowId())
    window.setProperty('Category', 'Album')
    self.getControl(self.CONTROL_MAIN_IMAGE).reset()
    self.addItems(self.ScraperManager.get_albums())
    if switch_to_album_id:
        self.getControl(self.CONTROL_MAIN_IMAGE).selectItem(switch_to_album_id)
    log('script.showAlbums finished')
def show_scrapers():
    """List every scraper as a folder item pointing to its albums view."""
    log('plugin.show_scrapers started')
    handle = int(sys.argv[1])
    plugin_id = Addon.getAddonInfo('id')
    for scraper in get_scrapers():
        item = xbmcgui.ListItem(scraper['title'],
                                iconImage='DefaultImage.png',
                                thumbnailImage='DefaultFolder.png')
        query = urllib.urlencode({'mode': 'albums',
                                  'scraper_id': scraper['id']})
        xbmcplugin.addDirectoryItem(handle=handle,
                                    url='plugin://%s/?%s' % (plugin_id, query),
                                    listitem=item, isFolder=True)
    log('plugin.show_scrapers finished')
def onInit(self):
    """Window start-up hook: apply settings, show help and the album list."""
    log('script.onInit started')
    self.show_info = True
    self.aspect_keep = True
    self.last_seen_album_id = 0
    # Honour the add-on settings for the navigation arrows...
    arrows_off = Addon.getSetting('show_arrows') == 'false'
    if arrows_off:
        self.getControl(self.CONTROL_ARROWS).setVisible(False)
    # ...and for the aspect-ratio indicator.
    aspect_off = Addon.getSetting('aspect_ratio2') == '0'
    if aspect_off:
        self.getControl(self.CONTROL_ASPECT_KEEP).setVisible(False)
    self.showHelp()
    self.showAlbums()
    self.setFocus(self.getControl(self.CONTROL_MAIN_IMAGE))
    log('script.onInit finished')
def show_albums(scraper_id):
    """List the scraper's albums as folder items pointing to photo views."""
    log('plugin.show_albums started with scraper_id=%s' % scraper_id)
    handle = int(sys.argv[1])
    plugin_id = Addon.getAddonInfo('id')
    for album in get_albums(scraper_id):
        item = xbmcgui.ListItem(album['title'],
                                iconImage='DefaultImage.png',
                                thumbnailImage=album['pic'])
        item.setInfo(type='image', infoLabels={'Title': album['title']})
        query = urllib.urlencode({'mode': 'photos',
                                  'scraper_id': scraper_id,
                                  'album_url': album['id']})
        xbmcplugin.addDirectoryItem(handle=handle,
                                    url='plugin://%s/?%s' % (plugin_id, query),
                                    listitem=item, isFolder=True)
    log('plugin.show_albums finished')
def __read_cache(self):
    """Load cache.json into self.__cache, rebuilding it when missing,
    unreadable, or written by an outdated cache version.

    Fixes from the original: the file handle from `open()` was never
    closed (leaked until GC) -- now wrapped in `with`; typo in the log
    message ("to old" -> "too old").
    """
    log("manager.__read_cache started")
    cache_file = os.path.join(CACHE_PATH, "cache.json")
    if os.path.isfile(cache_file):
        try:
            with open(cache_file, "r") as f:
                c = json.load(f)
            if "version" in c and c["version"] == CACHE_VERSION:
                self.__cache = c
            else:
                log("manager.__read_cache cache version too old")
                self.__recreate_cache()
        except ValueError:
            # Corrupt / non-JSON file: start over with an empty cache.
            log('manager.__read_cache could not read: "%s"' % cache_file)
            self.__recreate_cache()
    else:
        log('manager.__read_cache file does not exist: "%s"' % cache_file)
        self.__recreate_cache()
    log("manager.__read_cache finished")
def _get_tree(self, url):
    # NOTE(review): despite its name this method builds no tree and has no
    # return statement -- on success the fetched `html` is discarded and the
    # caller receives None. The body is presumably truncated in this chunk;
    # TODO confirm against the full source before relying on it.
    log("scraper._get_tree started with url=%s" % url)
    req = urllib2.Request(url)
    try:
        html = urllib2.urlopen(req).read()
        log("scraper._get_tree got web response")
    except urllib2.HTTPError, error:
        # HTTP errors are logged and swallowed (Python 2 except syntax).
        log("HTTPError: %s" % error)
def __get_cache(self, element_id, max_age):
    """Return cached data for `element_id`, or None when absent or stale.

    An entry is stale when `max_age` is truthy and its timestamp is more
    than `max_age` seconds in the past.
    """
    log("manager.__get_cache started with element_id:%s" % element_id)
    entry = self.__cache["content"].get(element_id)
    if entry is None:
        return None
    log("manager.__get_cache found element")
    if max_age and time.time() - entry["timestamp"] > max_age:
        log("manager.__get_cache element too old")
        return None
    return entry["data"]
def run():
    """Plugin entry point: dispatch on the `mode` query parameter.

    No mode -> scraper list; 'albums' -> album list; 'photos' -> photo
    list. Always closes the directory listing at the end.
    """
    params = decode_params()
    mode = params.get('mode')
    if mode is None:
        log('plugin.run started in scrapers-mode')
        show_scrapers()
    elif mode == 'albums':
        scraper_id = int(params['scraper_id'])
        log('plugin.run started in albums-mode')
        show_albums(scraper_id)
    elif mode == 'photos':
        log('plugin.run started in photos-mode')
        show_photos(int(params['scraper_id']), params['album_url'])
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def download_album(self):
    """Download every photo of the current album.

    Prompts for a target folder the first time (and stores it as the
    add-on's download_path setting); aborts silently if the user cancels
    the folder dialog.
    """
    log('script.download_album started')
    download_path = Addon.getSetting('download_path')
    if not download_path:
        prompt = Addon.getLocalizedString(32300)  # Choose default download path
        chosen = xbmcgui.Dialog().browse(3, prompt, 'pictures')
        if not chosen:
            return
        download_path = chosen
        Addon.setSetting('download_path', download_path)
    log('script.download_album using download_path="%s"' % download_path)
    album_url = self.getProperty('album_url')
    photos = self.ScraperManager.get_photos(album_url)
    downloader.Downloader(photos, download_path)
    log('script.download_album finished')
def startSlideshow(self):
    """Launch Kodi's builtin slideshow on the current album.

    Builds a plugin:// URL in photos-mode and passes it to the
    Slideshow builtin, randomized when the add-on setting says so.
    """
    log('script.startSlideshow started')
    params = {
        'scraper_id': self.ScraperManager.scraper_id,
        'mode': 'photos',
        'album_url': self.getProperty('album_url'),
    }
    if Addon.getSetting('random_slideshow') == 'true':
        shuffle_mode = 'random'
    else:
        shuffle_mode = 'notrandom'
    url = 'plugin://%s/?%s' % (Addon.getAddonInfo('id'),
                               urllib.urlencode(params))
    log('script.startSlideshow using url=%s' % url)
    xbmc.executebuiltin('Slideshow(%s, recursive, %s)' % (url, shuffle_mode))
    log('script.startSlideshow finished')
def __init__(self, skin_file, addon_path):
    # Script window constructor: wires up the scraper manager.
    # NOTE(review): `skin_file` and `addon_path` are unused in the visible
    # code -- presumably consumed by the base window class (e.g.
    # xbmcgui.WindowXML); TODO confirm, this chunk may not show the full
    # constructor.
    log('script.__init__ started')
    self.ScraperManager = ScraperManager(SCRAPERS_PATH)
def __set_cache(self, element_id, element_data):
    """Store `element_data` under `element_id` (timestamped) and persist."""
    log("manager.__set_cache started with element_id:%s" % element_id)
    entry = {"timestamp": time.time(), "data": element_data}
    self.__cache["content"][element_id] = entry
    self.__write_cache()
def __init__(self, scrapers_path):
    """Discover scrapers under `scrapers_path` and load the on-disk cache."""
    log("manager.__init__")
    self.__cur_scaper_id = 0
    self.__scrapers = self.__get_scrapers(scrapers_path)
    self._num_scrapers = len(self.__scrapers)
    self.__read_cache()
def switch_to_given_id(self, id):
    """Switch to scraper `id` when valid; otherwise keep the current one.

    The parameter keeps its original (builtin-shadowing) name for
    caller compatibility. Fix from the original: `in range(0, n)` built
    a list on Python 2 for an O(n) membership test; a chained comparison
    does the same check in O(1).
    """
    if 0 <= id < self._num_scrapers:
        self.__cur_scaper_id = id
    log("manager.switch_to_given_id id=%d" % self.__cur_scaper_id)
def __recreate_cache(self):
    """Reset the in-memory cache to an empty, current-version structure."""
    log("manager.__recreate_cache version: %d" % CACHE_VERSION)
    self.__cache = {"content": {}, "version": CACHE_VERSION}
def __init__(self, photos, download_path):
    # Download every photo in `photos` into an album-named sub-folder of
    # `download_path`, driving a progress dialog while doing so.
    self.len = len(photos)
    log("downloader.__init__ with %d items and path=%s" % (self.len, download_path))
    self.pDialog = xbmcgui.DialogProgress()
    self.pDialog.create(Addon.getAddonInfo("name"))
    s = Addon.getLocalizedString(32301)  # Gathering Data...
    self.pDialog.update(0, s)
    # Folder name comes from the first photo's album title, sanitized to
    # word chars/dashes with spaces turned into underscores; assumes all
    # photos belong to one album -- TODO confirm against the scrapers.
    album_title = photos[0]["album_title"]
    self.sub_folder = re.sub("[^\w\- ]", "", album_title).replace(" ", "_")
    self.full_path = os.path.join(download_path, self.sub_folder)
    log('script.download_album using full_path="%s"' % self.full_path)
    self.__create_folder(self.full_path)
    for i, photo in enumerate(photos):
        # 1-based counter, read by the progress callback.
        self.current_item = i + 1
        url = photo["pic"]
        # Target file name: last URL path segment, query string stripped.
        self.current_file = photo["pic"].split("/")[-1].split("?")[0]
        filename = os.path.join(self.full_path, self.current_file)
        log('downloader: Downloading "%s" to "%s"' % (url, filename))
        try:
            # update_progress serves as urlretrieve's reporthook.
            urllib.urlretrieve(url, filename, self.update_progress)
        except IOError, e:
            # First failed download aborts the whole batch.
            log('downloader: ERROR: "%s"' % str(e))
            break
        log("downloader: Done")
        if self.pDialog.iscanceled():
            # User hit cancel in the progress dialog: stop downloading.
            log("downloader: Canceled")
            break