def test_collapseuser(self):
    """collapseuser replaces the current user's home prefix with '~', leaving other paths intact."""
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual("~/.config/variety", Util.collapseuser("/home/peter/.config/variety"))
    # a different user's home must not be collapsed
    self.assertEqual("/home/peteraaa/.config/variety", Util.collapseuser("/home/peteraaa/.config/variety"))
    # non-home paths are returned unchanged
    self.assertEqual("/media/.config/variety", Util.collapseuser("/media/.config/variety"))
def fill_queue(self):
    """Populate self.queue with (origin_url, image_url, extra_metadata, filename)
    tuples fetched from a random page of the Unsplash photos API."""
    # random page so repeated fills see different images
    page = random.randint(1, 250)
    url = 'https://api.unsplash.com/photos/?page=%d&per_page=30&client_id=%s' % (page, UnsplashDownloader.CLIENT_ID)
    logger.info(lambda: "Filling Unsplash queue from " + url)
    r = Util.request(url)
    # record when we get close to the API rate limit; presumably other code
    # consults rate_limiting_started_time before retrying -- TODO confirm
    if int(r.headers.get('X-Ratelimit-Remaining', 1000000)) < 100:
        UnsplashDownloader.rate_limiting_started_time = time.time()
    for item in r.json():
        try:
            width = item['width']
            height = item['height']
            # skip images that don't match the user's size preferences
            if self.parent and not self.parent.size_ok(width, height):
                continue
            image_url = item['links']['download']
            origin_url = item['links']['html']
            filename = os.path.join(self.target_folder, Util.sanitize_filename(image_url.split('/')[-2] + '.jpg'))
            extra_metadata = {
                'sourceType': 'unsplash',
                'sfwRating': 100,  # Unsplash content is curated, treated as safe
                'author': item['user']['name'],
                'authorURL': item['user']['links']['html'],
                'keywords': [cat['title'].lower().strip() for cat in item['categories']]
            }
            self.queue.append((origin_url, image_url, extra_metadata, filename))
        except:
            # log, then re-raise: a malformed item aborts the whole fill
            logger.exception(lambda: "Could not process an item from Unsplash")
            raise
    random.shuffle(self.queue)
    logger.info(lambda: "Unsplash populated with %d URLs" % len(self.queue))
def _autoscroll_thread(self):
    """Background thread that smoothly autoscrolls the thumbs window while the mouse hovers over it."""
    last_update = time.time()
    while self.running:
        # park the thread while the mouse is outside the window
        while not self.mouse_in:
            if not self.running:
                return
            self.autoscroll_event.wait(10)
        # throttle to roughly one scroll step every 5 ms
        time.sleep(max(0, last_update + 0.005 - time.time()))
        if not self.mouse_position or self.scrolling_paused:
            continue
        x = self.mouse_position[0]
        y = self.mouse_position[1]
        def _go():
            # scroll along the window's main axis, driven by the mouse position
            if self.is_horizontal() and y > 0:
                self.autoscroll_step(self.scroll.get_hadjustment(), self.scroll.get_min_content_width(), x)
            elif not self.is_horizontal() and x > 0:
                self.autoscroll_step(self.scroll.get_vadjustment(), self.scroll.get_min_content_height(), y)
        # GTK calls must run on the main loop, not on this worker thread
        Util.add_mainloop_task(_go)
        last_update = time.time()
def _set_rating(widget, rating=rating):
    # Menu-item "activate" handler that writes an EXIF rating.
    # `rating` is bound as a default argument to avoid the late-binding
    # closure pitfall; `file` and `main_window` come from the enclosing scope.
    try:
        Util.set_rating(file, rating)
        main_window.on_rating_changed(file)
    except Exception:
        # writing EXIF can fail (unsupported format, read-only file); tell the user
        logger.exception(lambda: "Could not set EXIF rating")
        main_window.show_notification(_("Could not set EXIF rating"))
def _smart_fn(filename):
    """Pick the best wallpaper display mode for `filename` by comparing its
    aspect ratio to the primary display and the total multi-monitor area.

    Returns a DisplayModeData; falls back to plain "zoom" on any error.
    """
    try:
        image_w, image_h = Util.get_size(filename)
        primary_w, primary_h = Util.get_primary_display_size(hidpi_scaled=True)
        total_w, total_h = Util.get_multimonitor_display_size()
        if image_w * image_h * 10 < primary_w * primary_h:
            # image way smaller than primary monitor, tile it
            return DisplayModeData(set_wallpaper_param="wallpaper")
        else:
            image_ratio = image_w / image_h
            primary_ratio = primary_w / primary_h
            total_ratio = total_w / total_h
            if 2 * abs(image_ratio - primary_ratio) / (image_ratio + primary_ratio) < 0.2:
                # image ratio is close to primary screen ratio, zoom
                return DisplayModeData(set_wallpaper_param="zoom")
            elif 2 * abs(image_ratio - total_ratio) / (image_ratio + total_ratio) < 0.2:
                # image ratio is close to multimonitor total screen ratio, span it
                return DisplayModeData(set_wallpaper_param="spanned")
            else:
                # image ratio not close to screen ratio, fit with a blurred background
                cmd = IMAGEMAGICK_FIT_WITH_BLUR.replace(
                    "%W", str(primary_w)).replace("%H", str(primary_h))
                return DisplayModeData(set_wallpaper_param="zoom", imagemagick_cmd=cmd)
    except Exception:
        # FIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) and log the failure before falling back
        logger.exception(lambda: "Could not compute smart display mode, falling back to zoom")
        return DisplayModeData(set_wallpaper_param="zoom")
def fill_queue(self):
    """Return a fresh list of (origin_url, image_url, extra_metadata) tuples
    from the Unsplash API, or [] while we are backing off from rate limiting."""
    # respect Unsplash rate limiting for an hour after it was last detected
    if time.time() - UnsplashDownloader.rate_limiting_started_time < 3600:
        logger.info(
            lambda: "Unsplash queue empty, but rate limit reached, will try again later"
        )
        return []
    url = self.get_unsplash_api_url()
    logger.info(lambda: "Filling Unsplash queue from " + url)
    r = Util.request(url)
    # start backing off well before the limit is actually exhausted
    if int(r.headers.get("X-Ratelimit-Remaining", 1000000)) < 1000:
        UnsplashDownloader.rate_limiting_started_time = time.time()
    queue = []
    for item in r.json():
        try:
            width = item["width"]
            height = item["height"]
            if self.is_size_inadequate(width, height):
                continue
            # ask for a width of at least 1980px, or ~20% above the primary display width
            image_url = item["urls"]["full"] + "&w={}".format(
                max(1980, int(Util.get_primary_display_size()[0] * 1.2)))
            origin_url = item["links"][
                "html"] + UnsplashDownloader.UTM_PARAMS
            extra_metadata = {
                "sourceType": "unsplash",
                "sfwRating": 100,  # Unsplash content is curated, treated as safe
                "author": item["user"]["name"],
                "authorURL": item["user"]["links"]["html"] + UnsplashDownloader.UTM_PARAMS,
                "keywords": [
                    cat["title"].lower().strip() for cat in item["categories"]
                ],
                "extraData": {
                    # consumed later to report the download event to Unsplash exactly once
                    "unsplashDownloadLocation": item["links"]["download_location"],
                    "unsplashDownloadReported": False,
                },
            }
            queue.append((origin_url, image_url, extra_metadata))
        except:
            # log, then re-raise: a malformed item aborts the whole fill
            logger.exception(
                lambda: "Could not process an item from Unsplash")
            raise
    random.shuffle(queue)
    return queue
def test_guess_image_url(self):
    """guess_image_url derives a direct image URL from known source page URLs."""
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual('https://farm5.staticflickr.com/4032/4558166441_4e34855b39_o.jpg',
                     Util.guess_image_url({'sourceURL': 'https://www.flickr.com/photos/83646108@N00/4558166441'}))
    self.assertEqual('https://farm5.staticflickr.com/4077/4768189432_24275ea76b_b.jpg',
                     Util.guess_image_url({'sourceURL': 'http://www.flickr.com/photos/52821721@N00/4768189432'}))
    # direct image links are returned unchanged
    self.assertEqual('http://fc04.deviantart.net/fs71/i/2011/319/4/f/scarlet_leaf_wallpaper_by_venomxbaby-d4gc238.jpg',
                     Util.guess_image_url({'sourceURL': 'http://fc04.deviantart.net/fs71/i/2011/319/4/f/scarlet_leaf_wallpaper_by_venomxbaby-d4gc238.jpg'}))
def save_locally(self, origin_url, image_url, source_type=None, source_location=None,
                 source_name=None, force_download=False, extra_metadata=None,
                 local_filename=None):
    """Download image_url into this downloader's target folder.

    Skips banned URLs, already-downloaded files, and (in Safe mode) content
    with a low SFW rating or blacklisted keywords. Returns None when skipped;
    re-raises download errors.
    """
    # FIX: extra_metadata was a mutable default argument ({}), shared across calls
    if extra_metadata is None:
        extra_metadata = {}
    if not source_type:
        source_type = self.source_type
    if not source_name:
        source_name = self.name
    if not source_location:
        source_location = self.location
    if not force_download and self.parent and origin_url in self.parent.banned:
        logger.info(lambda: "URL " + origin_url + " is banned, skip downloading")
        return None
    try:
        os.makedirs(self.target_folder)
    except Exception:
        pass  # folder already exists
    # normalize protocol-relative URLs
    if origin_url.startswith('//'):
        origin_url = 'https:' + origin_url
    if image_url.startswith('//'):
        image_url = origin_url.split('//')[0] + image_url
    if not local_filename:
        local_filename = self.get_local_filename(image_url)
    logger.info(lambda: "Origin URL: " + origin_url)
    logger.info(lambda: "Image URL: " + image_url)
    logger.info(lambda: "Local name: " + local_filename)
    if not force_download and os.path.exists(local_filename):
        logger.info(lambda: "File already exists, skip downloading")
        return None
    # Safe mode: consult the server-side SFW rating for this origin URL
    if self.parent and self.parent.options.safe_mode:
        sfw_rating = Smart.get_sfw_rating(origin_url)
        if sfw_rating is not None and sfw_rating < 100:
            logger.info(lambda: "Skipping non-safe download %s. Is the source %s:%s "
                                "suitable for Safe mode?" % (origin_url, source_type, self.location))
            return None
    # Safe mode: also reject images tagged with blacklisted keywords
    if self.parent and self.parent.options.safe_mode and 'keywords' in extra_metadata:
        blacklisted = set(k.lower() for k in extra_metadata['keywords']) & Smart.get_safe_mode_keyword_blacklist()
        if len(blacklisted) > 0:
            logger.info(lambda: "Skipping non-safe download %s due to blacklisted keywords (%s). "
                                "Is the source %s:%s suitable for Safe mode?"
                                % (origin_url, str(blacklisted), source_type, self.location))
            return None
    try:
        r = Util.request(image_url, stream=True)
        with open(local_filename, 'wb') as f:
            Util.request_write_to(r, f)
    except Exception:
        # FIX: was Python 2 `except Exception, e` + `raise e` (which discards
        # the original traceback); a bare raise preserves it
        logger.info(lambda: "Download failed from image URL: %s (source location: %s) " % (image_url, self.location))
        raise
def test_guess_image_url(self):
    """guess_image_url should derive direct image URLs from known source pages."""
    cases = [
        # flickr photo pages resolve to the staticflickr original
        ('https://farm5.staticflickr.com/4032/4558166441_4e34855b39_o.jpg',
         'https://www.flickr.com/photos/83646108@N00/4558166441'),
        ('https://farm5.staticflickr.com/4077/4768189432_24275ea76b_b.jpg',
         'http://www.flickr.com/photos/52821721@N00/4768189432'),
        # direct image links come back unchanged
        ('http://fc04.deviantart.net/fs71/i/2011/319/4/f/scarlet_leaf_wallpaper_by_venomxbaby-d4gc238.jpg',
         'http://fc04.deviantart.net/fs71/i/2011/319/4/f/scarlet_leaf_wallpaper_by_venomxbaby-d4gc238.jpg'),
    ]
    for expected, source in cases:
        self.assertEqual(expected, Util.guess_image_url({'sourceURL': source}))
def test_is_dead_or_not_image(self):
    """Unreachable URLs and non-images count as dead; reachable images do not."""
    for dead in (None,
                 'not a URL',
                 'http://www.google.com/',
                 'http://vrty.org/',
                 'http://www.google.com/dejkjdrelkjflkrejfjre'):
        self.assertTrue(Util.is_dead_or_not_image(dead))
    for alive in ('http://upload.wikimedia.org/wikipedia/commons/5/53/Wikipedia-logo-en-big.png',
                  'https://farm8.staticflickr.com/7133/7527967878_85fea93129_o.jpg',
                  'http://interfacelift.com/wallpaper/D98ef829/00899_rustedbolt_2560x1600.jpg'):
        self.assertFalse(Util.is_dead_or_not_image(alive))
    # image URLs on hosts that have since gone away
    for gone in ('http://wallpapers.wallbase.cc/rozne/wallpaper-1227671.jpg',
                 'http://ns223506.ovh.net/rozne/a1b2/wallpaper-1996019.png'):
        self.assertTrue(Util.is_dead_or_not_image(gone))
def on_image_set_as_wallpaper(self, img, meta):
    """Report an Unsplash 'download' event the first time an image is set as wallpaper."""
    extra = meta.get("extraData", None)
    if not extra:
        return
    location = extra.get("unsplashDownloadLocation")
    already_reported = extra.get("unsplashDownloadReported")
    if not location or already_reported:
        return
    # ping Unsplash's download-tracking endpoint, then mark it done in the metadata
    Util.fetch("{}?client_id={}".format(location, UnsplashDownloader.CLIENT_ID))
    meta["extraData"]["unsplashDownloadReported"] = True
    Util.write_metadata(img, meta)
def setWallpaperHook(img, meta):
    """One-shot hook: report an Unsplash download event when an image becomes the wallpaper."""
    extra = meta.get('extraData', None)
    if not extra:
        return
    location = extra.get('unsplashDownloadLocation')
    if location and not extra.get('unsplashDownloadReported'):
        # notify Unsplash, then persist the flag so we never report twice
        Util.fetch('{}?client_id={}'.format(location, UnsplashDownloader.CLIENT_ID))
        meta['extraData']['unsplashDownloadReported'] = True
        Util.write_metadata(img, meta)
def check_quit():
    """Poll the `terminate` flag once a second; when set, start a clean shutdown."""
    global terminate, VARIETY_WINDOW
    if terminate:
        logging.getLogger("variety").info("Terminating signal received, quitting...")
        safe_print(_("Terminating signal received, quitting..."),
                   "Terminating signal received, quitting...")
        if VARIETY_WINDOW:
            GObject.idle_add(VARIETY_WINDOW.on_quit)
        # hard-exit fallback in case the graceful quit hangs
        Util.start_force_exit_thread(10)
    else:
        # not asked to quit yet -- check again in a second
        GObject.timeout_add(1000, check_quit)
def get_for_keyword(self, keyword):
    """Fetch a random page of Goodreads quotes tagged with `keyword`."""
    logger.info(lambda: "Fetching quotes from Goodreads for keyword=%s" % keyword)
    url = iri2uri(u"https://www.goodreads.com/quotes/tag?utf8=\u2713&id=%s" % keyword)
    soup = Util.html_soup(url)
    # collect the numbered pagination links, if any
    pagination = soup.find_all(href=re.compile('quotes/tag.*page='))
    page_numbers = list(Util.safe_map(int, [link.contents[0] for link in pagination]))
    if page_numbers:
        # jump to a random page so we don't always serve the same quotes
        chosen = random.randint(1, max(page_numbers))
        url = iri2uri(u"https://www.goodreads.com/quotes/tag?utf8=\u2713&id=%s&page=%d" % (keyword, chosen))
        soup = Util.html_soup(url)
    return self.get_from_soup(url, soup)
def get_for_author(self, author):
    """Fetch a random page of Goodreads quote search results for `author`."""
    logger.info(lambda: "Fetching quotes from Goodreads for author=%s" % author)
    url = iri2uri(u"https://www.goodreads.com/quotes/search?utf8=\u2713&q=%s" % author)
    soup = Util.html_soup(url)
    # collect the numbered pagination links, if any
    pagination = soup.find_all(href=re.compile('quotes/search.*page='))
    page_numbers = list(Util.safe_map(int, [link.contents[0] for link in pagination]))
    if page_numbers:
        # jump to a random page so we don't always serve the same quotes
        chosen = random.randint(1, max(page_numbers))
        url = iri2uri(u"https://www.goodreads.com/quotes/search?utf8=\u2713&q=%s&page=%d" % (author, chosen))
        soup = Util.html_soup(url)
    return self.get_from_soup(url, soup)
def _thumbs_thread(self):
    """Worker thread that adds a thumbnail for every image in self.images."""
    logger.debug(lambda: "Starting thumb thread %s, %d" % (str(self), time.time()))
    try:
        self.total_width = 0
        for i, file in enumerate(self.images):
            if not self.running:
                # the window was closed while we were still adding thumbs
                Util.add_mainloop_task(self.destroy)
                return
            self.add_image(file, at_front=False)
        # NOTE(review): this stores the last index, not the count (off by one),
        # and raises NameError when self.images is empty (caught below) -- confirm intent
        self.image_count = i
    except Exception:
        logger.exception(lambda: "Error while creating thumbs:")
def fill_queue(self):
    """Populate self.queue with (origin_url, image_url, extra_metadata, filename)
    tuples fetched from a random page of the Unsplash photos API."""
    # random page so repeated fills see different images
    page = random.randint(1, 250)
    url = 'https://api.unsplash.com/photos/?page=%d&per_page=30&client_id=%s' % (
        page, UnsplashDownloader.CLIENT_ID)
    logger.info(lambda: "Filling Unsplash queue from " + url)
    r = Util.request(url)
    # record when we get close to the API rate limit; presumably other code
    # consults rate_limiting_started_time before retrying -- TODO confirm
    if int(r.headers.get('X-Ratelimit-Remaining', 1000000)) < 100:
        UnsplashDownloader.rate_limiting_started_time = time.time()
    for item in r.json():
        try:
            width = item['width']
            height = item['height']
            # skip images that don't match the user's size preferences
            if self.parent and not self.parent.size_ok(width, height):
                continue
            image_url = item['links']['download']
            origin_url = item['links']['html']
            filename = os.path.join(
                self.target_folder,
                Util.sanitize_filename(image_url.split('/')[-2] + '.jpg'))
            extra_metadata = {
                'sourceType': 'unsplash',
                'sfwRating': 100,  # Unsplash content is curated, treated as safe
                'author': item['user']['name'],
                'authorURL': item['user']['links']['html'],
                'keywords': [
                    cat['title'].lower().strip() for cat in item['categories']
                ]
            }
            self.queue.append(
                (origin_url, image_url, extra_metadata, filename))
        except:
            # log, then re-raise: a malformed item aborts the whole fill
            logger.exception(
                lambda: "Could not process an item from Unsplash")
            raise
    random.shuffle(self.queue)
    logger.info(
        lambda: "Unsplash populated with %d URLs" % len(self.queue))
def fill_queue(self):
    """Fill self.queue with Bing Photo-of-the-Day entries from Bing's JSON feed."""
    logger.info(lambda: "Filling Bing queue from " + self.location)
    s = Util.fetch_json(BingDownloader.BING_JSON_URL)
    for item in s['images']:
        try:
            image_url = 'https://www.bing.com' + item['url']
            filename = item['url'].split('/')[-1]
            name = filename[0:filename.find('_EN')]
            src_url = 'https://www.bing.com/gallery/#images/%s' % name
            try:
                # Bing reports dates as YYYYMMDD; prettify when parseable
                date = datetime.strptime(item['startdate'], '%Y%m%d').strftime('%Y-%m-%d')
            except Exception:
                # FIX: narrowed from bare `except:`; fall back to the raw value
                date = item['startdate']
            extra_metadata = {
                'sourceType': 'bing',
                'sfwRating': 100,  # Bing POTD is curated, treated as safe
                'headline': 'Bing Photo of the Day, %s' % date,
                'description': item['copyright'],
            }
            self.queue.append((src_url, image_url, extra_metadata))
        except Exception:
            # FIX: narrowed from bare `except:`; a malformed item is logged and skipped
            logger.exception(lambda: "Could not process an item in the Bing json result")
    random.shuffle(self.queue)
    logger.info(lambda: "Bing queue populated with %d URLs" % len(self.queue))
def get_image_url(origin_url):
    """Resolve a Flickr photo page URL to the URL of its largest available size."""
    photo_id = FlickrDownloader.get_photo_id(origin_url)
    call = ('https://api.flickr.com/services/rest/?method=flickr.photos.getSizes'
            '&api_key=%s&photo_id=%s&format=json&nojsoncallback=1' % (API_KEY, photo_id))
    sizes = Util.fetch_json(call)['sizes']['size']
    # the widest size listed is the best-quality original
    largest = max(sizes, key=lambda size: int(size['width']))
    return largest['source']
def fill_queue(self):
    """Build a download queue from an ArtStation RSS feed (self.config is the feed URL)."""
    logger.info(lambda: "ArtStation URL: " + self.config)
    queue = []
    soup = Util.html_soup(self.config)
    channel = soup.find("channel")
    author = channel.find("title").get_text().strip()
    author_url = channel.find("link").next.strip()
    for index, entry in enumerate(soup.findAll("item")):
        try:
            metadata = {
                "headline": entry.find("title").get_text().strip(),
                "description": entry.find("description").get_text().strip().replace("]]>", ""),
                "author": author,
                "authorURL": author_url,
            }
            # append the index so multiple images in one entry get distinct source URLs
            src_url = entry.find("guid").text + "#" + str(index)
            for img in entry.findAll("img"):
                queue.append((src_url, img["src"], metadata))
        except Exception:
            logger.exception(lambda: "Could not process an item in the ArtStation rss result")
    random.shuffle(queue)
    return queue
def fill_queue(self):
    """Fill self.queue with direct image links parsed from a subreddit JSON listing."""
    logger.info(lambda: "Reddit URL: " + self.location)
    json_url = RedditDownloader.build_json_url(self.location)
    s = Util.fetch_json(json_url)
    for item in s['data']['children']:
        try:
            data = item['data']
            image_url = data['url']
            # bare imgur page links can be rewritten into direct image links
            if re.match(r'^http(s)?://imgur\.com/\w+$', image_url):
                image_url = image_url.replace('://', '://i.') + '.jpg'
            # only queue direct links to supported image formats
            if image_url.lower().endswith(('.jpg', '.jpeg', '.png')):
                src_url = 'https://www.reddit.com' + data['permalink']
                extra_metadata = {'sourceType': 'reddit'}
                if data['over_18']:
                    # NSFW post: rate 0, and skip it entirely in Safe mode
                    extra_metadata['sfwRating'] = 0
                    if self.parent and self.parent.options.safe_mode:
                        continue
                self.queue.append((src_url, image_url, extra_metadata))
        except Exception:
            logger.exception(lambda: "Could not process an item in the Reddit json result")
    random.shuffle(self.queue)
    logger.info(lambda: "Reddit queue populated with %d URLs" % len(self.queue))
def download_queue_item(self, queue_item):
    """Download one Wallhaven wallpaper: scrape its page for the image URL and
    metadata, honor Safe mode, and save the file locally."""
    wallpaper_url = queue_item
    logger.info(lambda: "Wallpaper URL: " + wallpaper_url)
    s = Util.html_soup(wallpaper_url)
    src_url = s.find("img", id="wallpaper")["src"]
    logger.info(lambda: "Image src URL: " + src_url)
    extra_metadata = {}
    try:
        extra_metadata["keywords"] = [
            el.text.strip() for el in s.find_all("a", {"class": "tagname"})
        ]
    except:
        pass  # tags are optional metadata; best effort
    try:
        # map Wallhaven's purity label onto our 0-100 SFW scale
        purity = s.find("div", "sidebar-content").find("label", "purity").text.lower()
        sfw_rating = {"sfw": 100, "sketchy": 50, "nsfw": 0}[purity]
        extra_metadata["sfwRating"] = sfw_rating
        if self.is_safe_mode_enabled() and sfw_rating < 100:
            logger.info(
                lambda: "Skipping non-safe download from Wallhaven. "
                "Is the source %s suitable for Safe mode?" % self.config)
            return None
    except:
        pass  # purity label missing or page layout changed; proceed without a rating
    return self.save_locally(wallpaper_url, src_url, extra_metadata=extra_metadata)
def set_icon(self, icon):
    """Update the tray indicator and/or status icon.

    "Light"/"Dark" prefer a named GTK theme icon when available, falling back
    to bundled PNGs; any other readable image path is used directly.
    """
    def _use_theme_icon(name):
        # theme icons are referenced by name, not by file path
        if self.indicator:
            logger.info(lambda: "Showing indicator icon %s from GTK theme" % name)
            self.indicator.set_icon(name)
        if self.status_icon:
            logger.info(lambda: "Showing status icon %s from GTK theme" % name)
            self.status_icon.set_from_icon_name(name)

    if icon == "Light":
        if Gtk.IconTheme.get_default().has_icon(THEME_ICON_NAME):
            _use_theme_icon(THEME_ICON_NAME)
            return
        icon_path = varietyconfig.get_data_file("media", "variety-indicator.png")
    elif icon == "Dark":
        if Gtk.IconTheme.get_default().has_icon(THEME_ICON_NAME_DARK):
            _use_theme_icon(THEME_ICON_NAME_DARK)
            return
        icon_path = varietyconfig.get_data_file("media", "variety-indicator-dark.png")
    elif icon and os.access(icon, os.R_OK) and Util.is_image(icon):
        # a custom, readable image file chosen by the user
        icon_path = icon
    else:
        icon_path = varietyconfig.get_data_file("media", "variety-indicator.png")

    if self.indicator:
        logger.info(lambda: "Showing indicator icon image: " + icon_path)
        self.indicator.set_icon(icon_path)
    if self.status_icon:
        logger.info(lambda: "Showing status icon image: " + icon_path)
        self.status_icon.set_from_file(icon_path)
def test_guess_source_type(self):
    """guess_source_type infers the downloader type from image metadata."""
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual(None, Util.guess_source_type({}))
    self.assertEqual(
        'mediarss',
        Util.guess_source_type({
            'sourceName': 'host.com',
            'sourceLocation': 'http://host.com/rss'
        }))
    self.assertEqual(
        'mediarss',
        Util.guess_source_type({
            'sourceName': 'camelid.deviantart.com',
            'sourceLocation': 'http://backend.deviantart.com/rss.xml?type=deviation&q=by%3ACamelid+sort%3Atime+meta%3Aall'
        }))
def fill_queue(self):
    """Populate self.queue from a subreddit JSON listing, keeping only direct image links."""
    logger.info(lambda: "Reddit URL: " + self.location)
    listing = Util.fetch_json(RedditDownloader.build_json_url(self.location))
    for child in listing['data']['children']:
        try:
            data = child['data']
            image_url = data['url']
            # rewrite bare imgur page URLs into direct image URLs
            if re.match(r'^http(s)?://imgur\.com/\w+$', image_url):
                image_url = image_url.replace('://', '://i.') + '.jpg'
            if not image_url.lower().endswith(('.jpg', '.jpeg', '.png')):
                continue
            extra_metadata = {'sourceType': 'reddit'}
            if data['over_18']:
                # NSFW post: rate 0, and skip it entirely in Safe mode
                extra_metadata['sfwRating'] = 0
                if self.parent and self.parent.options.safe_mode:
                    continue
            src_url = 'https://www.reddit.com' + data['permalink']
            self.queue.append((src_url, image_url, extra_metadata))
        except Exception:
            logger.exception(lambda: "Could not process an item in the Reddit json result")
    random.shuffle(self.queue)
    logger.info(lambda: "Reddit queue populated with %d URLs" % len(self.queue))
def test_safe_map(self):
    """safe_map drops elements whose mapping function raised, keeping the rest in order."""
    def f(i):
        if i <= 10:
            raise Exception
        return i
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual([20, 30], list(Util.safe_map(f, [1, 5, 20, 10, 30, 4])))
def ajax(self, url, data, error_msg_handler):
    """POST `data` to `url` and return the parsed JSON response.

    On an HTTP error, show a user-visible message through error_msg_handler,
    then re-raise so the caller can abort.
    """
    try:
        return Util.fetch_json(url, data)
    except requests.exceptions.HTTPError as e:
        # FIX: was Python 2-only `except ..., e` syntax
        logger.exception(lambda: 'HTTPError for ' + url)
        error_msg_handler(_('Oops, server returned error (%s)') % e.response.status_code)
        raise
def populate_sfw_menu(self):
    """Build the SFW-rating submenu (one item per rating plus a Safe mode toggle),
    performing all GTK work on the main loop."""
    try:
        self.rating_items = []
        sfw_ratings = Smart.get_all_sfw_ratings()
        def _gui_update(sfw_ratings):
            try:
                def _add_menuitem(rating):
                    menuitem = Gtk.ImageMenuItem(_(rating['label_long']))
                    menuitem.set_visible(True)
                    def _rate(*args, **kwargs):
                        # file=None: rate the currently shown wallpaper -- TODO confirm
                        self.parent.report_sfw_rating(file=None, rating=rating['rating'])
                    menuitem.connect("activate", _rate)
                    try:
                        menuitem.set_always_show_image(True)
                        image = Gtk.Image()
                        image.set_from_file(varietyconfig.get_data_file("media", "sfw-%s.svg" % rating['rating']))
                        menuitem.set_image(image)
                    except:
                        # the icon is cosmetic; keep the item even without it
                        logger.exception(lambda: "Could not set image to NSFW menuitem:")
                    self.sfw_menu.append(menuitem)
                    self.rating_items.append(menuitem)
                # NOTE(review): map() is lazy on Python 3, so the items would never be
                # added there -- this relies on Python 2 map(); confirm target interpreter
                map(_add_menuitem, reversed(sfw_ratings))
                separator = Gtk.SeparatorMenuItem.new()
                separator.set_visible(True)
                self.sfw_menu.append(separator)
                self.safe_mode = Gtk.CheckMenuItem(_("_Safe mode"))
                self.safe_mode.set_visible(True)
                self.safe_mode.set_active(self.parent.options.safe_mode)
                self.safe_mode.set_use_underline(True)
                # handler id is kept so the toggle can be updated without re-firing the handler
                self.safe_mode_handler_id = self.safe_mode.connect("toggled", self.parent.on_safe_mode_toggled)
                self.sfw_menu.append(self.safe_mode)
                self.sfw_menu_item.set_sensitive(True)
                self.parent.update_indicator()
            except Exception:
                logger.exception(lambda: 'Oops, could not populate NSFW menu:')
        # GTK widgets must be created/modified on the main loop
        Util.add_mainloop_task(_gui_update, sfw_ratings)
    except Exception:
        logger.exception(lambda: 'Oops, could not populate NSFW menu:')
def main():
    """Entry point: enforce a single running instance over D-Bus, then start the GTK main loop."""
    # refuse to run builds older than ~6 months past the release date
    # NOTE(review): Python 2 print statement -- this module is Python 2 only
    if datetime.datetime.now() - datetime.datetime.strptime(REL_DATE, "%Y-%m-%d") > datetime.timedelta(
            days=180):
        print VarietyWindow.OUTDATED_MSG
        return
    # Ctrl-C
    signal.signal(signal.SIGINT, sigint_handler)
    signal.signal(signal.SIGTERM, sigint_handler)
    signal.signal(signal.SIGQUIT, sigint_handler)
    Util.makedirs(os.path.expanduser(u"~/.config/variety/"))
    arguments = map(_u, sys.argv[1:])
    # validate arguments and set up logging
    options, args = VarietyWindow.VarietyWindow.parse_options(arguments)
    set_up_logging(options.verbose)
    monkeypatch_ssl()
    # progressively noisier tracing at higher verbosity levels
    if options.verbose > 2:
        Util.log_all(VarietyWindow.VarietyWindow)
    if options.verbose > 3:
        Util.log_all(ThumbsManager.ThumbsManager)
        Util.log_all(ThumbsWindow.ThumbsWindow)
    bus = dbus.SessionBus()
    # ensure singleton: if another instance owns the D-Bus name, forward the
    # command-line arguments to it and exit
    if bus.request_name(DBUS_KEY) != dbus.bus.REQUEST_NAME_REPLY_PRIMARY_OWNER:
        if not arguments:
            arguments = ["--preferences"]
        safe_print(_("Variety is already running. Sending the command to the running instance."),
                   "Variety is already running. Sending the command to the running instance.")
        method = bus.get_object(DBUS_KEY, DBUS_PATH).get_dbus_method("process_command")
        result = method(arguments)
        if result:
            safe_print(result)
        return
    # Run the application.
    window = VarietyWindow.VarietyWindow()
    global VARIETY_WINDOW
    VARIETY_WINDOW = window
    service = VarietyService(window)
    bus.call_on_disconnection(window.on_quit)
    window.start(arguments)
    # poll for a termination signal every 2 seconds
    GObject.timeout_add(2000, check_quit)
    GObject.threads_init()
    Gdk.threads_init()
    Gdk.threads_enter()
    Gtk.main()
    Gdk.threads_leave()
def get_for_search_url(self, url):
    """Fetch quotes from a QuotationsPage search URL, choosing a random result page."""
    logger.info(lambda: "Fetching quotes from Goodreads for search url=%s" % url)
    html = Util.fetch(url)
    try:
        # pick a random page out of "Page 1 of N"
        # FIX: raw string for the regex (`\d` in a plain string is a
        # DeprecationWarning / future SyntaxError)
        page = random.randint(1, int(re.findall(r'Page 1 of (\d+)', html)[0]))
        url += "&page=%d" % page
        html = Util.fetch(url)
    except Exception:
        pass  # probably just one page
    logger.info(lambda: "Used QuotationsPage url %s" % url)
    r = r'.*<dl>(.*)</dl>.*'
    if re.match(r, html, flags=re.M | re.S):
        # keep only the <dl> quote list -- without this BeautifulSoup gets
        # confused by some scripts
        html = re.sub(r, '<html><body>\\1</body></html>', html, flags=re.M | re.S)
    return self.get_from_html(url, html)
def get_sanitized_config(parent):
    """Build an anonymized copy of the user's configuration for usage reporting.

    Local paths are reduced to 'Default'/'Changed', local image/folder sources
    are dropped, and platform/screen/version information is added.
    """
    options = parent.options
    config = dict(options.__dict__)
    del config['configfile']
    # remove all references to local folders and files:
    config['favorites_folder'] = \
        'Default' if options.favorites_folder == os.path.expanduser(u"~/.config/variety/Favorites") else 'Changed'
    config['download_folder'] = \
        'Default' if options.download_folder == os.path.expanduser(u"~/.config/variety/Downloaded") else 'Changed'
    config['fetched_folder'] = \
        'Default' if options.fetched_folder == os.path.expanduser(u"~/.config/variety/Fetched") else 'Changed'
    config['copyto_folder'] = \
        'Default' if options.copyto_folder == 'Default' else 'Changed'
    config['quotes_favorites_file'] = \
        'Default' if options.quotes_favorites_file == os.path.expanduser(u"~/.config/variety/favorite_quotes.txt") else 'Changed'
    config['slideshow_custom_folder'] = \
        'Default' if options.slideshow_custom_folder == Util.get_xdg_pictures_folder() else 'Changed'
    # drop local folder/image sources -- only remote source types are reported
    config['sources'] = [s for s in options.sources
                         if s[1] not in (Options.SourceType.FOLDER, Options.SourceType.IMAGE)]
    # mask user-chosen favorites destinations, keeping only the well-known places
    config['favorites_operations'] = [
        ("custom_folder" if place not in ["/", "Downloaded", "Fetched", "Others"] else place, op)
        for (place, op) in options.favorites_operations]
    # add some general OS and desktop environment information
    config["platform"] = str(platform.platform())
    # NOTE(review): platform.linux_distribution() was removed in Python 3.8 -- confirm interpreter
    distro = platform.linux_distribution()
    config["linux_distribution"] = distro
    config["linux_distribution_distname"] = str(distro[0])
    config["linux_distribution_version"] = str(distro[1])
    config["linux_distribution_id"] = str(distro[2])
    config["desktop_session"] = str(os.getenv('DESKTOP_SESSION'))
    # add screen info - resolution, monitor count, etc.
    config["total_screen_width"] = Gdk.Screen.get_default().get_width()
    config["total_screen_height"] = Gdk.Screen.get_default().get_height()
    config["monitor_count"] = Gdk.Screen.get_default().get_n_monitors()
    try:
        rect = Gdk.Screen.get_default().get_monitor_geometry(Gdk.Screen.get_default().get_primary_monitor())
        config['primary_monitor_width'] = rect.width
        config['primary_monitor_height'] = rect.height
    except:
        pass  # primary monitor info is best-effort
    # add some other Variety-specifics things:
    config["variety_version"] = varietyconfig.get_version()
    config["image_count"] = parent.image_count
    with open(os.path.join(parent.config_folder, ".firstrun"), 'r') as f:
        config["first_run_timestamp"] = f.read()
    # add a timestamp
    config["report_timestamp"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    return config
def test_compare_versions(self):
    """compare_versions orders dotted version strings numerically, not lexically."""
    # FIX: assertEquals is a deprecated unittest alias; use assertEqual
    self.assertEqual(-1, Util.compare_versions("0.4.10", "0.4.11"))
    self.assertEqual(-1, Util.compare_versions("0.4.10", "0.5"))
    self.assertEqual(-1, Util.compare_versions("0.4.10", "1"))
    self.assertEqual(0, Util.compare_versions("0.4.10", "0.4.10"))
    self.assertEqual(1, Util.compare_versions("0.4.10", "0.4.8"))
    self.assertEqual(1, Util.compare_versions("0.4.10", "0.4"))
    self.assertEqual(1, Util.compare_versions("0.4.10", "0"))
def download_one(self):
    """Download one wallpaper from Wallhaven, honoring server-provided throttling hints.

    Returns the saved file (via save_locally) or None when throttled, the queue
    is empty, or the image is rejected by Safe mode.
    """
    min_download_interval, min_fill_queue_interval = self.parse_server_options("wallhaven", 0, 0)
    # class-level throttle shared across all Wallhaven sources
    if time.time() - WallhavenDownloader.last_download_time < min_download_interval:
        logger.info(
            lambda: "Minimal interval between Wallhaven downloads is %d, skip this attempt"
            % min_download_interval
        )
        return None
    logger.info(lambda: "Downloading an image from Wallhaven.cc, " + self.location)
    logger.info(lambda: "Queue size: %d" % len(self.queue))
    if not self.queue:
        # don't hammer the search endpoint when it keeps returning nothing
        if time.time() - self.last_fill_time < min_fill_queue_interval:
            logger.info(
                lambda: "Wallhaven queue empty, but minimal interval between fill attempts is %d, "
                "will try again later" % min_fill_queue_interval
            )
            return None
        self.fill_queue()
    if not self.queue:
        logger.info(lambda: "Wallhaven queue still empty after fill request")
        return None
    WallhavenDownloader.last_download_time = time.time()
    wallpaper_url = self.queue.pop()
    logger.info(lambda: "Wallpaper URL: " + wallpaper_url)
    # scrape the wallpaper page for the direct image URL and metadata
    s = Util.html_soup(wallpaper_url)
    src_url = s.find("img", id="wallpaper")["src"]
    logger.info(lambda: "Image src URL: " + src_url)
    extra_metadata = {}
    try:
        extra_metadata["keywords"] = [el.text.strip() for el in s.find_all("a", {"class": "tagname"})]
    except:
        pass  # tags are optional metadata; best effort
    try:
        # map Wallhaven's purity label onto our 0-100 SFW scale
        purity = s.find("div", "sidebar-content").find("label", "purity").text.lower()
        sfw_rating = {"sfw": 100, "sketchy": 50, "nsfw": 0}[purity]
        extra_metadata["sfwRating"] = sfw_rating
        if self.parent and self.parent.options.safe_mode and sfw_rating < 100:
            logger.info(
                lambda: "Skipping non-safe download from Wallhaven. "
                "Is the source %s suitable for Safe mode?" % self.location
            )
            return None
    except:
        pass  # purity label missing or layout changed; proceed without a rating
    return self.save_locally(wallpaper_url, src_url, extra_metadata=extra_metadata)
def is_valid_content(x):
    """Return True when the media RSS element `x` points at an image.

    An element qualifies if it carries a url attribute and either the URL
    itself looks like an image, or the element is declared an image via its
    medium/type attributes.
    """
    if x is None or "url" not in x.attrib:
        return False
    if Util.is_image(x.attrib["url"]):
        return True
    if "medium" in x.attrib and x.attrib["medium"].lower() == "image":
        return True
    return "type" in x.attrib and x.attrib["type"].lower().startswith("image/")
def test_get_local_name(self):
    """get_local_name derives a safe local filename from a download URL."""
    expectations = [
        ("img.jpg", "http://example.com/a/img?a=b"),
        ("img.jpg", "http://example.com/a/img.jpg?a=b"),
        ("img.jpg", "http://example.com/a/img.jpg#x"),
        ("img.jpg", "http://example.com/a/img.jpg?a=b#x"),
        # percent-escapes are decoded; unsafe characters are replaced
        ("im g.jpg", "http://example.com/a/im%20g.jpg?a=b#x"),
        ("im_g.jpg", "http://example.com/a/im%22g.jpg?a=b#x"),
    ]
    for expected, url in expectations:
        self.assertEqual(expected, Util.get_local_name(url))
    # overly long names must be truncated below the filesystem limit
    self.assertTrue(len(Util.get_local_name("a" * 1000 + ".jpg")) < 255)
def test_metadata(self):
    """Metadata written to an image file must round-trip unchanged (JPEG and SVG)."""
    info = {
        'sourceURL': u'b',
        'imageURL': u'd',
        'sourceName': u'a',
        'sourceLocation': u'c',
        'sourceType': u'flickr',
        'author': u'автор',
        'authorURL': u'url',
        'keywords': [u'дума1', u'дума2'],
        'headline': u'проба1',
        'description': u'проба2',
        'sfwRating': 50,
    }
    # JPEG round-trip
    self.assertTrue(os.path.exists('test.jpg'))
    self.assertTrue(Util.write_metadata('test.jpg', info))
    self.assertEqual(info, Util.read_metadata('test.jpg'))
    # SVG round-trip
    self.assertTrue(os.path.exists('test.svg'))
    Util.write_metadata('test.svg', info)
    self.assertEqual(info, Util.read_metadata('test.svg'))
def write_quote(quote, author, infile, outfile, options=None): done_event = threading.Event() w, h = Util.get_scaled_size(infile) exception = [None] def go(): try: surface = QuoteWriter.load_cairo_surface(infile, w, h) QuoteWriter.write_quote_on_surface(surface, quote, author, options) QuoteWriter.save_cairo_surface(surface, outfile) except Exception, e: exception[0] = e finally:
def download_one(self):
    """Fetch one random wallpaper from desktoppr.co, skipping anything not marked safe."""
    logger.info(lambda: "Downloading a random image from desktoppr.co")
    payload = Util.fetch_json(self.location)["response"]
    # honor desktoppr's own review flag; anything but "safe" is skipped
    if payload["review_state"] != "safe":
        logger.info(lambda: "Non-safe image returned by Desktoppr, skipping")
        return None
    return self.save_locally(payload["url"], payload["image"]["url"])
def get_extra_metadata(origin_url):
    """Query Flickr's getInfo API for the title, description, author and tags of a photo."""
    photo_id = FlickrDownloader.get_photo_id(origin_url)
    call = ('https://api.flickr.com/services/rest/?method=flickr.photos.getInfo'
            '&api_key=%s&photo_id=%s&format=json&nojsoncallback=1' % (API_KEY, photo_id))
    ph = Util.fetch_json(call)['photo']
    return {
        'headline': ph['title']['_content'],
        'description': ph['description']['_content'],
        'author': ph['owner']['realname'],
        'authorURL': 'https://www.flickr.com/photos/%s' % ph['owner']['nsid'],
        'keywords': [tag['_content'] for tag in ph['tags']['tag']],
    }
def get_random(self):
    """Fetch the latest quote from the QuotesDaddy RSS feed.

    Returns a one-element list of quote dicts, or None when the feed is empty.
    """
    url = "https://www.quotesdaddy.com/feed"
    bs = Util.xml_soup(url)
    item = bs.find("item")
    if not item:
        logger.warning(lambda: "Could not find quotes for URL " + url)
        return None
    link = item.find("link").contents[0].strip()
    s = item.find("description").contents[0]
    # the description ends with "- Author"; split on the last dash
    dash = s.rindex('- ')
    author = s[dash + 1:].strip()
    quote = s[:dash].strip().replace('"', '').replace('<br>', '\n').replace('<br/>', '\n').strip()
    # wrap in typographic double quotes
    quote = u"\u201C%s\u201D" % quote
    return [{"quote": quote, "author": author, "sourceName": "QuotesDaddy", "link": link}]
def authorize(self, on_success=None, on_failure=None):
    """Start the Facebook OAuth flow by opening the authorization page in a browser.

    The on_success/on_failure callbacks are stored and invoked later, when the
    redirect carrying the token (or an error) comes back.
    """
    logger.info(lambda: "Authorizing for Facebook")
    # clear any stale token before requesting a new one
    self.token = ''
    self.token_expire = ''
    self.on_success = on_success
    self.on_failure = on_failure
    # short random hash that ties the redirect back to this request
    self.hash = Util.random_hash()[:4]
    # Loads the Facebook OAuth page
    # NOTE(review): urllib.quote is Python 2 only (urllib.parse.quote on Python 3)
    auth_url = AUTH_URL % (
        urllib.quote(self.app_key),
        urllib.quote(AUTH_REDIRECT_URL % self.hash),
        urllib.quote(self.scope))
    webbrowser.open(auth_url)
def search(self, page=None):
    """Run a Wallhaven search, optionally jumping to a given result page.

    Returns (soup, result_count); result_count is None when it cannot be parsed.
    """
    url = self.url
    if page:
        # append the page number using the right query separator
        separator = "&" if "?" in self.url else "?"
        url = url + separator + "page=" + str(page)
    logger.info(lambda: "Performing wallhaven search: url=%s" % url)
    soup = Util.html_soup(url)
    try:
        header = soup.find("header", {"class": "listing-header"})
        result_count = int(header.find("h1").text.split()[0])
    except:
        # header missing or layout changed; count is best-effort
        result_count = None
    return soup, result_count
def create_rating_menu(file, main_window):
    """Build a Gtk submenu for setting the EXIF rating of the given file."""

    def _rating_handler(rating):
        # Produce an "activate" callback that writes the given rating value
        def _on_activate(widget, rating=rating):
            try:
                Util.set_rating(file, rating)
                main_window.on_rating_changed(file)
            except Exception:
                logger.exception(lambda: "Could not set EXIF rating")
                main_window.show_notification(_("Could not set EXIF rating"))
        return _on_activate

    try:
        current = Util.get_rating(file)
    except Exception:
        current = None

    rating_menu = Gtk.Menu()

    def _add_item(label, is_active, rating_value):
        # One radio-style entry; the entry matching the current rating is
        # shown checked and disabled.
        entry = Gtk.CheckMenuItem(label)
        entry.set_draw_as_radio(True)
        entry.set_active(is_active)
        entry.set_sensitive(not entry.get_active())
        entry.connect("activate", _rating_handler(rating_value))
        rating_menu.append(entry)

    # Star entries from 5 down to 1
    for stars in xrange(5, 0, -1):
        _add_item(u"\u2605" * stars, current == stars, stars)

    rating_menu.append(Gtk.SeparatorMenuItem.new())
    _add_item(_("Unrated"), current is None or current == 0, None)
    _add_item(_("Rejected"), current is not None and current < 0, -1)

    rating_menu.show_all()
    return rating_menu
def create_menu(self, file):
    """Build the right-click context menu for a thumbnail of the given file."""
    options = self.load_options()
    menu = Gtk.Menu()

    # Submenu: thumbnail strip position, rendered as radio-style check items
    position_menu = Gtk.Menu()
    for p, v in ThumbsManager.POSITIONS.items():
        item = Gtk.CheckMenuItem(ThumbsManager.POSITION_NAMES[p])
        item.set_draw_as_radio(True)
        item.set_active(options.position == v)
        # pos=p binds the current loop value, avoiding the late-binding closure pitfall
        def _set_position(widget, pos=p):
            self.set_position(pos)
        item.connect("activate", _set_position)
        position_menu.append(item)

    # Submenu: thumbnail size
    size_menu = Gtk.Menu()
    for size in ThumbsManager.SIZES:
        item = Gtk.CheckMenuItem(str(size))
        item.set_draw_as_radio(True)
        item.set_active(options.breadth == size)
        # size=size binds the current loop value (same late-binding concern as above)
        def _set_size(widget, size=size):
            self.set_size(size)
        item.connect("activate", _set_size)
        size_menu.append(item)

    position_item = Gtk.MenuItem(_("Position"))
    position_item.set_submenu(position_menu)
    menu.append(position_item)
    size_item = Gtk.MenuItem(_("Size"))
    size_item.set_submenu(size_menu)
    menu.append(size_item)
    menu.append(Gtk.SeparatorMenuItem.new())

    # Double the underscores: a single "_" would be treated as a mnemonic marker
    open_file = Gtk.MenuItem(os.path.basename(file).replace('_', '__'))
    def _open_file(widget):
        self.parent.open_file(widget, file)
    open_file.connect("activate", _open_file)
    menu.append(open_file)

    open_folder = Gtk.MenuItem(_("Show Containing Folder"))
    def _open_folder(widget):
        self.parent.open_folder(widget, file)
    open_folder.connect("activate", _open_folder)
    menu.append(open_folder)

    # Add a "view origin" entry only when the image metadata records its source
    info = Util.read_metadata(file)
    if info and "sourceURL" in info and "sourceName" in info:
        url = info["sourceURL"]
        source_name = info["sourceName"]
        if "Fetched" in source_name:
            label = _("Fetched: Show Origin")
        else:
            label = _("View at %s") % source_name
        # Keep the menu entry from growing too wide
        if len(label) > 50:
            label = label[:50] + "..."
        show_origin = Gtk.MenuItem(label)
        def _show_origin(widget=None):
            logger.info(lambda: "Opening url: " + url)
            webbrowser.open_new_tab(url)
        show_origin.connect("activate", _show_origin)
        menu.append(show_origin)

    menu.append(Gtk.SeparatorMenuItem.new())

    rating_item = Gtk.MenuItem(_("Set EXIF Rating"))
    rating_item.set_submenu(ThumbsManager.create_rating_menu(file, self.parent))
    if not os.access(file, os.W_OK):
        # Rating cannot be written to a read-only file
        rating_item.set_sensitive(False)
    menu.append(rating_item)
    menu.append(Gtk.SeparatorMenuItem.new())

    self.copy_to_favorites = Gtk.MenuItem(_("Copy to _Favorites"))
    self.copy_to_favorites.set_use_underline(True)
    def _copy_to_favorites(widget):
        self.parent.copy_to_favorites(widget, file)
    self.copy_to_favorites.connect("activate", _copy_to_favorites)
    menu.append(self.copy_to_favorites)

    self.move_to_favorites = Gtk.MenuItem(_("Move to _Favorites"))
    self.move_to_favorites.set_use_underline(True)
    def _move_to_favorites(widget):
        self.parent.move_to_favorites(widget, file)
        self.remove_image(file)
    self.move_to_favorites.connect("activate", _move_to_favorites)
    # Hidden by default; visibility is adjusted via update_favorites_menuitems below
    self.move_to_favorites.set_visible(False)
    menu.append(self.move_to_favorites)

    trash_item = Gtk.MenuItem(_("Delete to _Trash"))
    trash_item.set_use_underline(True)
    def _trash(widget):
        self.parent.move_to_trash(widget, file)
    trash_item.connect("activate", _trash)
    menu.append(trash_item)

    focus = Gtk.MenuItem(_("Where is it from?"))
    # Only enabled when the parent can map the file back to a configured source
    focus.set_sensitive(self.parent.get_source(file) is not None)
    def _focus(widget):
        self.parent.focus_in_preferences(widget, file)
    focus.connect("activate", _focus)
    menu.append(focus)

    menu.append(Gtk.SeparatorMenuItem.new())

    def close(widget):
        self.hide(gdk_thread=True, force=True)
    close_item = Gtk.MenuItem(_("Close"))
    close_item.connect("activate", close)
    menu.append(close_item)

    menu.show_all()

    # Let the parent decide which of copy/move-to-favorites should be shown
    favs_op = self.parent.determine_favorites_operation(file)
    self.parent.update_favorites_menuitems(self, False, favs_op)
    return menu