def set_graph(self, image_obj, graph_file=NA):
    """Resolve and display the graph (image file) for *image_obj*.

    When *graph_file* is NA it is resolved either from the image's
    user-specified location (GraphDirHandler) or fetched by pattern
    (GraphFetcher).  Returns True when the image was loaded and rendered,
    False when no file could be resolved or the file is not a usable image.
    """
    self.__cur_image_obj = image_obj
    digest = None
    if NA == graph_file:
        # A user-specified location takes precedence over a pattern fetch.
        graph_file, digest = GraphDirHandler(image_obj.location).get_graph() if image_obj.location else \
            GraphFetcher(size=image_obj.size, option=image_obj.option).fetch(image_obj.pattern)
    if NA == graph_file:
        # Neither source produced a file.
        return False
    debug("[view] %s" % graph_file)
    with open(graph_file, 'rb') as f:
        try:
            image = GraphViewer.get_image(f)
        except IOError as e:
            f.close()  # close f here for we are going to delete the file below
            # some image cannot be opened (maybe it's not image format?), err msg is 'cannot identify image file'
            info("fail to open image: %s" % str(e))
            GraphFetcher().handle_image(graph_file, DELETE)
            return False
        # we met "Decompressed Data Too Large" for ~/Inside Out/Image_124.jpg...
        except ValueError as e:
            info("fail to open image: %s" % str(e))
            return False
    self.__cur_graph_file = graph_file
    # Record the pair so navigation/removal can find it later.
    self.__graph_history.append(
        [self.__cur_image_obj, self.__cur_graph_file])
    if digest:
        digest_str = digest + "\n"
    else:
        # No fetcher digest available — fall back to the file path.
        digest_str = "%s:%s\n" % ("path", graph_file)
    self.__cur_digest = digest_str + "size:%sx%s" % (image.size[0], image.size[1])
    self.select_phrase(image_obj.pattern)
    return self.set_graph_content(graph_file, image)
def delete_image(self, *unused):
    """Drop the currently displayed image from history and delete its file.

    Images coming from a user-specified location are never removed
    (spec.: not support remove image that user 'specified').
    """
    if self.__cur_image_obj.location:
        return
    info(get_msg(Msg.remove_image), self.__cur_graph_file)
    history_entry = [self.__cur_image_obj, self.__cur_graph_file]
    self.__graph_history.remove(history_entry)
    GraphFetcher.handle_image(self.__cur_graph_file, DELETE)
    self.cancel_pending_jobs()
    self.timer_action(True)
def delete_image(self, *unused):
    """Drop the currently displayed image from history and delete its file.

    Images coming from a user-specified location are never removed.
    """
    if self.__cur_image_obj.location:
        return  # spec.: not support remove image that user 'specified'
    info("remove image %s" % self.__cur_graph_file)
    entry = [self.__cur_image_obj, self.__cur_graph_file]
    # remove() raises ValueError when the entry is absent, exactly like the
    # original first remove(); the single filtering pass then purges any
    # duplicates in O(n) instead of the original repeated
    # count()/remove() loop, which was O(n^2).
    self.__graph_history.remove(entry)
    self.__graph_history[:] = [e for e in self.__graph_history if e != entry]
    GraphFetcher.handle_image(self.__cur_graph_file, DELETE)
    self.cancel_pending_jobs()
    self.timer_action(True)
def set_graph_content(self, graph_file, image=None):
    """Render *graph_file* (or an already-opened *image*) on the canvas.

    Resizes to fullscreen geometry when fullscreen mode is active and
    refreshes the on-screen help/info/phrase overlays.  Returns True on
    success, False when a fullscreen resize fails (incomplete download).
    """
    if image is None:
        try:
            image = GraphViewer.get_image(graph_file)
        except IOError as e:
            error("[view] %s" % str(e))
            # NOTE(review): assert is stripped under `python -O`; an
            # explicit raise would be safer — confirm before changing.
            assert False
    self.__root.geometry(
        self.__full_geom if self.__fullscreen_mode else
        '%dx%d+0+0' % (image.size[0], image.size[1]))
    if self.__fullscreen_mode:
        resize_width, resize_height, x_pos, y_pos = self.get_adjusted_geom(
            image.size[0], image.size[1])
        try:
            # NOTE(review): Image.ANTIALIAS is deprecated/removed in
            # Pillow 10+ (use Image.LANCZOS) — confirm Pillow version.
            resized = image.resize((resize_width, resize_height), Image.ANTIALIAS)
        except IOError as e:
            # 'incomplete downloaded image' may go here
            info("fail to convert image to fullscreen: %s" % str(e))
            GraphFetcher().handle_image(graph_file, DISCARD)
            return False
        image = resized
    self.__root.title(self.__cur_image_obj.group_name)
    # Keep a reference on self: Tkinter does not hold PhotoImage objects,
    # so without this the image would be garbage-collected and go blank.
    tk_image_obj = ImageTk.PhotoImage(image)
    self.__tk_obj_ref = tk_image_obj
    self.__canvas.delete('all')
    self.__canvas.create_image(
        x_pos if self.__fullscreen_mode else 0,
        y_pos if self.__fullscreen_mode else 0,
        image=tk_image_obj, anchor=Tkinter.NW)
    self.show_onscreen_help()
    self.show_onscreen_info()
    self.show_onscreen_phrase()
    return True
def decrement_rank(self, *unused):
    """Lower the rank of the current image and refresh the info overlay."""
    info(get_msg(Msg.decrease_rank), self.__cur_graph_file)
    location = self.__cur_image_obj.location
    if location:
        # Directory-backed images are ranked through their handler.
        msg = GraphDirHandler.handle_image(location,
                                           self.__cur_graph_file, DEC_RANK)
    else:
        msg = GraphFetcher.handle_image(self.__cur_graph_file, DEC_RANK)
    self.__cur_digest += "\n%s" % msg
    self.show_onscreen_info()
def decrement_rank(self, *unused):
    """Lower the rank of the current image and refresh the info overlay."""
    info("decrease rank %s" % self.__cur_graph_file)
    handler_result = (
        GraphDirHandler.handle_image(self.__cur_image_obj.location,
                                     self.__cur_graph_file, DEC_RANK)
        if self.__cur_image_obj.location
        else GraphFetcher.handle_image(self.__cur_graph_file, DEC_RANK))
    self.__cur_digest += "\n%s" % handler_result
    self.show_onscreen_info()
def print_pattern(pattern):
    """Dump every cached slot recorded for *pattern* to stdout."""
    print("image:", pattern)
    # print(GraphFetcher.get_cache_file(pattern))
    [has_cache, cached_objs] = load(GraphFetcher.get_cache_file(pattern))
    assert has_cache
    # enumerate()/items() replaces the zip(dict, range(len(dict))) idiom
    # and the extra per-key dict lookup; iteration order is unchanged.
    for i, (url, image_slot) in enumerate(cached_objs.items()):
        # print("order:", i)
        print("url:", url)
        print("timestamp:", image_slot.timestamp)
        print("no.:", image_slot.encoding)
        print("rank:", image_slot.rank)
def print_pattern(pattern):
    """Dump every cached slot recorded for *pattern* to stdout."""
    print("image:", pattern)
    # print(GraphFetcher.get_cache_file(pattern))
    [has_cache, cached_objs] = load(GraphFetcher.get_cache_file(pattern))
    assert has_cache
    # enumerate()/items() replaces the zip(dict, range(len(dict))) idiom
    # and the extra per-key dict lookup; iteration order is unchanged.
    for i, (url, image_slot) in enumerate(cached_objs.items()):
        # print("order:", i)
        print("url:", url)
        print("timestamp:", image_slot.timestamp)
        print("no.:", image_slot.encoding)
        print("rank:", image_slot.rank)
def traverse():
    """Print the cache contents of every known pattern."""
    # The trailing `continue` of the original was a no-op and is removed.
    for pattern in GraphFetcher.get_cache_patterns():
        TestFetcher.print_pattern(pattern)