Example #1
 def set_graph_content(self, graph_file, image=None):
     if image is None:
         try:
             image = GraphViewer.get_image(graph_file)
         except IOError as e:
             error(str(e))
             assert False
     self.__root.geometry(self.__full_geom if self.__fullscreen_mode else
                          '%dx%d+0+0' % (image.size[0], image.size[1]))
     if self.__fullscreen_mode:
         resize_width, resize_height, x_pos, y_pos = self.get_adjusted_geom(image.size[0], image.size[1])
         try:
             resized = image.resize((resize_width, resize_height), Image.ANTIALIAS)
         except IOError as e:
             # an incompletely downloaded image may end up here
             info(get_msg(Msg.fail_to_convert_image_to_fullscreen), str(e))
             GraphFetcher().handle_image(graph_file, DISCARD)
             return False
         image = resized
     self.__root.title(self.__cur_image_obj.group_name)
     tk_image_obj = ImageTk.PhotoImage(image)
     self.__tk_obj_ref = tk_image_obj  # keep a reference so the image is not garbage-collected
     self.__canvas.delete('all')
     self.__canvas.create_image(x_pos if self.__fullscreen_mode else 0, y_pos if self.__fullscreen_mode else 0,
                                image=tk_image_obj, anchor=Tkinter.NW)
     self.show_onscreen_help()
     self.show_onscreen_info()
     self.show_onscreen_phrase()
     return True
Example #2
def save(pickle_file, value):
    pickle_fd = open(pickle_file, "w")
    try:
        cPickle.dump(value, pickle_fd)
    except AttributeError as msg:
        error("fail to write cache %s" % str(msg))
    pickle_fd.close()
Example #3
def save(pickle_file, value):
    pickle_fd = open(pickle_file, "w")
    try:
        cPickle.dump(value, pickle_fd)
    except AttributeError as msg:
        error(get_msg(Msg.fail_to_write_cache), str(msg))
    pickle_fd.close()
Example #4
 def set_graph_content(self, graph_file, image=None):
     if image is None:
         try:
             image = GraphViewer.get_image(graph_file)
         except IOError as e:
             error("[view] %s" % str(e))
             assert False
     self.__root.geometry(
         self.__full_geom if self.__fullscreen_mode else '%dx%d+0+0' %
         (image.size[0], image.size[1]))
     if self.__fullscreen_mode:
         resize_width, resize_height, x_pos, y_pos = self.get_adjusted_geom(
             image.size[0], image.size[1])
         try:
             resized = image.resize((resize_width, resize_height),
                                    Image.ANTIALIAS)
         except IOError as e:
             # an incompletely downloaded image may end up here
             info("fail to convert image to fullscreen: %s" % str(e))
             GraphFetcher().handle_image(graph_file, DISCARD)
             return False
         image = resized
     self.__root.title(self.__cur_image_obj.group_name)
     tk_image_obj = ImageTk.PhotoImage(image)
     self.__tk_obj_ref = tk_image_obj  # keep a reference so the image is not garbage-collected
     self.__canvas.delete('all')
     self.__canvas.create_image(x_pos if self.__fullscreen_mode else 0,
                                y_pos if self.__fullscreen_mode else 0,
                                image=tk_image_obj,
                                anchor=Tkinter.NW)
     self.show_onscreen_help()
     self.show_onscreen_info()
     self.show_onscreen_phrase()
     return True
Example #5
 def __init__(self, in_file):
     self.__fd = open(in_file)
     try:
         self.__json_data = json.load(self.__fd)
     except Exception as e:
         error(get_msg(Msg.fail_read_file) + "\"", in_file, "\"")
         error(str(e))
         assert False
Example #6
 def get_cache_file(pattern):
     if not os.path.exists(pickle_home()):
         try:
             mkdir_p(pickle_home())
         except OSError as e:
             error("[fetch] cannot create program directory, program exits:")
             error(str(e))
             sys.exit()
     return pickle_home() + str(pattern) + ".pickle"
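
Examples #6 and #13 call a mkdir_p() helper that is not shown on this page. For reference, here is a minimal sketch of what such a helper usually looks like (an assumption, not the project's implementation): create the directory with any missing parents, and tolerate the case where it already exists.

import errno
import os

def mkdir_p(path):
    # sketch only: create 'path' with any missing parents; an already-existing
    # directory is not an error, any other OSError is re-raised to the caller
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(path):
            raise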
Example #7
 def __init__(self, in_file):
     self.__fd = open(in_file)
     try:
         self.__json_data = json.load(self.__fd)
     except ValueError as e:
         error("[json] json syntax error \"%s\"" % in_file)
         error(str(e))
         import sys
         sys.exit()
Example #8
    def __init__(self, data, root="", is_exclude=False):
        self.is_exclude = is_exclude
        if not type(data) in [str, dict]:
            error("[config] entry 'src' shall contain 'str' or 'dict' value instead of %s, program exit..."
                  % type(data))
            sys.exit()
        simple_spec = type(data) is str

        self.root = data[ROOT_KEY] if not simple_spec and ROOT_KEY in data else root
        assert type(self.root) is str

        # file: specify files by giving exact filenames/dirnames
        file_or_dir = data if simple_spec else data[FILE_KEY] if FILE_KEY in data else None
        assert not file_or_dir or type(file_or_dir) in [str, list]
        self.file_or_dir = file_or_dir if not file_or_dir or type(file_or_dir) is list else [file_or_dir]
        # ext: specify files by extension name
        ext = data[EXT_KEY] if not simple_spec and EXT_KEY in data else None
        assert not ext or type(ext) in [str, list]
        self.ext = ext if not ext or type(ext) is list else [ext]
        # re: specify files by regular expression matching
        re_data = data[RE_KEY] if not simple_spec and RE_KEY in data else None
        assert not re_data or type(re_data) in [str, list]
        self.re = re_data if not re_data or type(re_data) is list else [re_data]
        # dyn: specify files by re + custom code snippets
        dynamic = data[DYNAMIC_KEY] if not simple_spec and DYNAMIC_KEY in data else None
        assert not dynamic or type(dynamic) is list
        # 'dynamic' shall be either a single dyn-item (a [re-str, import-str, eval-str] list) or a list of dyn-items
        assert not dynamic or 0 == len(dynamic) or \
            (type(dynamic[0]) is list or (type(dynamic[0]) is str and len(dynamic) == 3))
        self.dynamic = dynamic if not dynamic or type(dynamic[0]) is list else [dynamic]

        assert self.file_or_dir or self.ext or self.re or self.dynamic

        if "" == self.root and self.file_or_dir and len(self.file_or_dir) == 1:
            dirname, basename = os.path.split(self.file_or_dir[0])
            self.root = dirname
            if len(basename):
                self.file_or_dir = [basename]
            else:
                self.file_or_dir = None
                self.re = [".*"]

        if "" is not self.root and not self.is_exclude:
            debug("root: %s" % self.root)

        self.show_sources()
        if len(self.root) > 0 and self.root[-1] != DIR_DELIM:
            self.root += DIR_DELIM

        # exclude: sources that need not be backed up (kept by a child 'Source' instance)
        assert not self.is_exclude or EXCLUDE_KEY not in data  # nested 'exclude' entry is not supported
        self.exclude = Source(data[EXCLUDE_KEY], self.root, True) if EXCLUDE_KEY in data else None
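
To make the parsing above easier to follow, here is a hypothetical 'src' value written in terms of the same *_KEY constants the constructor reads; the concrete key strings, paths, and names are illustrative assumptions, not taken from the project.

# hypothetical input for Source(); every value below is illustrative only
example_src = {
    ROOT_KEY: "/home/user/projects",      # optional base directory
    FILE_KEY: ["notes.txt", "photos"],    # exact file/dir names
    EXT_KEY: "py",                        # a single extension or a list of extensions
    RE_KEY: [r".*\.bak$"],                # regular-expression matching
    EXCLUDE_KEY: "photos/tmp",            # parsed as a nested Source with is_exclude=True
}
source = Source(example_src)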
Example #9
 def timer_action(self, user_next_image=False):
     if not user_next_image and self.__pause_slideshow:
         self.prepare_for_next_view(get_slideshow_rate() * 1000)
         return
     success = self.set_graph(self.select_pattern())
     if not success:
         self.prepare_for_next_view(1, "try fetch image again")
         if GraphViewer.CURR_FAIL_COUNT >= GraphViewer.FAIL_COUNT_TO_EXIT:
             error("[view] fail to fetch more images, program exits")
             sys.exit()
         GraphViewer.CURR_FAIL_COUNT += 1
         return
     GraphViewer.CURR_FAIL_COUNT = 0
     self.prepare_for_next_view(get_slideshow_rate() * 1000)
Example #10
def load(pickle_file):
    """output: is_exist, value"""
    try:
        pickle_fd = open(pickle_file, "r")
    except IOError as err:
        if errno.ENOENT == err.errno:
            show(get_msg(Msg.cache_file_does_not_exist), pickle_file)
            return False, None
        assert False
    try:
        value = cPickle.load(pickle_fd)
        return True, value
    except (ValueError, UnpicklingError, EOFError):
        error(get_msg(Msg.cannot_read_pickle_file), pickle_file, get_msg(Msg.suggest_re_fetch_pickle_file))
        assert False
Example #11
def load(pickle_file):
    """output: is_exist, value"""
    try:
        pickle_fd = open(pickle_file, "r")
    except IOError as err:
        if errno.ENOENT == err.errno:
            debug("cache file does not exist: %s" % pickle_file)
            return False, None
        assert False
    try:
        value = cPickle.load(pickle_fd)
        return True, value
    except (ValueError, UnpicklingError, EOFError):
        error("cannot read pickle file: %s, suggest re-fetch the pickle file" %
              pickle_file)
        assert False
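
Taken together, save() (Examples #2 and #3) and load() (Examples #10 and #11) form a simple on-disk pickle cache. A minimal usage sketch, assuming those helpers plus get_cache_file() from Example #6; the pattern name and payload are made up:

# look up a cached value for a pattern; recompute and store it on a miss
cache_file = get_cache_file("flower")     # resolves to <pickle_home>/flower.pickle
is_exist, value = load(cache_file)
if not is_exist:
    value = {"urls": []}                  # hypothetical payload
    save(cache_file, value)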
Example #12
 def get_graph_file(self, pattern, url, cached_encoding):
     """output: graph_file, encoding"""
     if NA == cached_encoding:  # means this url is not retrievable
         return NA, NA
     file_encoding = cached_encoding
     if not file_encoding:
         file_encoding = GraphFetcher.get_file_encoding(pattern)
     graph_dir = GraphFetcher.get_graph_dir(pattern)
     if not os.path.exists(graph_dir):
         try:
             os.makedirs(graph_dir)
         except OSError as e:
             error(get_msg(Msg.cannot_create_directory), str(e))
             import sys
             sys.exit()
     abs_graph_file = graph_dir + "image_" + file_encoding + ".jpg"
     if os.path.exists(abs_graph_file):
         return abs_graph_file, file_encoding
     if not self.__network_reachable:
         info(get_msg(Msg.give_up_fetch_image))
         return NA, None
     self.__has_write = True
     try:
         info(get_msg(Msg.fetch_image), url)
         try:
             web_content = urllib2.urlopen(url, timeout=10)
         except httplib.BadStatusLine:
             info(get_msg(Msg.obtain_unrecognized_status_code), url)
             return NA, NA
         fd = open(abs_graph_file, 'wb')
         fd.write(web_content.read())
         fd.close()
         assert os.path.exists(abs_graph_file)
         if os.stat(abs_graph_file).st_size <= 10240:
             info(get_msg(Msg.give_up_acquired_image_with_size), os.stat(abs_graph_file).st_size, "Bytes")
             info(get_msg(Msg.remove_image), abs_graph_file)
             os.remove(abs_graph_file)
             return NA, NA
         info(get_msg(Msg.fetch_succeed))
         return abs_graph_file, file_encoding
     except (IOError, httplib.IncompleteRead, ssl.CertificateError) as e:
         info(get_msg(Msg.failed_url), url)
         info(get_msg(Msg.error_message), str(e))
         if os.path.exists(abs_graph_file):
             fd.close()
             os.remove(abs_graph_file)
         return NA, NA
Example #13
 def get_graph_file(self, pattern, url, cached_encoding):
     """output: graph_file, encoding"""
     if NA == cached_encoding:  # means this url is not retrievable
         return NA, NA
     file_encoding = cached_encoding
     if not file_encoding:
         file_encoding = GraphFetcher.get_file_encoding(pattern)
     graph_dir = GraphFetcher.get_graph_dir(pattern)
     if not os.path.exists(graph_dir):
         try:
             mkdir_p(graph_dir)
         except OSError as e:
             error("[fetch] cannot create program directory, program exits:")
             error(str(e))
             sys.exit()
     abs_graph_file = graph_dir + "image_" + file_encoding + ".jpg"
     if os.path.exists(abs_graph_file):
         return abs_graph_file, file_encoding
     if not self.__network_reachable:
         info("give up fetching image (due to no network connection):")
         return NA, None
     self.__has_write = True
     try:
         info("fetch image: %s" % url)
         try:
             web_content = urllib2.urlopen(url, timeout=10)
         except httplib.BadStatusLine:
             info("give up fetching image (due to no network connection): %s" % url)
             return NA, NA
         fd = open(abs_graph_file, 'wb')
         fd.write(web_content.read())
         fd.close()
         assert os.path.exists(abs_graph_file)
         if os.stat(abs_graph_file).st_size <= 10240:
             info("give up acquired image with size: %s Bytes" % os.stat(abs_graph_file).st_size)
             info("remove image: %s" % abs_graph_file)
             os.remove(abs_graph_file)
             return NA, NA
         info("fetch succeeded")
         return abs_graph_file, file_encoding
     except (IOError, httplib.IncompleteRead, ssl.CertificateError) as e:
         info("failed url: %s" % url)
         info("error: %s" % str(e))
         if os.path.exists(abs_graph_file):
             fd.close()
             os.remove(abs_graph_file)
         return NA, NA
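
Examples #12 and #13 manage the downloaded file's handle by hand and have to close and remove it again inside the except branch. Below is a compact sketch of the same fetch-and-validate flow using a with block, written against the same Python 2 urllib2 API; it is an illustration, not the project's code.

import os
import urllib2

def fetch_to_file(url, path, min_size=10240):
    # sketch only: download 'url' into 'path', drop suspiciously small results
    try:
        response = urllib2.urlopen(url, timeout=10)
        with open(path, 'wb') as fd:      # closed even if read()/write() raises
            fd.write(response.read())
    except IOError:
        if os.path.exists(path):
            os.remove(path)
        return None
    if os.stat(path).st_size <= min_size:
        os.remove(path)
        return None
    return path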
Example #14
 def __init__(self, data):
     self.path = None
     self.detail = DEFAULT_DETAIL
     if not data:
         return
     if not type(data) in [str, dict]:
         error("[config] entry 'rpt' shall contain 'str' or 'dict' value instead of %s" % type(data))
         return
     path = data[PATH_KEY] if type(data) is not str and PATH_KEY in data else data
     if "" == path:
         return
     assert type(path) is str and "" != path
     if 0 == path.find(S3_HEAD):
         info("[config] report to aws s3 (%s) is not supported" % path)
         return
     if path[-1] != DIR_DELIM:
         path += DIR_DELIM
     self.path = path
     self.detail = get_bool_value(data, DETAIL_KEY, self.detail)
     debug("report path: %s" % self.path)
     debug("report detail: %s" % ("yes" if self.detail else "no"))
Example #15
File: s3.py Project: r-kan/BUFFY
 def __init__(self, dst_root, setting, args):
     assert 0 == dst_root.find(S3_HEAD)
     super(S3, self).__init__("s3", dst_root, setting, args)
     if self._dst_root[-1] != S3_DELIM:
         self._dst_root += S3_DELIM
     aws_path = get_aws_path()
     if not aws_path:
         error("[s3] cannot locate aws")
         self.okay = False
         return
     self.aws = aws_path
     self.cp_cmd = self.aws + " s3 cp "  # used for logging
     wo_head_path = self._dst_root[len(S3_HEAD):]
     end_bucket = wo_head_path.find(S3_DELIM)
     bucket = wo_head_path if -1 == end_bucket else wo_head_path[:end_bucket]
     info("[s3] checking bucket '%s' existence..." % bucket)
     cmd_list = [self.aws, "s3", "ls", bucket]
     stdout_data, _ = Popen(cmd_list, stdout=PIPE, stderr=STDOUT).communicate()
     res = pp_popen_out(stdout_data)
     # we assume 'aws ls' writes a platform-dependent newline to stdout when an error occurs
     self.okay = 0 != res.find('\r\n' if is_windows() else "\\n")
     if not self.okay:
         warning("[s3] fail to locate bucket '%s'" % bucket)
Example #16
    def __init__(self, config_file=None, src=None, dst=None, name=None, compress=None, encoding=None, rpt=None):
        data = None
        if config_file:
            if not os.path.exists(config_file):
                error("[BUFFY] config file \"%s\" does not exist, program exit..." % config_file)
                sys.exit()
            info("[BUFFY] reading config file \"%s\"..." % config_file)
            with open(config_file) as config_fp:
                import json
                data = json.load(config_fp)

        if not dst and (not data or DST_KEY not in data):
            error("[config] no \'dst\' specified, program exit...")
            sys.exit()
        dst = data[DST_KEY] if not dst else dst
        if not type(dst) in [str, list]:
            error("[config] entry 'src' shall contain 'str' or 'list' value instead of %s, program exit..."
                  % type(dst))
            sys.exit()

        if not src and (not data or SRC_KEY not in data):
            error("[config] no \'src\' specified, program exit...")
            sys.exit()

        self.dst = [dst] if type(dst) is str else dst
        self.name = name if name else data[NAME_KEY] if data and NAME_KEY in data else ""
        assert type(self.name) is str
        self.compress = compress if None is not compress else get_bool_value(data, COMPRESS_KEY, DEFAULT_COMPRESS)
        self.encoding = encoding if None is not encoding else get_bool_value(data, ENCODING_KEY, DEFAULT_ENCODING)

        debug("------------------------")
        if "" != self.name:
            debug("name: %s" % self.name)
        show_list(self.dst, "dst")
        self.src = Source(src if src else data[SRC_KEY])
        debug("compress: %s" % ("yes" if self.compress else "no"))
        debug("encoding: %s" % ("yes" if self.encoding else "no"))
        self.rpt = Report(rpt if rpt else data[RPT_KEY] if data and RPT_KEY in data else None)
        debug("------------------------")
Example #17
 def print_error(data):
     assert isinstance(data, dict) and "message" in data
     error("[search] search engine error: %s" % data["message"])
Example #18
 def print_error(data):
     assert isinstance(data, dict) and "message" in data
     error(get_msg(Msg.search_engine_err_msg), data["message"])