import json
import logging
import os
import time
import timeit

# The following names are expected to be provided elsewhere in the project:
# FACommandAPI, RWLock, is_alive, uniq_identifier, get_image_name,
# WAIT_1, WAIT_2, WAIT_3.


class FAClient:
    """
    Encapsulates all rwl and api operations
    """

    def __init__(self):
        self.api = FACommandAPI()
        self.download_queue = []
        self.cache = {}
        self.rwl = RWLock()
        self.last_operation_time = None
        self.__host = None
        self.__from = None
        self.__destination = None

    def __update_last_operation_time(self, op_time):
        self.last_operation_time = op_time

    def __p1(self):
        pass

    def __append_download_queue(self, contents):
        """
        SINGLE WRITER of the file queue.

        :param contents: list of files
        :return:
        """
        logging.getLogger().info("add_contents_to_download_queue ...")
        logging.getLogger().debug("Acquiring rwl Read")
        self.rwl.acquire_read()
        for content in contents:
            # Check the cache first: files already downloaded are skipped.
            uid = uniq_identifier(content)
            if uid in self.cache:
                continue
            if content not in self.download_queue:
                # Promote the read lock to a write lock only for the append.
                logging.getLogger().debug("Promoting rwl.")
                self.rwl.promote()
                print("Appending", content)
                self.download_queue.append(content)
                logging.getLogger().debug("Demoting rwl.")
                self.rwl.demote()
        logging.getLogger().debug("Releasing rwl Read")
        self.rwl.release()

    def load_cache(self):
        # Load the persisted cache, if present, from the module directory.
        current_dir = os.path.dirname(os.path.realpath(__file__))
        cache_path = os.path.join(current_dir, "ktmb_cache.json")
        if os.path.exists(cache_path):
            with open(cache_path) as cache_data:
                self.cache = json.load(cache_data)
        else:
            logging.getLogger().debug("Could not find cache file. Skipping.")

    def save_cache(self):
        current_dir = os.path.dirname(os.path.realpath(__file__))
        cache_path = os.path.join(current_dir, "ktmb_cache.json")
        with open(cache_path, 'w') as outfile:
            json.dump(self.cache, outfile)

    def io_loop(self):
        # Producer loop: poll the host for changes and feed the download queue.
        # Written as an iterative loop to avoid unbounded recursion.
        self.load_cache()
        while True:
            if is_alive(self.__host):
                last_operation_time = self.api.get_last_operation_time()
                if last_operation_time['Status'] == 0:
                    raise Exception(last_operation_time['Result'])
                logging.getLogger().debug(
                    "Last Change: " + str(last_operation_time['Result']))
                if last_operation_time['Result'] == self.last_operation_time:
                    logging.getLogger().debug("Last Change is same.")
                else:
                    self.__update_last_operation_time(
                        last_operation_time['Result'])
                    root_directories = self.api.get_directories()
                    if root_directories['Status'] == 0:
                        raise Exception(root_directories['Result'])
                    if len(root_directories['Result']) > 0:
                        for directory in root_directories['Result']:
                            files = self.api.get_files(path=directory['Path'])
                            if files['Status'] == 0:
                                logging.getLogger().error(files['Result'])
                                continue
                            if len(files['Result']) > 0:
                                self.__append_download_queue(files['Result'])
            else:
                self.__p1()
            time.sleep(WAIT_1)

    def queue_handling_loop(self):
        # Consumer loop: download the head of the queue, then move it to the cache.
        any_download = False
        while True:
            if not is_alive(self.__host):
                time.sleep(WAIT_2)
                continue
            any_download = False
            if len(self.download_queue) > 0:
                self.rwl.acquire_read()
                logging.getLogger().debug(
                    "QUEUE length: " + str(len(self.download_queue)))
                elem = self.download_queue[0]
                img_name = get_image_name(elem)
                tick = timeit.default_timer()
                self.api.download_file(
                    elem["Path"],
                    os.path.join(self.__destination, img_name),
                    skip_existing=True)
                tock = timeit.default_timer()
                logging.getLogger().info(
                    "Image downloaded " + str(tock - tick))
                # Promote to write access to mutate the queue and the cache.
                self.rwl.promote()
                self.download_queue.remove(elem)
                self.cache[uniq_identifier(elem)] = elem
                self.rwl.demote()
                self.rwl.release()
                any_download = True
            if any_download:
                time.sleep(WAIT_3)
            else:
                time.sleep(WAIT_2)

    def setup(self, host, source, destination):
        self.__host = host
        self.__from = source
        self.__destination = destination
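
# --- Illustrative sketch (not the project's real RWLock) --------------------
# FAClient relies on a promotable reader-writer lock with the interface
# acquire_read / promote / demote / release. The class below is a minimal
# sketch of such a lock, assuming at most one thread calls promote() at a
# time, which matches the single-writer pattern used above.
import threading


class SimpleRWLock:
    """Minimal promotable reader-writer lock sketch (assumption: one promoter)."""

    def __init__(self):
        self._cond = threading.Condition()
        self._readers = 0
        self._writer = False

    def acquire_read(self):
        with self._cond:
            while self._writer:
                self._cond.wait()
            self._readers += 1

    def promote(self):
        # Upgrade this thread's read lock to a write lock.
        with self._cond:
            self._readers -= 1
            while self._writer or self._readers > 0:
                self._cond.wait()
            self._writer = True

    def demote(self):
        # Downgrade the write lock back to a read lock.
        with self._cond:
            self._writer = False
            self._readers += 1
            self._cond.notify_all()

    def release(self):
        with self._cond:
            self._readers -= 1
            self._cond.notify_all()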
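
# --- Usage sketch (assumption, not part of the original module) -------------
# A minimal example of how the client might be wired up: io_loop acts as the
# producer filling download_queue, queue_handling_loop as the consumer
# draining it, so the two are assumed to run on separate threads. The host
# and path values below are placeholders.
if __name__ == "__main__":
    import threading

    logging.basicConfig(level=logging.DEBUG)

    client = FAClient()
    client.setup(host="192.168.1.10", source="/", destination="/tmp/fa_downloads")

    # Producer thread: polls the host and appends new files to the queue.
    producer = threading.Thread(target=client.io_loop, daemon=True)
    producer.start()

    # Consumer loop: downloads queued files; persist the cache on shutdown.
    try:
        client.queue_handling_loop()
    finally:
        client.save_cache()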