def __init__(self,
             file_urls,
             file_hash_infos=None,
             file_save_dir=None,
             file_sizes=None,
             concurrent_num=5,
             ):
    '''
    Initialize a multi-file download manager.

    Arguments:
    - file_urls: list of URLs to fetch.
    - file_hash_infos: optional hash info list, parallel to file_urls.
    - file_save_dir: directory the downloaded files are written to.
    - file_sizes: optional list of file sizes, parallel to file_urls.
    - concurrent_num: maximum number of concurrent greenlets.
    '''
    # Caller-supplied configuration.
    self.file_urls = file_urls
    self.file_hash_infos = file_hash_infos
    self.file_save_dir = file_save_dir
    self.file_sizes = file_sizes
    self.concurrent_num = concurrent_num

    # Per-url bookkeeping, filled in while downloading.
    self.greenlet_dict = {}
    self.fetch_file_dict = {}
    self.fetch_size_greenlet_dict = {}
    self.fetch_size_dict = {}
    self.size_dict = {}

    # Aggregate progress state.
    self.total_size = 0
    self.downloaded_size = 0
    self.update_time = -1

    # Greenlet pools are created lazily when the download starts.
    self.fetch_size_pool = None
    self.pool = None

    # Control / status flags and event hub.
    self.signal = EventRegister()
    self.stop_or_pause = False
    self.error_flag = False
def __init__(self,
             file_url,
             file_save_dir=None,
             file_save_name=None,
             file_hash_info=None,
             concurrent_num=5,
             buffer_size=8192,        # in bytes
             min_split_size=20480,    # in bytes
             file_size=None):
    '''
    Initialize a single-file downloader.

    Arguments:
    - file_url: URL to fetch.
    - file_save_dir: target directory; defaults to the XDG download
      directory, falling back to /tmp.
    - file_save_name: target filename; defaults to the URL basename.
    - file_hash_info: optional hash info used to verify the download.
    - concurrent_num: maximum number of concurrent download greenlets.
    - buffer_size: read buffer size in bytes.
    - min_split_size: minimum chunk size (bytes) before splitting further.
    - file_size: expected file size if already known, else None.
    '''
    self.file_url = file_url
    # Fix: use `is None` (identity) rather than `== None`.
    if file_save_dir is None:
        try:
            # NOTE(review): `commands` is Python-2-only; this module appears
            # to target Python 2.
            self.file_save_dir = commands.getoutput("xdg-user-dir DOWNLOAD")
        # Fix: narrow the bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; the /tmp fallback behavior is unchanged.
        except Exception:
            self.file_save_dir = "/tmp"
    else:
        self.file_save_dir = file_save_dir
    if file_save_name is None:
        # Default to the last path component of the URL.
        self.file_save_name = os.path.split(file_url)[1]
    else:
        self.file_save_name = file_save_name
    self.file_save_path = os.path.join(self.file_save_dir, self.file_save_name)
    # Partial downloads live in a sibling "<name>_tmp" directory until done.
    self.temp_save_dir = os.path.join(self.file_save_dir, "%s_tmp" % self.file_save_name)
    self.temp_save_path = os.path.join(self.temp_save_dir, self.file_save_name)
    self.concurrent_num = concurrent_num
    self.file_hash_info = file_hash_info
    self.buffer_size = buffer_size
    self.min_split_size = min_split_size
    self.fetch = self.get_fetch()
    self.update_greenlet_callbacks = []
    self.signal = EventRegister()
    self.stop_flag = False
    self.pause_flag = False
    self.file_size = file_size
    self.greenlet_dict = {}
    self.pool = Pool(self.concurrent_num)
    self.error_flag = False
    # Any reported error aborts the whole download.
    self.signal.register_event("error", lambda e: self.stop())
def __init__(self,
             file_urls,
             file_hash_infos=None,
             file_save_dir=None,
             file_sizes=None,
             concurrent_num=5,
             ):
    '''
    Set up state for downloading a batch of files concurrently.

    Arguments:
    - file_urls: URLs to download.
    - file_hash_infos: optional per-url hash info (parallel to file_urls).
    - file_save_dir: destination directory for the files.
    - file_sizes: optional per-url sizes (parallel to file_urls).
    - concurrent_num: greenlet pool size.
    '''
    self.file_urls = file_urls
    self.file_hash_infos = file_hash_infos
    self.file_save_dir = file_save_dir
    self.file_sizes = file_sizes
    self.concurrent_num = concurrent_num
    self.greenlet_dict = {}
    self.fetch_file_dict = {}
    self.total_size = 0
    self.fetch_size_greenlet_dict = {}
    self.fetch_size_dict = {}
    self.size_dict = {}
    self.signal = EventRegister()
    self.downloaded_size = 0
    # -1 marks "no progress sample taken yet".
    self.update_time = -1
    # Pools are not built until start() runs.
    self.fetch_size_pool = None
    self.pool = None
    self.stop_or_pause = False
    self.error_flag = False
def __init__(self,
             file_url,
             file_save_dir=None,
             file_save_name=None,
             file_hash_info=None,
             concurrent_num=5,
             buffer_size=8192,        # in bytes
             min_split_size=20480,    # in bytes
             file_size=None,
             ):
    '''
    Build the state needed to download one file.

    Arguments:
    - file_url: URL to fetch.
    - file_save_dir: save directory; if None, ask xdg-user-dir for the
      user's download directory and fall back to /tmp on failure.
    - file_save_name: save filename; if None, use the URL basename.
    - file_hash_info: optional hash info for verification.
    - concurrent_num: size of the greenlet pool.
    - buffer_size: read buffer size in bytes.
    - min_split_size: minimum split size in bytes.
    - file_size: known file size, or None to fetch it.
    '''
    self.file_url = file_url
    # Fix: identity comparison with None (`is None`, not `== None`).
    if file_save_dir is None:
        try:
            # NOTE(review): `commands` exists only on Python 2; confirm the
            # runtime before porting.
            self.file_save_dir = commands.getoutput("xdg-user-dir DOWNLOAD")
        # Fix: bare `except:` replaced with `except Exception:` so that
        # interpreter-exit exceptions propagate; fallback unchanged.
        except Exception:
            self.file_save_dir = "/tmp"
    else:
        self.file_save_dir = file_save_dir
    if file_save_name is None:
        self.file_save_name = os.path.split(file_url)[1]
    else:
        self.file_save_name = file_save_name
    self.file_save_path = os.path.join(self.file_save_dir, self.file_save_name)
    # In-progress data is staged under "<name>_tmp" next to the final file.
    self.temp_save_dir = os.path.join(self.file_save_dir, "%s_tmp" % self.file_save_name)
    self.temp_save_path = os.path.join(self.temp_save_dir, self.file_save_name)
    self.concurrent_num = concurrent_num
    self.file_hash_info = file_hash_info
    self.buffer_size = buffer_size
    self.min_split_size = min_split_size
    self.fetch = self.get_fetch()
    self.update_greenlet_callbacks = []
    self.signal = EventRegister()
    self.stop_flag = False
    self.pause_flag = False
    self.file_size = file_size
    self.greenlet_dict = {}
    self.pool = Pool(self.concurrent_num)
    self.error_flag = False
    # Abort the whole fetch on the first reported error.
    self.signal.register_event("error", lambda e: self.stop())
class FetchFiles(object):
    '''
    Download a batch of files concurrently using gevent greenlets.

    Emits events through self.signal: "start", "update" (percent, speed),
    "pause", "stop", "finish".

    NOTE(review): this class also calls self.start_fetch_size_greenlet and
    self.emit_error, which are defined elsewhere (not visible in this chunk).
    '''

    def __init__(self,
                 file_urls,
                 file_hash_infos=None,
                 file_save_dir=None,
                 file_sizes=None,
                 concurrent_num=5,
                 ):
        '''
        Initialize the batch downloader.

        Arguments:
        - file_urls: list of URLs to fetch.
        - file_hash_infos: optional hash info list, parallel to file_urls.
        - file_save_dir: destination directory for downloaded files.
        - file_sizes: optional size list, parallel to file_urls.
        - concurrent_num: maximum concurrent greenlets per pool.
        '''
        self.file_urls = file_urls
        self.file_hash_infos = file_hash_infos
        self.file_save_dir = file_save_dir
        self.file_sizes = file_sizes
        self.concurrent_num = concurrent_num
        self.greenlet_dict = {}
        self.fetch_file_dict = {}
        self.total_size = 0
        self.fetch_size_greenlet_dict = {}
        self.fetch_size_dict = {}
        self.size_dict = {}
        self.signal = EventRegister()
        self.downloaded_size = 0
        # -1 means "no progress sample yet" (see update()).
        self.update_time = -1
        self.fetch_size_pool = None
        self.pool = None
        self.stop_or_pause = False
        self.error_flag = False

    def start(self):
        '''
        Run the download: first fetch all file sizes, then fetch the files.
        Emits "start" immediately and "finish" on clean completion.
        '''
        self.signal.emit("start")

        # Phase 1: fetch file sizes concurrently.
        self.fetch_size_pool = Pool(self.concurrent_num)
        for file_url in self.file_urls:
            self.start_fetch_size_greenlet(file_url)
        self.fetch_size_pool.join()

        if not self.error_flag:
            # Phase 2: fetch file contents concurrently.
            self.pool = Pool(self.concurrent_num)
            # Fix: `is None` instead of `== None`; comprehension instead of map.
            if self.file_hash_infos is None:
                file_infos = [(file_url, None) for file_url in self.file_urls]
            else:
                file_infos = zip(self.file_urls, self.file_hash_infos)
            for file_info in file_infos:
                self.start_greenlet(file_info)
            self.pool.join()

            if not self.stop_or_pause and not self.error_flag:
                self.signal.emit("finish")

    def stop(self, pause_flag=False):
        '''
        Kill all greenlets and pools. Emits "pause" when pause_flag is
        True, otherwise "stop".
        '''
        self.stop_or_pause = True

        for greenlet in self.fetch_size_greenlet_dict.values():
            greenlet.kill()
        if self.fetch_size_pool:
            self.fetch_size_pool.kill()

        for fetch_file in self.fetch_file_dict.values():
            fetch_file.stop(pause_flag)
        for greenlet in self.greenlet_dict.values():
            greenlet.kill()
        if self.pool:
            self.pool.kill()

        if pause_flag:
            self.signal.emit("pause")
        else:
            self.signal.emit("stop")

    def pause(self):
        '''Pause the download (stop while keeping partial data).'''
        self.stop(True)

    def update(self, update_info):
        '''
        Aggregate progress from the child FetchFile objects and emit an
        "update" event with (percent_done, bytes_per_second). Throttled to
        at most once per second.
        '''
        current_time = time.time()
        if current_time - self.update_time > 1:
            downloaded_size = 0
            for fetch_file in self.fetch_file_dict.values():
                if hasattr(fetch_file, "update_info"):
                    downloaded_size += fetch_file.update_info["downloaded_size"]

            if self.update_time == -1:
                # First sample: no previous timestamp, so no speed yet.
                speed = 0
            else:
                speed = float(downloaded_size - self.downloaded_size) / (current_time - self.update_time)

            self.update_time = current_time
            self.downloaded_size = downloaded_size

            # Fix: guard against ZeroDivisionError when total_size is 0
            # (e.g. sizes unknown or nothing to download).
            if self.total_size:
                percent = (float(self.downloaded_size) / self.total_size) * 100
            else:
                percent = 0.0
            self.signal.emit("update", percent, int(speed))

    def start_greenlet(self, file_info):
        '''
        Spawn one FetchFile greenlet for a (file_url, file_hash_info) tuple.

        Fix: the original used Python-2-only tuple parameter unpacking in
        the signature; callers still pass the same tuple.
        '''
        file_url, file_hash_info = file_info
        fetch_file = FetchFile(
            file_url=file_url,
            file_hash_info=file_hash_info,
            file_save_dir=self.file_save_dir,
            file_size=self.size_dict[file_url],
            )
        fetch_file.signal.register_event("update", self.update)
        fetch_file.signal.register_event("error", self.emit_error)
        greenlet = Greenlet(lambda f: f.start(), fetch_file)
        self.fetch_file_dict[file_url] = fetch_file
        self.greenlet_dict[file_url] = greenlet
        self.pool.start(greenlet)
class FetchFiles(object):
    '''
    Concurrent multi-file downloader built on gevent greenlet pools.

    Signal events emitted: "start", "update" (percent, speed), "pause",
    "stop", "finish".

    NOTE(review): self.start_fetch_size_greenlet and self.emit_error are
    referenced below but defined outside this chunk.
    '''

    def __init__(self,
                 file_urls,
                 file_hash_infos=None,
                 file_save_dir=None,
                 file_sizes=None,
                 concurrent_num=5,
                 ):
        '''
        Set up downloader state.

        Arguments:
        - file_urls: URLs to fetch.
        - file_hash_infos: optional hash info, parallel to file_urls.
        - file_save_dir: destination directory.
        - file_sizes: optional sizes, parallel to file_urls.
        - concurrent_num: greenlet pool size.
        '''
        self.file_urls = file_urls
        self.file_hash_infos = file_hash_infos
        self.file_save_dir = file_save_dir
        self.file_sizes = file_sizes
        self.concurrent_num = concurrent_num
        self.greenlet_dict = {}
        self.fetch_file_dict = {}
        self.total_size = 0
        self.fetch_size_greenlet_dict = {}
        self.fetch_size_dict = {}
        self.size_dict = {}
        self.signal = EventRegister()
        self.downloaded_size = 0
        # -1 is the "never sampled" sentinel used by update().
        self.update_time = -1
        self.fetch_size_pool = None
        self.pool = None
        self.stop_or_pause = False
        self.error_flag = False

    def start(self):
        '''
        Fetch all file sizes, then all file contents. Emits "start" first
        and "finish" if everything completed without stop/pause/error.
        '''
        self.signal.emit("start")

        # First pass: determine sizes.
        self.fetch_size_pool = Pool(self.concurrent_num)
        for file_url in self.file_urls:
            self.start_fetch_size_greenlet(file_url)
        self.fetch_size_pool.join()

        if not self.error_flag:
            # Second pass: download contents.
            self.pool = Pool(self.concurrent_num)
            # Fix: `is None` identity check; comprehension replaces map+lambda.
            if self.file_hash_infos is None:
                file_infos = [(file_url, None) for file_url in self.file_urls]
            else:
                file_infos = zip(self.file_urls, self.file_hash_infos)
            for file_info in file_infos:
                self.start_greenlet(file_info)
            self.pool.join()

            if not self.stop_or_pause and not self.error_flag:
                self.signal.emit("finish")

    def stop(self, pause_flag=False):
        '''
        Abort all greenlets/pools and the child downloads. Emits "pause"
        if pause_flag else "stop".
        '''
        self.stop_or_pause = True

        for greenlet in self.fetch_size_greenlet_dict.values():
            greenlet.kill()
        if self.fetch_size_pool:
            self.fetch_size_pool.kill()

        for fetch_file in self.fetch_file_dict.values():
            fetch_file.stop(pause_flag)
        for greenlet in self.greenlet_dict.values():
            greenlet.kill()
        if self.pool:
            self.pool.kill()

        if pause_flag:
            self.signal.emit("pause")
        else:
            self.signal.emit("stop")

    def pause(self):
        '''Pause: stop all work but signal "pause" instead of "stop".'''
        self.stop(True)

    def update(self, update_info):
        '''
        Recompute total downloaded bytes and transfer speed, then emit
        "update" with (percent_done, bytes_per_second). Rate-limited to
        once per second.
        '''
        current_time = time.time()
        if current_time - self.update_time > 1:
            downloaded_size = 0
            for fetch_file in self.fetch_file_dict.values():
                if hasattr(fetch_file, "update_info"):
                    downloaded_size += fetch_file.update_info["downloaded_size"]

            if self.update_time == -1:
                # No earlier sample exists, so a speed cannot be computed.
                speed = 0
            else:
                speed = float(downloaded_size - self.downloaded_size) / (current_time - self.update_time)

            self.update_time = current_time
            self.downloaded_size = downloaded_size

            # Fix: avoid ZeroDivisionError when total_size is still 0.
            if self.total_size:
                percent = (float(self.downloaded_size) / self.total_size) * 100
            else:
                percent = 0.0
            self.signal.emit("update", percent, int(speed))

    def start_greenlet(self, file_info):
        '''
        Launch a FetchFile greenlet for one (file_url, file_hash_info)
        tuple.

        Fix: Python-2-only tuple parameter unpacking in the signature is
        replaced with an in-body unpack; the call convention is unchanged.
        '''
        file_url, file_hash_info = file_info
        fetch_file = FetchFile(
            file_url=file_url,
            file_hash_info=file_hash_info,
            file_save_dir=self.file_save_dir,
            file_size=self.size_dict[file_url],
            )
        fetch_file.signal.register_event("update", self.update)
        fetch_file.signal.register_event("error", self.emit_error)
        greenlet = Greenlet(lambda f: f.start(), fetch_file)
        self.fetch_file_dict[file_url] = fetch_file
        self.greenlet_dict[file_url] = greenlet
        self.pool.start(greenlet)