def start_worker(self, row):
    """Launch an Uploader thread for the task in `row` and wire its signals.

    All worker signals arrive on the worker thread; each `on_*` callback
    trampolines into its `do_*` counterpart via GLib.idle_add so that
    TreeModel rows and the task DB are only touched on the GTK main loop.

    Does nothing if a worker for this fid is already running.
    """
    def on_worker_slice_sent(worker, fid, slice_end, md5):
        GLib.idle_add(do_worker_slice_sent, fid, slice_end, md5)

    def do_worker_slice_sent(fid, slice_end, md5):
        # Progress update for one uploaded slice; also records the slice
        # md5 so an interrupted upload can resume.
        if fid not in self.workers:
            return
        row = self.get_row_by_fid(fid)
        if not row:
            return
        row[CURRSIZE_COL] = slice_end
        total_size = util.get_human_size(row[SIZE_COL])[0]
        curr_size = util.get_human_size(slice_end, False)[0]
        # Guard: a zero-byte task would otherwise raise ZeroDivisionError.
        if row[SIZE_COL]:
            row[PERCENT_COL] = int(slice_end / row[SIZE_COL] * 100)
        else:
            row[PERCENT_COL] = 100
        row[HUMANSIZE_COL] = '{0} / {1}'.format(curr_size, total_size)
        self.update_task_db(row)
        self.add_slice_db(fid, slice_end, md5)

    def on_worker_merge_files(worker, fid):
        GLib.idle_add(do_worker_merge_files, fid)

    def do_worker_merge_files(fid):
        # All slices are uploaded; ask the server to merge them into the
        # final file (create_superfile), then clean up the slice records.
        def on_create_superfile(pcs_file, error=None):
            if error or not pcs_file:
                # Was a bare debug print(); route through the module logger
                # like the rest of the error handling in this file.
                logger.error('start_worker.on_create_superfile: %s, %s'
                             % (pcs_file, error))
                do_worker_error(fid)
                return
            else:
                self.remove_slice_db(fid)
                do_worker_uploaded(fid)

        block_list = self.get_slice_db(fid)
        if fid not in self.workers:
            return
        row = self.get_row_by_fid(fid)
        if not row:
            return
        if not block_list:
            # TODO: no slice records found for this fid — nothing to merge.
            pass
        else:
            gutil.async_call(
                pcs.create_superfile, self.app.cookie, row[PATH_COL],
                block_list, callback=on_create_superfile)

    def on_worker_uploaded(worker, fid):
        GLib.idle_add(do_worker_uploaded, fid)

    def do_worker_uploaded(fid):
        # Mark the task finished, persist it, drop the worker and let the
        # scheduler start the next queued task.
        if fid not in self.workers:
            return
        row = self.get_row_by_fid(fid)
        if not row:
            return
        row[PERCENT_COL] = 100
        total_size = util.get_human_size(row[SIZE_COL])[0]
        row[HUMANSIZE_COL] = '{0} / {1}'.format(total_size, total_size)
        row[STATE_COL] = State.FINISHED
        row[STATENAME_COL] = StateNames[State.FINISHED]
        self.update_task_db(row)
        self.workers.pop(fid, None)
        self.app.toast(_('{0} uploaded').format(row[NAME_COL]))
        self.scan_tasks()

    def on_worker_disk_error(worker, fid):
        GLib.idle_add(do_worker_error, fid)

    def on_worker_network_error(worker, fid):
        GLib.idle_add(do_worker_error, fid)

    def do_worker_error(fid):
        # Flag the task as errored; keep the worker thread running down
        # on its own (stop=False) and move on to other tasks.
        row = self.get_row_by_fid(fid)
        if not row:
            return
        row[STATE_COL] = State.ERROR
        row[STATENAME_COL] = StateNames[State.ERROR]
        self.update_task_db(row)
        self.remove_worker(fid, stop=False)
        self.scan_tasks()

    if row[FID_COL] in self.workers:
        return
    row[STATE_COL] = State.UPLOADING
    row[STATENAME_COL] = StateNames[State.UPLOADING]
    worker = Uploader(self, row, self.app.cookie, self.app.tokens)
    self.workers[row[FID_COL]] = (worker, row)
    # For slice upload
    worker.connect('slice-sent', on_worker_slice_sent)
    worker.connect('merge-files', on_worker_merge_files)
    # For upload_small_files/rapid_upload
    worker.connect('uploaded', on_worker_uploaded)
    worker.connect('disk-error', on_worker_disk_error)
    worker.connect('network-error', on_worker_network_error)
    worker.start()
def start_worker(self, row):
    """Spin up an Uploader for the task in `row` and hook up its signals.

    Worker signals are emitted off the main thread, so every signal
    handler defers the real work to the GTK main loop with GLib.idle_add.
    A task that already has a live worker is left alone.
    """
    # Already uploading? Nothing to do. (Safe to check first: the closure
    # definitions below have no side effects.)
    if row[FID_COL] in self.workers:
        return

    def handle_slice_sent(fid, slice_end, md5):
        # Main-loop side: refresh progress columns and persist the slice.
        if fid not in self.workers:
            return
        task_row = self.get_row_by_fid(fid)
        if not task_row:
            return
        task_row[CURRSIZE_COL] = slice_end
        human_total = util.get_human_size(task_row[SIZE_COL])[0]
        human_curr = util.get_human_size(slice_end, False)[0]
        task_row[PERCENT_COL] = int(slice_end / task_row[SIZE_COL] * 100)
        task_row[HUMANSIZE_COL] = '{0} / {1}'.format(human_curr, human_total)
        self.update_task_db(task_row)
        self.add_slice_db(fid, slice_end, md5)

    def on_slice_sent(worker, fid, slice_end, md5):
        GLib.idle_add(handle_slice_sent, fid, slice_end, md5)

    def handle_merge_files(fid):
        # Every slice is on the server: request the merge, then clear the
        # local slice records once the superfile exists.
        def on_create_superfile(pcs_file, error=None):
            if not pcs_file or error:
                self.app.toast(_('Failed to upload, please try again'))
                logger.error('UploadPage.do_worker_merge_files: %s, %s'
                             % (pcs_file, error))
                handle_error(fid)
            else:
                self.remove_slice_db(fid)
                handle_uploaded(fid)

        block_list = self.get_slice_db(fid)
        if fid not in self.workers:
            return
        task_row = self.get_row_by_fid(fid)
        if not task_row:
            return
        if block_list:
            gutil.async_call(pcs.create_superfile, self.app.cookie,
                             task_row[PATH_COL], block_list,
                             callback=on_create_superfile)
        else:
            # TODO: no slice records for this fid — nothing to merge.
            pass

    def on_merge_files(worker, fid):
        GLib.idle_add(handle_merge_files, fid)

    def handle_uploaded(fid):
        # Finalize the task: mark finished, persist, notify, and let the
        # scheduler pick up the next pending task.
        if fid not in self.workers:
            return
        task_row = self.get_row_by_fid(fid)
        if not task_row:
            return
        task_row[PERCENT_COL] = 100
        human_total = util.get_human_size(task_row[SIZE_COL])[0]
        task_row[HUMANSIZE_COL] = '{0} / {1}'.format(human_total, human_total)
        task_row[STATE_COL] = State.FINISHED
        task_row[STATENAME_COL] = StateNames[State.FINISHED]
        self.update_task_db(task_row, force=True)
        self.workers.pop(fid, None)
        self.app.toast(_('{0} uploaded').format(task_row[NAME_COL]))
        self.app.home_page.reload()
        self.scan_tasks()

    def on_uploaded(worker, fid):
        GLib.idle_add(handle_uploaded, fid)

    def handle_error(fid):
        # Shared failure path for disk and network errors: flag the row,
        # detach the worker without force-stopping it, keep scheduling.
        task_row = self.get_row_by_fid(fid)
        if not task_row:
            return
        task_row[STATE_COL] = State.ERROR
        task_row[STATENAME_COL] = StateNames[State.ERROR]
        self.update_task_db(task_row)
        self.remove_worker(fid, stop=False)
        self.scan_tasks()

    def on_worker_failed(worker, fid):
        GLib.idle_add(handle_error, fid)

    row[STATE_COL] = State.UPLOADING
    row[STATENAME_COL] = StateNames[State.UPLOADING]
    worker = Uploader(self, row, self.app.cookie, self.app.tokens)
    self.workers[row[FID_COL]] = (worker, row)
    signal_table = (
        ('slice-sent', on_slice_sent),      # slice upload progress
        ('merge-files', on_merge_files),    # slice upload finalization
        ('uploaded', on_uploaded),          # upload_small_files/rapid_upload
        ('disk-error', on_worker_failed),
        ('network-error', on_worker_failed),
    )
    for signal_name, handler in signal_table:
        worker.connect(signal_name, handler)
    worker.start()