Beispiel #1
0
 def get_download_link(self):
     """Resolve the direct download URL for this row, then start the
     transfer.

     Stores the URL in ``self.red_url``.  On failure, reports via
     ``network_error()`` and aborts instead of downloading.
     """
     self.red_url = pcs.get_download_link(self.cookie, self.tokens,
                                          self.row[PATH_COL])
     if not self.red_url:
         print('Error: Failed to get download link')
         self.network_error()
         # FIX: without this return the code fell through and called
         # self.download() with no URL.
         return
     self.download()
Beispiel #2
0
 def get_download_link(self):
     """Resolve the direct download URL for this row, then start the
     transfer.

     Stores the URL in ``self.red_url``.  On failure, reports via
     ``network_error()`` and aborts instead of downloading.
     """
     self.red_url = pcs.get_download_link(
             self.cookie, self.tokens, self.row[PATH_COL])
     if not self.red_url:
         print('Failed to get download link')
         self.network_error()
         # FIX: without this return the code fell through and called
         # self.download() with no URL.
         return
     self.download()
Beispiel #3
0
 def get_download_link(self):
     """Fetch file metadata, resolve the dlink to a direct URL, and
     start the download; emit 'network-error' on any failure."""
     meta = pcs.get_metas(self.cookie, self.tokens, self.row[PATH_COL])
     if not meta or meta['errno'] != 0 or 'info' not in meta:
         print('Error: failed to get meta info:', meta)
         self.emit('network-error', self.row[FSID_COL])
         return
     dlink = meta['info'][0]['dlink']
     red_url, req_id = pcs.get_download_link(self.cookie, dlink)
     if not req_id:
         print('Error: failed to get req_id:', req_id)
         self.emit('network-error', self.row[FSID_COL])
         return
     self.red_url = red_url
     self.download()
Beispiel #4
0
 def get_download_link(self):
     """Fetch and validate file metadata for this row, resolve its
     dlink to a direct URL, and start the download.

     Any failure (bad meta, empty file list, FSID mismatch, no
     request id) is reported through ``network_error()``.
     """
     meta = pcs.get_metas(self.cookie, self.tokens, self.row[PATH_COL])
     meta_ok = meta and meta['errno'] == 0 and 'info' in meta
     if not meta_ok:
         self.network_error()
         return
     file_list = meta['info']
     if not file_list:
         print('pcs_files in meta is empty, abort')
         self.network_error()
         return
     first = file_list[0]
     if str(first['fs_id']) != self.row[FSID_COL]:
         print('FSID not match, abort.')
         self.network_error()
         return
     red_url, req_id = pcs.get_download_link(self.cookie, first['dlink'])
     if req_id:
         self.red_url = red_url
         self.download()
     else:
         self.network_error()
Beispiel #5
0
 def get_download_link(self):
     """Validate this row's metadata, turn its dlink into a direct
     download URL, then kick off the transfer.

     Every failure path calls ``network_error()`` and stops.
     """
     meta = pcs.get_metas(self.cookie, self.tokens, self.row[PATH_COL])
     if not meta or meta['errno'] != 0 or 'info' not in meta:
         self.network_error()
         return
     entries = meta['info']
     if not entries:
         print('pcs_files in meta is empty, abort')
         self.network_error()
         return
     entry = entries[0]
     if self.row[FSID_COL] != str(entry['fs_id']):
         print('FSID not match, abort.')
         self.network_error()
         return
     link, request_id = pcs.get_download_link(self.cookie, entry['dlink'])
     if not request_id:
         self.network_error()
         return
     self.red_url = link
     self.download()
Beispiel #6
0
    def download(self):
        """Download ``self.row`` into its save directory with worker threads.

        Resumes a partial download when both the conf (status) file and
        the temp file exist; otherwise the size is taken from the
        Content-Length header and the byte range is split across worker
        threads.  Progress and errors are reported via the 'started',
        'received', 'downloaded', 'network-error' and 'disk-error'
        signals.
        """
        row = self.row
        if not os.path.exists(row[SAVEDIR_COL]):
            os.makedirs(row[SAVEDIR_COL], exist_ok=True)
        filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
                row[SAVEDIR_COL], row[SAVENAME_COL])

        if os.path.exists(filepath):
            if self.download_mode == DownloadMode.IGNORE:
                self.emit('downloaded', row[FSID_COL])
                logger.debug('File exists, ignored!')
                return
            elif self.download_mode == DownloadMode.NEWCOPY:
                # Keep the existing file; download to "<name>_<time><ext>".
                name, ext = os.path.splitext(filepath)
                filepath = '{0}_{1}{2}'.format(name, util.curr_time(), ext)

        url = pcs.get_download_link(self.cookie, self.tokens, row[PATH_COL])
        if not url:
            row[STATE_COL] = State.ERROR
            self.emit('network-error', row[FSID_COL])
            # logger.warn is a deprecated alias of logger.warning
            logger.warning('Failed to get url to download')
            return

        if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
            # Resume: the conf file stores one [start, end, received]
            # triple per worker thread.
            with open(conf_filepath) as conf_fh:
                status = json.load(conf_fh)
            threads = len(status)
            file_exists = True
            fh = open(tmp_filepath, 'rb+')
            fh.seek(0)
        else:
            req = net.urlopen_simple(url)
            if not req:
                logger.warning('Failed to get url to download')
                self.emit('network-error', row[FSID_COL])
                return
            content_length = req.getheader('Content-Length')
            # Fixed: baiduPCS using non iso-8859-1 codec in http headers
            if not content_length:
                match = re.search(r'\sContent-Length:\s*(\d+)',
                                  str(req.headers))
                if not match:
                    logger.warning('Failed to get url to download')
                    self.emit('network-error', row[FSID_COL])
                    return
                content_length = match.group(1)
            size = int(content_length)
            if size == 0:
                # Zero-byte file: nothing to fetch, just create it.
                open(filepath, 'a').close()
                self.emit('downloaded', row[FSID_COL])
                return
            elif size <= SMALL_FILE_SIZE:
                threads = 1
            else:
                threads = self.default_threads
            average_size, pad_size = divmod(size, threads)
            file_exists = False
            status = []
            fh = open(tmp_filepath, 'wb')
            try:
                # Pre-allocate so each worker can seek into its own range.
                fh.truncate(size)
            except (OSError, IOError):
                # FIX: was `truncate.format_exc()`, a NameError that masked
                # the real disk error (e.g. disk full).
                logger.error(traceback.format_exc())
                self.emit('disk-error', row[FSID_COL], tmp_filepath)
                return

        # task list
        tasks = []
        # message queue
        queue = Queue()
        # threads lock
        lock = threading.RLock()
        for id_ in range(threads):
            if file_exists:
                start_size, end_size, received = status[id_]
                if start_size + received >= end_size:
                    # part of file has been downloaded
                    continue
                start_size += received
            else:
                start_size = id_ * average_size
                end_size = start_size + average_size - 1
                if id_ == threads - 1:
                    # Last worker also covers the division remainder.
                    end_size = end_size + pad_size + 1
                status.append([start_size, end_size, 0])
            task = DownloadBatch(id_, queue, url, lock, start_size, end_size,
                                 fh, self.timeout)
            tasks.append(task)

        for task in tasks:
            task.start()

        try:
            conf_count = 0
            done = 0
            self.emit('started', row[FSID_COL])
            while row[STATE_COL] == State.DOWNLOADING:
                id_, received = queue.get()
                # FINISHED
                if received == BATCH_FINISISHED:
                    done += 1
                    if done == len(tasks):
                        row[STATE_COL] = State.FINISHED
                        break
                    else:
                        continue
                # error occurs
                elif received == BATCH_ERROR:
                    row[STATE_COL] = State.ERROR
                    break
                status[id_][2] += received
                conf_count += 1
                # flush data and status to disk
                if conf_count > THRESHOLD_TO_FLUSH:
                    with lock:
                        if not fh.closed:
                            fh.flush()
                    # FIX: use a dedicated handle here; the original rebound
                    # the name `fh`, clobbering the temp-file handle with a
                    # closed conf-file handle.
                    with open(conf_filepath, 'w') as conf_fh:
                        json.dump(status, conf_fh)
                    conf_count = 0
                received_total = sum(t[2] for t in status)
                self.emit('received', row[FSID_COL], received, received_total)
        except Exception:
            logger.error(traceback.format_exc())
            row[STATE_COL] = State.ERROR
        with lock:
            if not fh.closed:
                fh.close()
        for task in tasks:
            if task.isAlive():
                task.stop()
        with open(conf_filepath, 'w') as conf_fh:
            json.dump(status, conf_fh)

        if row[STATE_COL] == State.CANCELED:
            # FIX: guard the remove; the temp file may already be gone.
            if os.path.exists(tmp_filepath):
                os.remove(tmp_filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
        elif row[STATE_COL] == State.ERROR:
            self.emit('network-error', row[FSID_COL])
        elif row[STATE_COL] == State.FINISHED:
            self.emit('downloaded', row[FSID_COL])
            os.rename(tmp_filepath, filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
0
    def download(self, o):
        """Interactively download the currently selected file.

        Prompts for a destination directory in the curses window *o*
        (default ~/), then streams the file in blocks while drawing
        progress.  A conf (status) file beside the temp file allows
        resuming; pressing ESC saves the current offset and aborts.
        Only acts when the highlighted item is a regular file.
        """
        if self.files and self.files[self.item_pos] and not self.files[self.item_pos]['isdir']:
            self.screen.nodelay(1)
            end_size = self.files[self.item_pos]['size']
            path = self.files[self.item_pos]['path']
            o.clear()
            o.border(0)
            curses.echo()
            o.addstr(1, 2, "DownloadPath: (Default: ~/)")
            o.refresh()
            download_path = o.getstr(2, 2, 40).decode(encoding='utf-8')
            if download_path == "":
                download_path = expanduser("~")
            else:
                download_path = expanduser(download_path)
            filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
                download_path, path[1:])
            if not os.path.exists(os.path.dirname(tmp_filepath)):
                os.makedirs(os.path.dirname(tmp_filepath), exist_ok=True)
            o.clear()
            o.border(0)
            o.refresh()
            if os.path.exists(filepath):
                o.clear()
                o.border(0)
                o.addstr(1, 2, "File Already Exists.")
                o.addstr(2, 2, "Press Any Key to Continue")
                o.refresh()
                self.screen.getch()
                return
            o.addstr(1, 2, "Getting Download links...")
            o.refresh()
            url = pcs.get_download_link(self.cookie, self.tokens, path)
            if not url:
                o.clear()
                o.border(0)
                o.addstr(1, 2, "Failed to get url")
                o.addstr(2, 2, "Press ESC to abort")
                o.refresh()
                self.screen.getch()
                return

            o.addstr(2, 2, "Prepare file...")
            o.refresh()
            if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
                # Resume: the conf file stores [start, end, received].
                with open(conf_filepath) as conf_fh:
                    status = json.load(conf_fh)
                file_exists = True
                fh = open(tmp_filepath, 'rb+')
            else:
                req = net.urlopen_simple(url)
                if not req:
                    o.clear()
                    o.border(0)
                    o.addstr(1, 2, "Failed to request")
                    o.addstr(2, 2, "Press ESC to abort")
                    o.refresh()
                    self.screen.getch()
                    return
                content_length = req.getheader('Content-Length')
                # Fall back to scraping the raw header text (baiduPCS may
                # emit headers in a non iso-8859-1 codec).
                if not content_length:
                    match = re.search(r'\sContent-Length:\s*(\d+)',
                                      str(req.headers))
                    if not match:
                        o.clear()
                        o.border(0)
                        o.addstr(1, 2, "Failed to match content-length")
                        o.addstr(2, 2, "Press ESC to abort")
                        o.refresh()
                        self.screen.getch()
                        return
                    content_length = match.group(1)
                size = int(content_length)
                if size == 0:
                    # Zero-byte file: nothing to fetch, just create it.
                    open(filepath, 'a').close()
                    o.clear()
                    o.border(0)
                    o.addstr(1, 2, "File already downloaded")
                    o.addstr(2, 2, "Press ESC to abort")
                    o.refresh()
                    self.screen.getch()
                    return
                file_exists = False
                fh = open(tmp_filepath, 'wb')
                try:
                    # Pre-allocate the whole file.
                    fh.truncate(size)
                except (OSError, IOError):
                    o.clear()
                    o.border(0)
                    o.addstr(1, 2, "Disk error (disk is full?)")
                    o.addstr(2, 2, "Press ESC to abort")
                    o.refresh()
                    self.screen.getch()
                    return

            start_size = 0
            if file_exists:
                start_size, end_size, received = status
            offset = start_size
            count = 0
            while offset < end_size:
                status = [offset, end_size, 0]
                count += 1
                o.clear()
                o.border(0)
                c = self.screen.getch()
                if c == 27:
                    # ESC: persist the current offset for a later resume.
                    # FIX: use a dedicated handle; the original rebound the
                    # name `fh`, clobbering the temp-file handle.
                    with open(conf_filepath, 'w') as conf_fh:
                        json.dump(status, conf_fh)
                    break

                o.addstr(1, 2, "Downloading: {0} ... ".format(path))
                o.refresh()
                req = self.get_req(url, offset, end_size)
                if not req:
                    o.addstr(2, 2, "Network error{0}, retry after 3s.".format(count))
                    o.addstr(3, 2, "Press ESC to abort.")
                    o.refresh()
                    time.sleep(3)
                    continue
                else:
                    try:
                        block = req.read(self.block_size)
                    # FIX: bare `except:` also swallowed KeyboardInterrupt /
                    # SystemExit; catch runtime errors only.
                    except Exception:
                        # FIX: dropped the no-op `.format(offset, end_size)`
                        # call — the message has no placeholders.
                        o.addstr(2, 2, "Can not Read block, retry.")
                        time.sleep(1)
                        continue
                    o.addstr(2, 2, "Process: {0} / {1}".format(offset, end_size))
                    fh.seek(offset)
                    fh.write(block)
                    offset += len(block)
                o.addstr(3, 2, "Press ESC to abort")
                o.refresh()
            # FIX: close the temp-file handle (it was leaked before).
            fh.close()
Beispiel #8
0
    def download(self):
        """Download ``self.row`` into its save directory with worker threads.

        Resumes a partial download when both the conf (status) file and
        the temp file exist; otherwise the size is taken from the
        Content-Length header and the byte range is split across worker
        threads.  Progress and errors are reported via the 'started',
        'received', 'downloaded', 'network-error' and 'disk-error'
        signals.
        """
        row = self.row
        if not os.path.exists(row[SAVEDIR_COL]):
            os.makedirs(row[SAVEDIR_COL], exist_ok=True)
        filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
            row[SAVEDIR_COL], row[SAVENAME_COL])

        if os.path.exists(filepath):
            if self.download_mode == DownloadMode.IGNORE:
                self.emit('downloaded', row[FSID_COL])
                logger.debug('File exists, ignored!')
                return
            elif self.download_mode == DownloadMode.NEWCOPY:
                # Keep the existing file; download to "<name>_<time><ext>".
                name, ext = os.path.splitext(filepath)
                filepath = '{0}_{1}{2}'.format(name, util.curr_time(), ext)

        url = pcs.get_download_link(self.cookie, self.tokens, row[PATH_COL])
        if not url:
            row[STATE_COL] = State.ERROR
            self.emit('network-error', row[FSID_COL])
            # logger.warn is a deprecated alias of logger.warning
            logger.warning('Failed to get url to download')
            return

        if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
            # Resume: the conf file stores one [start, end, received]
            # triple per worker thread.
            with open(conf_filepath) as conf_fh:
                status = json.load(conf_fh)
            threads = len(status)
            file_exists = True
            fh = open(tmp_filepath, 'rb+')
            fh.seek(0)
        else:
            req = net.urlopen_simple(url)
            if not req:
                logger.warning('Failed to get url to download')
                self.emit('network-error', row[FSID_COL])
                return
            content_length = req.getheader('Content-Length')
            # Fixed: baiduPCS using non iso-8859-1 codec in http headers
            if not content_length:
                match = re.search(r'\sContent-Length:\s*(\d+)',
                                  str(req.headers))
                if not match:
                    logger.warning('Failed to get url to download')
                    self.emit('network-error', row[FSID_COL])
                    return
                content_length = match.group(1)
            size = int(content_length)
            if size == 0:
                # Zero-byte file: nothing to fetch, just create it.
                open(filepath, 'a').close()
                self.emit('downloaded', row[FSID_COL])
                return
            elif size <= SMALL_FILE_SIZE:
                threads = 1
            else:
                threads = self.default_threads
            average_size, pad_size = divmod(size, threads)
            file_exists = False
            status = []
            fh = open(tmp_filepath, 'wb')
            try:
                # Pre-allocate so each worker can seek into its own range.
                fh.truncate(size)
            except (OSError, IOError):
                # FIX: was `truncate.format_exc()`, a NameError that masked
                # the real disk error (e.g. disk full).
                logger.error(traceback.format_exc())
                self.emit('disk-error', row[FSID_COL], tmp_filepath)
                return

        # task list
        tasks = []
        # message queue
        queue = Queue()
        # threads lock
        lock = threading.RLock()
        for id_ in range(threads):
            if file_exists:
                start_size, end_size, received = status[id_]
                if start_size + received >= end_size:
                    # part of file has been downloaded
                    continue
                start_size += received
            else:
                start_size = id_ * average_size
                end_size = start_size + average_size - 1
                if id_ == threads - 1:
                    # Last worker also covers the division remainder.
                    end_size = end_size + pad_size + 1
                status.append([start_size, end_size, 0])
            task = DownloadBatch(id_, queue, url, lock, start_size, end_size,
                                 fh, self.timeout)
            tasks.append(task)

        for task in tasks:
            task.start()

        try:
            conf_count = 0
            done = 0
            self.emit('started', row[FSID_COL])
            while row[STATE_COL] == State.DOWNLOADING:
                id_, received = queue.get()
                # FINISHED
                if received == BATCH_FINISISHED:
                    done += 1
                    if done == len(tasks):
                        row[STATE_COL] = State.FINISHED
                        break
                    else:
                        continue
                # error occurs
                elif received == BATCH_ERROR:
                    row[STATE_COL] = State.ERROR
                    break
                status[id_][2] += received
                conf_count += 1
                # flush data and status to disk
                if conf_count > THRESHOLD_TO_FLUSH:
                    with lock:
                        if not fh.closed:
                            fh.flush()
                    # FIX: use a dedicated handle here; the original rebound
                    # the name `fh`, clobbering the temp-file handle with a
                    # closed conf-file handle.
                    with open(conf_filepath, 'w') as conf_fh:
                        json.dump(status, conf_fh)
                    conf_count = 0
                received_total = sum(t[2] for t in status)
                self.emit('received', row[FSID_COL], received, received_total)
        except Exception:
            logger.error(traceback.format_exc())
            row[STATE_COL] = State.ERROR
        with lock:
            if not fh.closed:
                fh.close()
        for task in tasks:
            if task.isAlive():
                task.stop()
        with open(conf_filepath, 'w') as conf_fh:
            json.dump(status, conf_fh)

        if row[STATE_COL] == State.CANCELED:
            if os.path.exists(tmp_filepath):
                os.remove(tmp_filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
        elif row[STATE_COL] == State.ERROR:
            self.emit('network-error', row[FSID_COL])
        elif row[STATE_COL] == State.FINISHED:
            self.emit('downloaded', row[FSID_COL])
            os.rename(tmp_filepath, filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
0
    def download(self):
        """Download ``self.row`` with multiple worker threads.

        Resumes from the conf (status) file when both it and the temp
        file exist; otherwise the size comes from the Content-Length
        header and the range is split across ``self.default_threads``
        workers.  Progress is reported via the 'started', 'received',
        'downloaded' and 'network-error' signals.
        """
        row = self.row
        if not os.path.exists(row[SAVEDIR_COL]):
            os.makedirs(row[SAVEDIR_COL], exist_ok=True)
        filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
                row[SAVEDIR_COL], row[SAVENAME_COL])

        if os.path.exists(filepath):
            print('file exists:', filepath)
            self.emit('downloaded', row[FSID_COL])
            # TODO: ask to confirm overwriting
            # File exists, do nothing
            return

        url = pcs.get_download_link(self.cookie, self.tokens, row[PATH_COL])
        if not url:
            print('Error: Failed to get download link')
            row[STATE_COL] = State.ERROR
            self.emit('network-error', row[FSID_COL])
            return

        if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
            # Resume: one [start, end, received] triple per worker.
            with open(conf_filepath) as conf_fh:
                status = json.load(conf_fh)
            threads = len(status)
            file_exists = True
            # FIX: was 'ab' — append mode ignores seek() for writes, so
            # resumed data landed at end-of-file; open read/write instead.
            fh = open(tmp_filepath, 'rb+')
        else:
            req = request.urlopen(url)
            if not req:
                self.emit('network-error', row[FSID_COL])
                return
            content_length = req.getheader('Content-Length')
            # Fixed: baiduPCS using non iso-8859-1 codec in http headers
            if not content_length:
                match = re.search(r'\sContent-Length:\s*(\d+)',
                                  str(req.headers))
                if not match:
                    self.emit('network-error', row[FSID_COL])
                    return
                content_length = match.group(1)
            size = int(content_length)
            threads = self.default_threads
            average_size, pad_size = divmod(size, threads)
            file_exists = False
            status = []
            fh = open(tmp_filepath, 'wb')
            # Pre-allocate so each worker can seek into its own range.
            fh.truncate(size)

        # task list
        tasks = []
        # message queue
        queue = Queue()
        # threads lock
        lock = threading.RLock()
        for id_ in range(threads):
            if file_exists:
                start_size, end_size, received = status[id_]
                start_size += received
            else:
                start_size = id_ * average_size
                end_size = start_size + average_size - 1
                if id_ == threads - 1:
                    # Last worker also covers the division remainder.
                    end_size = end_size + pad_size + 1
                status.append([start_size, end_size, 0])
            task = DownloadBatch(id_, queue, url, lock, start_size, end_size,
                                 fh, self.timeout)
            tasks.append(task)

        for task in tasks:
            task.start()

        try:
            conf_count = 0
            done = 0
            self.emit('started', row[FSID_COL])
            while row[STATE_COL] == State.DOWNLOADING:
                id_, received = queue.get()
                # FINISHED
                if received == BATCH_FINISISHED:
                    done += 1
                    if done == len(status):
                        row[STATE_COL] = State.FINISHED
                        break
                    else:
                        continue
                elif received == BATCH_ERROR:
                    row[STATE_COL] = State.ERROR
                    break
                status[id_][2] += received
                conf_count += 1
                if conf_count > THRESHOLD_TO_FLUSH:
                    # FIX: use a dedicated handle; the original rebound the
                    # name `fh`, so the temp-file handle was lost and the
                    # later fh.close() closed the wrong (already closed) file.
                    with open(conf_filepath, 'w') as conf_fh:
                        conf_fh.write(json.dumps(status))
                    conf_count = 0
                received_total = sum(t[2] for t in status)
                self.emit('received', row[FSID_COL], received_total)
                #self.emit('received', row[FSID_COL], received)
        except Exception as e:
            print(e)
            for task in tasks:
                task.stop()
            row[STATE_COL] = State.ERROR
        fh.close()
        with open(conf_filepath, 'w') as conf_fh:
            conf_fh.write(json.dumps(status))

        for task in tasks:
            # FIX: condition was inverted (`not task.isAlive()`); stop the
            # threads that are still running, not the dead ones.
            if task.isAlive():
                task.stop()

        if row[STATE_COL] == State.CANCELED:
            # FIX: `tmp_filepah` typo raised NameError on cancel.
            os.remove(tmp_filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
        elif row[STATE_COL] == State.FINISHED:
            self.emit('downloaded', row[FSID_COL])
            os.rename(tmp_filepath, filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
Beispiel #10
0
    def download(self, o):
        """Interactively download the currently selected file.

        Prompts for a destination directory in the curses window *o*
        (default ~/), then streams the file in blocks while drawing
        progress.  A conf (status) file beside the temp file allows
        resuming; pressing ESC saves the current offset and aborts.
        Only acts when the highlighted item is a regular file.
        """
        if self.files and self.files[
                self.item_pos] and not self.files[self.item_pos]['isdir']:
            self.screen.nodelay(1)
            end_size = self.files[self.item_pos]['size']
            path = self.files[self.item_pos]['path']
            o.clear()
            o.border(0)
            curses.echo()
            o.addstr(1, 2, "DownloadPath: (Default: ~/)")
            o.refresh()
            download_path = o.getstr(2, 2, 40).decode(encoding='utf-8')
            if download_path == "":
                download_path = expanduser("~")
            else:
                download_path = expanduser(download_path)
            filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
                download_path, path[1:])
            if not os.path.exists(os.path.dirname(tmp_filepath)):
                os.makedirs(os.path.dirname(tmp_filepath), exist_ok=True)
            o.clear()
            o.border(0)
            o.refresh()
            if os.path.exists(filepath):
                o.clear()
                o.border(0)
                o.addstr(1, 2, "File Already Exists.")
                o.addstr(2, 2, "Press Any Key to Continue")
                o.refresh()
                self.screen.getch()
                return
            o.addstr(1, 2, "Getting Download links...")
            o.refresh()
            url = pcs.get_download_link(self.cookie, self.tokens, path)
            if not url:
                o.clear()
                o.border(0)
                o.addstr(1, 2, "Failed to get url")
                o.addstr(2, 2, "Press ESC to abort")
                o.refresh()
                self.screen.getch()
                return

            o.addstr(2, 2, "Prepare file...")
            o.refresh()
            if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
                # Resume: the conf file stores [start, end, received].
                with open(conf_filepath) as conf_fh:
                    status = json.load(conf_fh)
                file_exists = True
                fh = open(tmp_filepath, 'rb+')
            else:
                req = net.urlopen_simple(url)
                if not req:
                    o.clear()
                    o.border(0)
                    o.addstr(1, 2, "Failed to request")
                    o.addstr(2, 2, "Press ESC to abort")
                    o.refresh()
                    self.screen.getch()
                    return
                content_length = req.getheader('Content-Length')
                # Fall back to scraping the raw header text (baiduPCS may
                # emit headers in a non iso-8859-1 codec).
                if not content_length:
                    match = re.search(r'\sContent-Length:\s*(\d+)',
                                      str(req.headers))
                    if not match:
                        o.clear()
                        o.border(0)
                        o.addstr(1, 2, "Failed to match content-length")
                        o.addstr(2, 2, "Press ESC to abort")
                        o.refresh()
                        self.screen.getch()
                        return
                    content_length = match.group(1)
                size = int(content_length)
                if size == 0:
                    # Zero-byte file: nothing to fetch, just create it.
                    open(filepath, 'a').close()
                    o.clear()
                    o.border(0)
                    o.addstr(1, 2, "File already downloaded")
                    o.addstr(2, 2, "Press ESC to abort")
                    o.refresh()
                    self.screen.getch()
                    return
                file_exists = False
                fh = open(tmp_filepath, 'wb')
                try:
                    # Pre-allocate the whole file.
                    fh.truncate(size)
                except (OSError, IOError):
                    o.clear()
                    o.border(0)
                    o.addstr(1, 2, "Disk error (disk is full?)")
                    o.addstr(2, 2, "Press ESC to abort")
                    o.refresh()
                    self.screen.getch()
                    return

            start_size = 0
            if file_exists:
                start_size, end_size, received = status
            offset = start_size
            count = 0
            while offset < end_size:
                status = [offset, end_size, 0]
                count += 1
                o.clear()
                o.border(0)
                c = self.screen.getch()
                if c == 27:
                    # ESC: persist the current offset for a later resume.
                    # FIX: use a dedicated handle; the original rebound the
                    # name `fh`, clobbering the temp-file handle.
                    with open(conf_filepath, 'w') as conf_fh:
                        json.dump(status, conf_fh)
                    break

                o.addstr(1, 2, "Downloading: {0} ... ".format(path))
                o.refresh()
                req = self.get_req(url, offset, end_size)
                if not req:
                    o.addstr(2, 2,
                             "Network error{0}, retry after 3s.".format(count))
                    o.addstr(3, 2, "Press ESC to abort.")
                    o.refresh()
                    time.sleep(3)
                    continue
                else:
                    try:
                        block = req.read(self.block_size)
                    # FIX: bare `except:` also swallowed KeyboardInterrupt /
                    # SystemExit; catch runtime errors only.
                    except Exception:
                        # FIX: dropped the no-op `.format(offset, end_size)`
                        # call — the message has no placeholders.
                        o.addstr(2, 2, "Can not Read block, retry.")
                        time.sleep(1)
                        continue
                    o.addstr(2, 2,
                             "Process: {0} / {1}".format(offset, end_size))
                    fh.seek(offset)
                    fh.write(block)
                    offset += len(block)
                o.addstr(3, 2, "Press ESC to abort")
                o.refresh()
            # FIX: close the temp-file handle (it was leaked before).
            fh.close()
Beispiel #11
0
    def download(self):
        """Download ``self.row`` with multiple worker threads.

        Resumes from the conf (status) file when both it and the temp
        file exist; otherwise the size comes from the Content-Length
        header and the range is split across ``self.default_threads``
        workers.  Progress is reported via the 'started', 'received',
        'downloaded' and 'network-error' signals.
        """
        row = self.row
        if not os.path.exists(row[SAVEDIR_COL]):
            os.makedirs(row[SAVEDIR_COL], exist_ok=True)
        filepath, tmp_filepath, conf_filepath = get_tmp_filepath(
            row[SAVEDIR_COL], row[SAVENAME_COL])

        if os.path.exists(filepath):
            if self.download_mode == DownloadMode.IGNORE:
                self.emit('downloaded', row[FSID_COL])
                return
            elif self.download_mode == DownloadMode.NEWCOPY:
                # Keep the existing file; download to "<name>_<time><ext>".
                name, ext = os.path.splitext(filepath)
                filepath = '{0}_{1}{2}'.format(name, util.curr_time(), ext)

        url = pcs.get_download_link(self.cookie, self.tokens, row[PATH_COL])
        if not url:
            print('Error: Failed to get download link')
            row[STATE_COL] = State.ERROR
            self.emit('network-error', row[FSID_COL])
            return

        if os.path.exists(conf_filepath) and os.path.exists(tmp_filepath):
            # Resume: one [start, end, received] triple per worker.
            with open(conf_filepath) as conf_fh:
                status = json.load(conf_fh)
            threads = len(status)
            file_exists = True
            # FIX: was 'ab' — append mode ignores seek() for writes, so
            # resumed data landed at end-of-file; open read/write instead.
            fh = open(tmp_filepath, 'rb+')
        else:
            req = request.urlopen(url)
            if not req:
                self.emit('network-error', row[FSID_COL])
                return
            content_length = req.getheader('Content-Length')
            # Fixed: baiduPCS using non iso-8859-1 codec in http headers
            if not content_length:
                match = re.search(r'\sContent-Length:\s*(\d+)',
                                  str(req.headers))
                if not match:
                    self.emit('network-error', row[FSID_COL])
                    return
                content_length = match.group(1)
            size = int(content_length)
            threads = self.default_threads
            average_size, pad_size = divmod(size, threads)
            file_exists = False
            status = []
            fh = open(tmp_filepath, 'wb')
            # Pre-allocate so each worker can seek into its own range.
            fh.truncate(size)

        # task list
        tasks = []
        # message queue
        queue = Queue()
        # threads lock
        lock = threading.RLock()
        for id_ in range(threads):
            if file_exists:
                start_size, end_size, received = status[id_]
                start_size += received
            else:
                start_size = id_ * average_size
                end_size = start_size + average_size - 1
                if id_ == threads - 1:
                    # Last worker also covers the division remainder.
                    end_size = end_size + pad_size + 1
                status.append([start_size, end_size, 0])
            task = DownloadBatch(id_, queue, url, lock, start_size, end_size,
                                 fh, self.timeout)
            tasks.append(task)

        for task in tasks:
            task.start()

        try:
            conf_count = 0
            done = 0
            self.emit('started', row[FSID_COL])
            while row[STATE_COL] == State.DOWNLOADING:
                id_, received = queue.get()
                # FINISHED
                if received == BATCH_FINISISHED:
                    done += 1
                    if done == len(status):
                        row[STATE_COL] = State.FINISHED
                        break
                    else:
                        continue
                elif received == BATCH_ERROR:
                    row[STATE_COL] = State.ERROR
                    break
                status[id_][2] += received
                conf_count += 1
                if conf_count > THRESHOLD_TO_FLUSH:
                    # FIX: use a dedicated handle; the original rebound the
                    # name `fh`, so the temp-file handle was lost and the
                    # later fh.close() closed the wrong (already closed) file.
                    with open(conf_filepath, 'w') as conf_fh:
                        conf_fh.write(json.dumps(status))
                    conf_count = 0
                received_total = sum(t[2] for t in status)
                self.emit('received', row[FSID_COL], received_total)
        except Exception as e:
            print(e)
            for task in tasks:
                task.stop()
            row[STATE_COL] = State.ERROR
        fh.close()
        with open(conf_filepath, 'w') as conf_fh:
            conf_fh.write(json.dumps(status))

        for task in tasks:
            # FIX: condition was inverted (`not task.isAlive()`); stop the
            # threads that are still running, not the dead ones.
            if task.isAlive():
                task.stop()

        if row[STATE_COL] == State.CANCELED:
            # FIX: `tmp_filepah` typo raised NameError on cancel.
            os.remove(tmp_filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)
        elif row[STATE_COL] == State.FINISHED:
            self.emit('downloaded', row[FSID_COL])
            os.rename(tmp_filepath, filepath)
            if os.path.exists(conf_filepath):
                os.remove(conf_filepath)