Example no. 1
import logging
import os

# The imports and logger below are assumptions added to make the excerpt
# self-contained: the Qt classes are taken from the qtpy bindings (a common
# choice for these names), while NetworkProxyFactory, DownloadWorker and
# to_text_string are expected to be defined in the surrounding module.
from qtpy.QtCore import QObject, QTimer, QUrl
from qtpy.QtNetwork import QNetworkAccessManager, QNetworkRequest

logger = logging.getLogger(__name__)


class _DownloadAPI(QObject):
    """Download API based on QNetworkAccessManager."""

    def __init__(self, chunk_size=1024, load_rc_func=None):
        """Download API based on QNetworkAccessManager."""
        super(_DownloadAPI, self).__init__()
        self._chunk_size = chunk_size
        self._head_requests = {}
        self._get_requests = {}
        self._paths = {}
        self._workers = {}

        self._load_rc_func = load_rc_func
        self._manager = QNetworkAccessManager(self)
        self._proxy_factory = NetworkProxyFactory(load_rc_func=load_rc_func)
        self._timer = QTimer()

        # Setup
        self._manager.setProxyFactory(self._proxy_factory)
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)

        # Signals
        self._manager.finished.connect(self._request_finished)
        self._manager.sslErrors.connect(self._handle_ssl_errors)
        self._manager.proxyAuthenticationRequired.connect(
            self._handle_proxy_auth)

    @staticmethod
    def _handle_ssl_errors(reply, errors):
        """Callback for ssl_errors."""
        logger.error(str(('SSL Errors', errors, reply)))

    @staticmethod
    def _handle_proxy_auth(proxy, authenticator):
        """Callback for ssl_errors."""
#        authenticator.setUser('1')`
#        authenticator.setPassword('1')
        logger.error(str(('Proxy authentication Error. '
                          'Enter credentials in condarc',
                          proxy,
                          authenticator)))

    def _clean(self):
        """Check for inactive workers and remove their references."""
        if self._workers:
            for url in self._workers.copy():
                w = self._workers[url]
                if w.is_finished():
                    self._workers.pop(url)
                    self._paths.pop(url)
                    if url in self._get_requests:
                        self._get_requests.pop(url)

        else:
            self._timer.stop()

    def _request_finished(self, reply):
        """Callback for download once the request has finished."""
        url = to_text_string(reply.url().toEncoded(), encoding='utf-8')

        if url in self._paths:
            path = self._paths[url]
        if url in self._workers:
            worker = self._workers[url]

        if url in self._head_requests:
            error = reply.error()
#            print(url, error)
            if error:
                logger.error(str(('Head Reply Error:', error)))
                worker.sig_download_finished.emit(url, path)
                worker.sig_finished.emit(worker, path, error)
                return

            self._head_requests.pop(url)
            start_download = not bool(error)
            header_pairs = reply.rawHeaderPairs()
            headers = {}

            for hp in header_pairs:
                headers[to_text_string(hp[0]).lower()] = to_text_string(hp[1])

            total_size = int(headers.get('content-length', 0))

            # Check if file exists
            if os.path.isfile(path):
                file_size = os.path.getsize(path)

                # Check if existing file matches size of requested file
                start_download = file_size != total_size

            if start_download:
                # File sizes don't match, so download the file
                qurl = QUrl(url)
                request = QNetworkRequest(qurl)
                self._get_requests[url] = request
                reply = self._manager.get(request)

                error = reply.error()
                if error:
                    logger.error(str(('Reply Error:', error)))

                reply.downloadProgress.connect(
                    lambda r, t, w=worker: self._progress(r, t, w))
            else:
                # File sizes match, so skip the download and report success
                worker.finished = True
                worker.sig_download_finished.emit(url, path)
                worker.sig_finished.emit(worker, path, None)
        elif url in self._get_requests:
            data = reply.readAll()
            self._save(url, path, data)

    def _save(self, url, path, data):
        """Save `data` of downloaded `url` in `path`."""
        worker = self._workers[url]
        path = self._paths[url]

        if len(data):
            try:
                with open(path, 'wb') as f:
                    f.write(data)
            except Exception:
                logger.error(str((url, path)))

        # Clean up
        worker.finished = True
        worker.sig_download_finished.emit(url, path)
        worker.sig_finished.emit(worker, path, None)
        self._get_requests.pop(url)
        self._workers.pop(url)
        self._paths.pop(url)

    @staticmethod
    def _progress(bytes_received, bytes_total, worker):
        """Return download progress."""
        worker.sig_download_progress.emit(
            worker.url, worker.path, bytes_received, bytes_total)

    def download(self, url, path):
        """Download url and save data to path."""
        # original_url = url
#        print(url)
        qurl = QUrl(url)
        url = to_text_string(qurl.toEncoded(), encoding='utf-8')

        logger.debug(str((url, path)))
        if url in self._workers:
            # Reuse the in-progress worker instead of starting a new request.
            if not self._workers[url].finished:
                return self._workers[url]

        worker = DownloadWorker(url, path)

        # Make sure the download folder exists
        folder = os.path.dirname(os.path.abspath(path))
        if not os.path.isdir(folder):
            os.makedirs(folder)

        request = QNetworkRequest(qurl)
        self._head_requests[url] = request
        self._paths[url] = path
        self._workers[url] = worker
        self._manager.head(request)
        self._timer.start()

        return worker

    def terminate(self):
        """Terminate all download workers and threads."""
        pass
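

# Usage sketch (an assumption, not part of the original example): driving
# _DownloadAPI requires a running Qt event loop, because QNetworkAccessManager
# works asynchronously. The QApplication import, the URL and the target path
# are illustrative placeholders; only download() and the worker's sig_finished
# signal are taken from the class above.
if __name__ == '__main__':
    import sys

    from qtpy.QtWidgets import QApplication

    app = QApplication(sys.argv)
    api = _DownloadAPI()

    def report(worker, path, error):
        # Illustrative slot: print the result and stop the event loop.
        print('Download finished: %s (error: %s)' % (path, error))
        app.quit()

    worker = api.download(
        'https://repo.anaconda.com/pkgs/main/noarch/repodata.json',
        '/tmp/repodata.json')
    worker.sig_finished.connect(report)

    sys.exit(app.exec_())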