Code example #1

# Assumes ClientAPI is importable from the surrounding project,
# e.g. `from anaconda_navigator.api.client_api import ClientAPI`.
def test_ssl():
    api = ClientAPI()
    default = True
    test = False
    # Toggle SSL verification off, then restore the default
    api.set_ssl(test)
    assert api.get_ssl() == test
    api.set_ssl(default)
    assert api.get_ssl() == default
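
To run the test in isolation, a minimal stand-in for ClientAPI is sketched
below. The real ClientAPI persists this setting in a configuration file; the
stub and its _ssl attribute are hypothetical and only mirror the
getter/setter pair the test exercises.

# Hypothetical minimal stand-in for ClientAPI (illustration only)
class ClientAPI:
    def __init__(self):
        self._ssl = True  # assumed default: SSL verification enabled

    def set_ssl(self, value):
        self._ssl = bool(value)

    def get_ssl(self):
        return self._ssl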
Code example #2

# Imports assumed by this example (qtpy shown; the original project may use
# another Qt binding). CondaAPI, ClientAPI, DownloadWorker and to_text_string
# come from the surrounding project.
import json
import logging
import os
from collections import deque

import requests
from qtpy.QtCore import QBuffer, QObject, QThread, QTimer, Signal

logger = logging.getLogger(__name__)

class _DownloadAPI(QObject):
    """Download API based on requests."""

    _sig_download_finished = Signal(str, str)
    _sig_download_progress = Signal(str, str, int, int)

    MAX_THREADS = 20
    DEFAULT_TIMEOUT = 5  # seconds

    def __init__(self):
        """Download API based on requests."""
        super(_DownloadAPI, self).__init__()
        self._conda_api = CondaAPI()
        self._client_api = ClientAPI()
        self._queue = deque()
        self._queue_workers = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()
        self._timer_worker_delete = QTimer()
        self._running_threads = 0
        self._bag_collector = deque()  # Keeps references to old workers

        self._chunk_size = 1024
        self._timer.setInterval(333)
        self._timer.timeout.connect(self._start)
        self._timer_worker_delete.setInterval(5000)
        self._timer_worker_delete.timeout.connect(self._clean_workers)

    def _clean_workers(self):
        """Delete periodically workers in workers bag."""
        while self._bag_collector:
            self._bag_collector.popleft()
        self._timer_worker_delete.stop()

    @property
    def proxy_servers(self):
        """Return the proxy servers available from the conda rc config file."""
        return self._conda_api.load_proxy_config()

    def _start(self):
        """Start threads and check for inactive workers."""
        if self._queue_workers and self._running_threads < self.MAX_THREADS:
            self._running_threads += 1
            thread = QThread()
            worker = self._queue_workers.popleft()
            worker.moveToThread(thread)
            worker.sig_finished.connect(thread.quit)
            thread.started.connect(worker.start)
            thread.start()
            self._threads.append(thread)

        if self._workers:
            for w in self._workers[:]:  # iterate over a copy; list is mutated
                if w.is_finished():
                    self._bag_collector.append(w)
                    self._workers.remove(w)

        if self._threads:
            for t in self._threads[:]:  # iterate over a copy; list is mutated
                if t.isFinished():
                    self._threads.remove(t)
                    self._running_threads -= 1

        if not self._threads and not self._workers:
            self._timer.stop()
            self._timer_worker_delete.start()

    def _create_worker(self, method, *args, **kwargs):
        """Create a new worker instance."""
        worker = DownloadWorker(method, args, kwargs)
        self._workers.append(worker)
        self._queue_workers.append(worker)
        self._sig_download_finished.connect(worker.sig_download_finished)
        self._sig_download_progress.connect(worker.sig_download_progress)
        self._timer.start()
        return worker

    def _download(
        self,
        url,
        path=None,
        force=False,
        verify=True,
        chunked=True,
    ):
        """Callback for download."""
        if path is None:
            path = url.split('/')[-1]

        # Create the target folder if it does not exist
        folder = os.path.dirname(os.path.abspath(path))

        if not os.path.isdir(folder):
            os.makedirs(folder)

        # Get headers
        try:
            r = requests.head(
                url,
                proxies=self.proxy_servers,
                verify=verify,
                timeout=self.DEFAULT_TIMEOUT,
            )
            status_code = r.status_code
        except Exception as error:
            status_code = -1
            logger.error(str(error))

        logger.debug('Status code {0} - {1}'.format(status_code, url))

        if status_code != 200:
            logger.error('Invalid url {0}'.format(url))
            return path

        total_size = int(r.headers.get('Content-Length', 0))

        # Check if file exists
        if os.path.isfile(path) and not force:
            file_size = os.path.getsize(path)
        else:
            file_size = -1

        # Check if existing file matches size of requested file
        if file_size == total_size:
            self._sig_download_finished.emit(url, path)
            return path
        else:
            try:
                r = requests.get(
                    url,
                    stream=chunked,
                    proxies=self.proxy_servers,
                    verify=verify,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                status_code = r.status_code
            except Exception as error:
                status_code = -1
                logger.error(str(error))

            if status_code != 200:
                # Bail out instead of writing a stale/empty body below
                logger.error('Invalid url {0}'.format(url))
                return path

        # File not found or file size did not match. Download file.
        progress_size = 0
        bytes_stream = QBuffer()  # BytesIO was segfaulting for big files
        bytes_stream.open(QBuffer.ReadWrite)

        # Chunked downloads of some big files caused segfaults, so chunking
        # is exposed as a kwarg and can be disabled per call
        if chunked:
            for chunk in r.iter_content(chunk_size=self._chunk_size):
                if chunk:
                    bytes_stream.write(chunk)
                    progress_size += len(chunk)
                    self._sig_download_progress.emit(
                        url,
                        path,
                        progress_size,
                        total_size,
                    )
        else:
            bytes_stream.write(r.content)

        bytes_stream.seek(0)
        data = bytes_stream.data()

        with open(path, 'wb') as f:
            f.write(data)

        bytes_stream.close()

        self._sig_download_finished.emit(url, path)

        return path

    def _is_valid_url(self, url):
        """Callback for is_valid_url."""
        try:
            r = requests.head(
                url,
                proxies=self.proxy_servers,
                timeout=self.DEFAULT_TIMEOUT,
            )
            value = r.status_code == 200
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_channel(self,
                          channel,
                          conda_url='https://conda.anaconda.org'):
        """Callback for is_valid_channel."""
        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        url = url.rstrip('/')

        plat = self._conda_api.get_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')

        try:
            r = requests.head(
                repodata_url,
                proxies=self.proxy_servers,
                verify=self._client_api.get_ssl(),
                timeout=self.DEFAULT_TIMEOUT,
            )
            value = r.status_code == 200
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_api_url(self, url, verify=None):
        """Callback for is_valid_api_url."""
        # Check that the response is JSON with ok: 1
        data = {}

        if verify is None:
            verify_value = self._client_api.get_ssl()
        else:
            verify_value = verify

        try:
            r = requests.get(
                url,
                proxies=self.proxy_servers,
                verify=verify_value,
                timeout=self.DEFAULT_TIMEOUT,
            )
            content = to_text_string(r.content, encoding='utf-8')
            data = json.loads(content)
        except Exception as error:
            logger.error(str(error))

        return data.get('ok', 0) == 1

    # --- Public API
    # -------------------------------------------------------------------------
    def download(self, url, path=None, force=False, verify=True, chunked=True):
        """Download file given by url and save it to path."""
        logger.debug(str((url, path, force)))
        method = self._download
        return self._create_worker(
            method,
            url,
            path=path,
            force=force,
            verify=verify,
            chunked=chunked,
        )

    def terminate(self):
        """Terminate all workers and threads."""
        for t in self._threads:
            t.quit()
        self._threads = []
        self._workers = []

    def is_valid_url(self, url, non_blocking=True):
        """Check if url is valid."""
        logger.debug(str(url))
        if non_blocking:
            method = self._is_valid_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_url(url)

    def is_valid_api_url(self, url, non_blocking=True, verify=True):
        """Check if anaconda api url is valid."""
        logger.debug(str(url))
        if non_blocking:
            method = self._is_valid_api_url
            return self._create_worker(method, url, verify=verify)
        else:
            return self._is_valid_api_url(url=url, verify=verify)

    def is_valid_channel(self,
                         channel,
                         conda_url='https://conda.anaconda.org',
                         non_blocking=True):
        """Check if a conda channel is valid."""
        logger.debug(str((channel, conda_url)))
        if non_blocking:
            method = self._is_valid_channel
            return self._create_worker(method, channel, conda_url)
        else:
            return self._is_valid_channel(channel, conda_url=conda_url)

    def _get_api_info(self, url):
        """Callback."""
        data = {
            "api_url": url,
            "api_docs_url": "https://api.anaconda.org/docs",
            "conda_url": "https://conda.anaconda.org/",
            "main_url": "https://anaconda.org/",
            "pypi_url": "https://pypi.anaconda.org/",
            "swagger_url": "https://api.anaconda.org/swagger.json",
        }
        try:
            r = requests.get(
                url,
                proxies=self.proxy_servers,
                verify=self._client_api.get_ssl(),
                timeout=self.DEFAULT_TIMEOUT,
            )
            content = to_text_string(r.content, encoding='utf-8')
            new_data = json.loads(content)
            data['conda_url'] = new_data.get('conda_url', data['conda_url'])
        except Exception as error:
            logger.error(str(error))
        return data

    def get_api_info(self, url, non_blocking=True):
        """Query anaconda api info."""
        logger.debug(str((url, non_blocking)))
        if non_blocking:
            method = self._get_api_info
            return self._create_worker(method, url)
        else:
            return self._get_api_info(url)
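
Because all scheduling happens on QTimer ticks, the download API only makes
progress while a Qt event loop is running. The sketch below shows one way to
drive it; the example URL and the on_finished slot are illustrative
assumptions, and DownloadWorker, CondaAPI and ClientAPI must be available
from the surrounding project.

# Minimal usage sketch (assumes a Qt binding such as qtpy is installed)
import sys
from qtpy.QtWidgets import QApplication

app = QApplication(sys.argv)
api = _DownloadAPI()

# Blocking call: no event loop needed
print(api.is_valid_url('https://anaconda.org', non_blocking=False))

def on_finished(url, path):
    print('Downloaded {0} -> {1}'.format(url, path))
    app.quit()

# Non-blocking call: returns a worker whose signal fires once the
# event loop has processed the download
worker = api.download('https://repo.anaconda.com/pkgs/main/noarch/repodata.json')
worker.sig_download_finished.connect(on_finished)
app.exec_()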