# Example 1
class _DownloadAPI(QObject):
    """Download API based on requests, dispatching work to QThreads."""

    # Signals relayed to the per-request DownloadWorker instances.
    _sig_download_finished = Signal(str, str)
    _sig_download_progress = Signal(str, str, int, int)
    _sig_partial = Signal(object)

    MAX_THREADS = 20
    DEFAULT_TIMEOUT = 5  # seconds

    def __init__(self, config=None):
        """Download API based on requests, dispatching work to QThreads.

        Parameters
        ----------
        config: optional
            Configuration object supporting ``get('main', 'offline_mode')``;
            used to honor offline mode.
        """
        # Bug fix: ``super(QObject, self)`` resolved to the class *after*
        # QObject in the MRO, skipping QObject.__init__ entirely.
        super(_DownloadAPI, self).__init__()
        self._conda_api = CondaAPI()
        self._client_api = ClientAPI()
        self._config = config
        self._queue = deque()
        self._queue_workers = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()
        self._timer_worker_delete = QTimer()
        self._running_threads = 0
        self._bag_collector = deque()  # Keeps references to old workers

        self._chunk_size = 1024
        self._timer.setInterval(333)
        self._timer.timeout.connect(self._start)
        self._timer_worker_delete.setInterval(5000)
        self._timer_worker_delete.timeout.connect(self._clean_workers)

    def _clean_workers(self):
        """Delete periodically workers in workers bag."""
        while self._bag_collector:
            self._bag_collector.popleft()
        self._timer_worker_delete.stop()

    def _get_verify_ssl(self, verify, set_conda_ssl=True):
        """Return the SSL verify value, using client config when None."""
        if verify is None:
            return self._client_api.get_ssl(set_conda_ssl=set_conda_ssl)
        return verify

    def _is_internet_available(self):
        """Check internet availability (False when offline_mode is set)."""
        if self._config:
            config_value = self._config.get('main', 'offline_mode')
        else:
            config_value = False

        if config_value:
            connectivity = False
        else:
            connectivity = True  # is_internet_available()

        return connectivity

    @property
    def proxy_servers(self):
        """Return the proxy servers available from the conda rc config file."""
        return self._conda_api.load_proxy_config()

    def _start(self):
        """Start queued threads and reap finished workers/threads."""
        if self._queue_workers and self._running_threads < self.MAX_THREADS:
            self._running_threads += 1
            thread = QThread()
            worker = self._queue_workers.popleft()
            worker.moveToThread(thread)
            worker.sig_finished.connect(thread.quit)
            thread.started.connect(worker.start)
            thread.start()
            self._threads.append(thread)

        # Bug fix: iterate over copies — removing items from a list while
        # iterating it skips elements.
        for w in list(self._workers):
            if w.is_finished():
                self._bag_collector.append(w)
                self._workers.remove(w)

        for t in list(self._threads):
            if t.isFinished():
                self._threads.remove(t)
                self._running_threads -= 1

        if not self._threads and not self._workers:
            self._timer.stop()
            self._timer_worker_delete.start()

    def _create_worker(self, method, *args, **kwargs):
        """Create, register and queue a new worker instance."""
        worker = DownloadWorker(method, args, kwargs)
        self._workers.append(worker)
        self._queue_workers.append(worker)
        self._sig_download_finished.connect(worker.sig_download_finished)
        self._sig_download_progress.connect(worker.sig_download_progress)
        self._sig_partial.connect(worker._handle_partial)
        self._timer.start()
        return worker

    def _download(
        self,
        url,
        path=None,
        force=False,
        verify=None,
        chunked=True,
    ):
        """Callback for download.

        Download ``url`` into ``path`` (the url basename when None),
        skipping the transfer when an existing file already matches the
        remote Content-Length. Emits progress/finished signals and
        returns ``path``.
        """
        verify_value = self._get_verify_ssl(verify, set_conda_ssl=False)

        if path is None:
            path = url.split('/')[-1]

        # Make dir if non existent
        folder = os.path.dirname(os.path.abspath(path))

        if not os.path.isdir(folder):
            os.makedirs(folder)

        # Get headers
        if self._is_internet_available():
            try:
                r = requests.head(
                    url,
                    proxies=self.proxy_servers,
                    verify=verify_value,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                status_code = r.status_code
            except Exception as error:
                status_code = -1
                logger.error(str(error))

            # Bug fix: format string was missing the {1} placeholder, so
            # the url was silently dropped from the log message.
            logger.debug('Status code {0} - url {1}'.format(status_code, url))

            if status_code != 200:
                logger.error('Invalid url {0}'.format(url))
                return path

            total_size = int(r.headers.get('Content-Length', 0))

            # Check if file exists
            if os.path.isfile(path) and not force:
                file_size = os.path.getsize(path)
            else:
                file_size = -1

            # Check if existing file matches size of requested file
            if file_size == total_size:
                self._sig_download_finished.emit(url, path)
                return path
            else:
                try:
                    r = requests.get(
                        url,
                        stream=chunked,
                        proxies=self.proxy_servers,
                        verify=verify_value,
                        timeout=self.DEFAULT_TIMEOUT,
                    )
                    status_code = r.status_code
                except Exception as error:
                    status_code = -1
                    logger.error(str(error))

            # File not found or file size did not match. Download file.
            progress_size = 0
            bytes_stream = QBuffer()  # BytesIO was segfaulting for big files
            bytes_stream.open(QBuffer.ReadWrite)

            # For some chunked content the app segfaults (with big files)
            # so now chunked is a kwarg for this method
            if chunked:
                for chunk in r.iter_content(chunk_size=self._chunk_size):
                    if chunk:
                        bytes_stream.write(chunk)
                        progress_size += len(chunk)
                        self._sig_download_progress.emit(
                            url,
                            path,
                            progress_size,
                            total_size,
                        )

                        self._sig_partial.emit({
                            'url': url,
                            'path': path,
                            'progress_size': progress_size,
                            'total_size': total_size,
                        })

            else:
                bytes_stream.write(r.content)

            bytes_stream.seek(0)
            data = bytes_stream.data()

            with open(path, 'wb') as f:
                f.write(data)

            bytes_stream.close()

        self._sig_download_finished.emit(url, path)

        return path

    def _is_valid_url(self, url, verify=None):
        """Callback for is_valid_url."""
        verify_value = self._get_verify_ssl(verify)
        # Bug fix: default to False so the offline path does not raise
        # UnboundLocalError on return.
        value = False

        if self._is_internet_available():
            try:
                r = requests.head(
                    url,
                    proxies=self.proxy_servers,
                    verify=verify_value,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                value = r.status_code == 200
            except Exception as error:
                logger.error(str(error))
                value = False

        return value

    def _is_valid_channel(
        self,
        channel,
        conda_url='https://conda.anaconda.org',
        verify=None,
    ):
        """Callback for is_valid_channel."""
        verify_value = self._get_verify_ssl(verify)
        # Bug fix: default to False so the offline path does not raise
        # UnboundLocalError on return.
        value = False

        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        # endswith is safe on an empty string, unlike url[-1]
        if url.endswith('/'):
            url = url[:-1]

        plat = self._conda_api.get_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')

        if self._is_internet_available():
            try:
                r = requests.head(
                    repodata_url,
                    proxies=self.proxy_servers,
                    verify=verify_value,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                value = r.status_code == 200
            except Exception as error:
                logger.error(str(error))
                value = False

        return value

    def _is_valid_api_url(self, url, verify=None):
        """Callback for is_valid_api_url.

        A valid anaconda api url responds with a JSON body whose
        ``ok`` key equals 1.
        """
        # Dead-code fix: verify_value was computed twice; _get_verify_ssl
        # already falls back to the client configuration when verify is None.
        verify_value = self._get_verify_ssl(verify)
        data = {}

        if self._is_internet_available():
            try:
                r = requests.get(
                    url,
                    proxies=self.proxy_servers,
                    verify=verify_value,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                content = to_text_string(r.content, encoding='utf-8')
                data = json.loads(content)
            except Exception as error:
                logger.error(str(error))

        return data.get('ok', 0) == 1

    def _get_url(self, url, as_json=False, verify=None):
        """Callback for url checking.

        Returns the response body as text (or parsed JSON when
        ``as_json``), retrying failed connections.
        """
        data = {}
        verify_value = self._get_verify_ssl(verify)

        if self._is_internet_available():
            try:
                # See: https://github.com/ContinuumIO/navigator/issues/1485
                session = requests.Session()
                retry = Retry(connect=3, backoff_factor=0.5)
                adapter = HTTPAdapter(max_retries=retry)
                session.mount('http://', adapter)
                session.mount('https://', adapter)

                r = session.get(
                    url,
                    proxies=self.proxy_servers,
                    verify=verify_value,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                data = to_text_string(r.content, encoding='utf-8')

                if as_json:
                    data = json.loads(data)

            except Exception as error:
                logger.error(str(error))

        return data

    def _get_api_info(self, url, verify=None):
        """Callback. Query ``url`` for api info, with safe defaults."""
        verify_value = self._get_verify_ssl(verify)
        data = {
            "api_url": url,
            "api_docs_url": "https://api.anaconda.org/docs",
            "conda_url": "https://conda.anaconda.org/",
            "main_url": "https://anaconda.org/",
            "pypi_url": "https://pypi.anaconda.org/",
            "swagger_url": "https://api.anaconda.org/swagger.json",
        }
        if self._is_internet_available():
            try:
                r = requests.get(
                    url,
                    proxies=self.proxy_servers,
                    verify=verify_value,
                    timeout=self.DEFAULT_TIMEOUT,
                )
                content = to_text_string(r.content, encoding='utf-8')
                new_data = json.loads(content)
                data['conda_url'] = new_data.get('conda_url',
                                                 data['conda_url'])
            except Exception as error:
                logger.error(str(error))

        return data

    # --- Public API
    # -------------------------------------------------------------------------
    def download(self, url, path=None, force=False, verify=None, chunked=True):
        """Download file given by url and save it to path."""
        logger.debug(str((url, path, force)))
        method = self._download
        return self._create_worker(
            method,
            url,
            path=path,
            force=force,
            verify=verify,
            chunked=chunked,
        )

    def terminate(self):
        """Terminate all workers and threads."""
        for t in self._threads:
            t.quit()
        # Bug fix: was assigning to self._thread (typo), leaving the real
        # thread list populated.
        self._threads = []
        self._workers = []

    def is_valid_url(self, url, non_blocking=True):
        """Check if url is valid."""
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_url(url)

    def is_valid_api_url(self, url, non_blocking=True, verify=None):
        """Check if anaconda api url is valid."""
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_api_url
            return self._create_worker(method, url, verify=verify)
        else:
            return self._is_valid_api_url(url=url, verify=verify)

    def is_valid_channel(
        self,
        channel,
        conda_url='https://conda.anaconda.org',
        non_blocking=True,
    ):
        """Check if a conda channel is valid."""
        logger.debug(str((channel, conda_url)))
        if non_blocking:
            method = self._is_valid_channel
            return self._create_worker(method, channel, conda_url)
        else:
            return self._is_valid_channel(channel, conda_url=conda_url)

    def get_url(self, url, as_json=False, verify=None, non_blocking=True):
        """Get url content."""
        logger.debug(str(url))
        if non_blocking:
            method = self._get_url
            return self._create_worker(method,
                                       url,
                                       as_json=as_json,
                                       verify=verify)
        else:
            return self._get_url(url, as_json=as_json, verify=verify)

    def get_api_info(self, url, non_blocking=True):
        """Query anaconda api info."""
        logger.debug(str((url, non_blocking)))
        if non_blocking:
            method = self._get_api_info
            return self._create_worker(method, url)
        else:
            return self._get_api_info(url)
# Example 2
 def package_version(self, pkg, name='root'):
     """Return the version of *pkg* for the *name* environment via conda.

     Delegates to CondaAPI.package_version with build info included.
     """
     return CondaAPI().package_version(name=name, pkg=pkg, build=True)
# Example 3
class _ClientAPI(QObject):
    """Anaconda Client API wrapper."""

    DEFAULT_TIMEOUT = 6

    def __init__(self):
        """Anaconda Client API wrapper."""
        # Bug fix: ``super(QObject, self)`` resolved to the class *after*
        # QObject in the MRO, skipping QObject.__init__ entirely.
        super(_ClientAPI, self).__init__()
        self._anaconda_client_api = binstar_client.utils.get_server_api(
            log_level=logging.NOTSET)
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()
        self._conda_api = CondaAPI()
        self.config = CONF

        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)

    def _clean(self):
        """Check for inactive workers and remove their references."""
        # Bug fix: iterate over copies — removing items from a list while
        # iterating it skips elements.
        for w in list(self._workers):
            if w.is_finished():
                self._workers.remove(w)

        if self._threads:
            for t in list(self._threads):
                if t.isFinished():
                    self._threads.remove(t)
        else:
            self._timer.stop()

    def _start(self):
        """Take avalaible worker from the queue and start it."""
        if len(self._queue) == 1:
            thread = self._queue.popleft()
            thread.start()
            self._timer.start()

    def _create_worker(self, method, *args, **kwargs):
        """Create a worker for this client to be run in a separate thread."""
        # FIXME: this might be heavy...
        thread = QThread()
        worker = ClientWorker(method, args, kwargs)
        worker.moveToThread(thread)
        worker.sig_finished.connect(self._start)
        worker.sig_finished.connect(thread.quit)
        thread.started.connect(worker.start)
        self._queue.append(thread)
        self._threads.append(thread)
        self._workers.append(worker)
        self._start()
        return worker

    # --- Callbacks
    # -------------------------------------------------------------------------
    @staticmethod
    def _load_repodata(repodata, metadata=None, python_version=None):
        """
        Load all the available package information.

        See load_repodata for full documentation.
        """
        metadata = metadata if metadata else {}

        all_packages = {}
        for channel_url, channel_data in repodata.items():
            packages = channel_data.get('packages', {})

            for canonical_name in packages:
                pkg_data = packages[canonical_name]
                # Do not filter based on python version
                name, version, b = tuple(canonical_name.rsplit('-', 2))

                if name not in all_packages:
                    all_packages[name] = {
                        'versions': set(),
                        'size': {},
                        'type': {},
                        'app_entry': {},
                        'app_type': {},
                    }

                # Bug fix: this was an ``elif``, so metadata was merged only
                # when the name had already been seen; packages appearing a
                # single time never received home/license/summary info.
                if name in metadata:
                    pkg_meta = metadata[name]
                    all_packages[name]['home'] = pkg_meta.get('home', '')
                    all_packages[name]['license'] = pkg_meta.get('license', '')
                    all_packages[name]['summary'] = pkg_meta.get('summary', '')
                    all_packages[name]['latest_version'] = pkg_meta.get(
                        'version')

                all_packages[name]['versions'].add(version)
                all_packages[name]['size'][version] = pkg_data.get('size', '')

                # Only the latest builds will have the correct metadata for
                # apps, so only store apps that have the app metadata
                if pkg_data.get('type'):
                    all_packages[name]['type'][version] = pkg_data.get('type')
                    all_packages[name]['app_entry'][version] = pkg_data.get(
                        'app_entry')
                    all_packages[name]['app_type'][version] = pkg_data.get(
                        'app_type')

        all_apps = {}
        for name in all_packages:
            versions = sort_versions(list(all_packages[name]['versions']))
            all_packages[name]['versions'] = versions[:]

            for version in versions:
                has_type = all_packages[name].get('type')
                # Has type in this case implies being an app
                if has_type:
                    all_apps[name] = all_packages[name].copy()
                    # Remove all versions that are not apps!
                    versions = all_apps[name]['versions'][:]
                    types = all_apps[name]['type']
                    app_versions = [v for v in versions if v in types]
                    all_apps[name]['versions'] = app_versions

        return all_packages, all_apps

    @staticmethod
    def _prepare_model_data(packages, linked, pip=None):
        """Prepare model data for the packages table model."""
        pip = pip if pip else []
        data = []
        linked_packages = {}
        for canonical_name in linked:
            name, version, b = tuple(canonical_name.rsplit('-', 2))
            linked_packages[name] = {'version': version}

        pip_packages = {}
        for canonical_name in pip:
            name, version, b = tuple(canonical_name.rsplit('-', 2))
            pip_packages[name] = {'version': version}

        packages_names = sorted(
            set(linked_packages) | set(pip_packages) | set(packages))

        for name in packages_names:
            p_data = packages.get(name)

            summary = p_data.get('summary', '') if p_data else ''
            url = p_data.get('home', '') if p_data else ''
            license_ = p_data.get('license', '') if p_data else ''
            # Bug fix: default was '' (a string), not a list, which made
            # later membership/len checks operate on a string.
            versions = p_data.get('versions', []) if p_data else []
            version = p_data.get('latest_version', '') if p_data else ''

            if name in pip_packages:
                type_ = C.PIP_PACKAGE
                version = pip_packages[name].get('version', '')
                status = C.INSTALLED
            elif name in linked_packages:
                type_ = C.CONDA_PACKAGE
                version = linked_packages[name].get('version', '')
                status = C.INSTALLED

                if version in versions:
                    vers = versions
                    upgradable = version != vers[-1] and len(vers) != 1
                    downgradable = version != vers[0] and len(vers) != 1

                    if upgradable and downgradable:
                        status = C.MIXGRADABLE
                    elif upgradable:
                        status = C.UPGRADABLE
                    elif downgradable:
                        status = C.DOWNGRADABLE
            else:
                type_ = C.CONDA_PACKAGE
                status = C.NOT_INSTALLED

                if version == '' and len(versions) != 0:
                    version = versions[-1]

            row = {
                C.COL_ACTION: C.ACTION_NONE,
                C.COL_PACKAGE_TYPE: type_,
                C.COL_NAME: name,
                C.COL_DESCRIPTION: summary.capitalize(),
                C.COL_VERSION: version,
                C.COL_STATUS: status,
                C.COL_URL: url,
                C.COL_LICENSE: license_,
                C.COL_ACTION_VERSION: None,
            }

            data.append(row)
        return data

    def _get_user_licenses(self, products=None):
        """Get user trial/paid licenses from anaconda.org."""
        license_data = []
        try:
            res = self._anaconda_client_api.user_licenses()
            license_data = res.get('data', [])

            # This should be returning a dict or list not a json string!
            if is_text_string(license_data):
                license_data = json.loads(license_data)
        except Exception:
            # Best-effort: back off briefly and return whatever we have.
            time.sleep(0.3)

        return license_data

    # --- Public API
    # -------------------------------------------------------------------------
    def token(self):
        """Return the current token registered with authenticate."""
        return self._anaconda_client_api.token

    def load_token(self):
        """Load current authenticated token."""
        token = None
        try:
            token = binstar_client.utils.load_token(self.get_api_url())
        except Exception:
            pass
        return token

    def _login(self, username, password, application, application_url):
        """Login callback."""
        new_token = self._anaconda_client_api.authenticate(
            username, password, application, application_url)
        args = Args()
        args.site = None
        args.token = new_token
        binstar_client.utils.store_token(new_token, args)
        return new_token

    def login(self, username, password, application, application_url):
        """Login to anaconda server."""
        logger.debug(str((username, application, application_url)))
        method = self._login
        return self._create_worker(method, username, password, application,
                                   application_url)

    def logout(self):
        """
        Logout from anaconda.org.

        This method removes the authentication and removes the token.
        """
        error = None
        args = Args()
        args.site = None
        # Bug fix: ``token`` is a method; the original stored the bound
        # method object (always truthy) instead of the token value.
        args.token = self.token()

        binstar_client.utils.remove_token(args)
        if args.token:
            try:
                self._anaconda_client_api.remove_authentication()
            except binstar_client.errors.Unauthorized as e:
                error = e
                logger.debug("The token that you are trying to remove may "
                             "not be valid {}".format(e))
            except Exception as e:
                error = e
                logger.debug("The certificate might be invalid. {}".format(e))

        logger.info("logout successful")
        return error

    def load_repodata(self, repodata, metadata=None, python_version=None):
        """
        Load all the available packages information for downloaded repodata.

        For downloaded repodata files (repo.continuum.io), additional
        data provided (anaconda cloud), and additional metadata and merge into
        a single set of packages and apps.

        If python_version is not none, exclude all package/versions which
        require an incompatible version of python.

        Parameters
        ----------
        repodata: dict of dicts
            Data loaded from the conda cache directories.
        metadata: dict
            Metadata info form different sources. For now only from
            repo.continuum.
        python_version: str
            Python version used in preprocessing.
        """
        logger.debug('')
        method = self._load_repodata
        return self._create_worker(
            method,
            repodata,
            metadata=metadata,
            python_version=python_version,
        )

    def prepare_model_data(self, packages, linked, pip=None):
        """Prepare downloaded package info along with pip pacakges info."""
        logger.debug('')
        method = self._prepare_model_data
        return self._create_worker(
            method,
            packages,
            linked,
            pip=pip,
        )

    def set_domain(self, domain='https://api.anaconda.org'):
        """Reset current api domain."""
        logger.debug('Setting domain {}'.format(domain))
        config = binstar_client.utils.get_config()
        config['url'] = domain

        try:
            binstar_client.utils.set_config(config)
        except binstar_client.errors.BinstarError:
            logger.error('Could not write anaconda client configuration')
            traceback = format_exc()
            msg_box = MessageBoxError(
                title='Anaconda Client configuration error',
                text='Anaconda Client domain could not be updated.<br><br>'
                'This may result in Navigator not working properly.<br>',
                error='<pre>' + traceback + '</pre>',
                report=False,
                learn_more=None,
            )
            msg_box.exec_()

        self._anaconda_client_api = binstar_client.utils.get_server_api(
            token=None,
            log_level=logging.NOTSET,
        )

    def user(self):
        """Return current logged user information."""
        return self.organizations(login=None)

    def domain(self):
        """Return current domain."""
        return self._anaconda_client_api.domain

    def packages(self,
                 login=None,
                 platform=None,
                 package_type=None,
                 type_=None,
                 access=None):
        """Return all the available packages for a given user.

        Parameters
        ----------
        type_: Optional[str]
            Only find packages that have this conda `type`, (i.e. 'app').
        access : Optional[str]
            Only find packages that have this access level (e.g. 'private',
            'authenticated', 'public').
        """
        logger.debug('')
        method = self._anaconda_client_api.user_packages
        return self._create_worker(
            method,
            login=login,
            platform=platform,
            package_type=package_type,
            type_=type_,
            access=access,
        )

    def organizations(self, login):
        """List all the organizations a user has access to."""
        try:
            user = self._anaconda_client_api.user(login=login)
        except Exception:
            user = {}
        return user

    @staticmethod
    def get_api_url():
        """Get the anaconda client url configuration."""
        config_data = binstar_client.utils.get_config()
        return config_data.get('url', 'https://api.anaconda.org')

    @staticmethod
    def set_api_url(url):
        """Set the anaconda client url configuration."""
        config_data = binstar_client.utils.get_config()
        config_data['url'] = url
        try:
            binstar_client.utils.set_config(config_data)
        except Exception as e:
            logger.error('Could not write anaconda client configuration')
            msg_box = MessageBoxError(
                title='Anaconda Client configuration error',
                text='Anaconda Client configuration could not be updated.<br>'
                'This may result in Navigator not working properly.<br>',
                error=e,
                report=False,
                learn_more=None,
            )
            msg_box.exec_()

    def get_ssl(self, set_conda_ssl=True):
        """
        Get the anaconda client url configuration and set conda accordingly.

        Parameters
        ----------
        set_conda_ssl: bool
            When True (the default, preserving previous behavior), also
            propagate the value to conda's ``ssl_verify`` setting. Callers
            such as the download API pass this keyword explicitly.
        """
        config_data = binstar_client.utils.get_config()
        value = config_data.get('verify_ssl', True)
        if set_conda_ssl:
            self._conda_api.config_set('ssl_verify', value).communicate()
        return value

    def set_ssl(self, value):
        """Set the anaconda client url configuration."""
        config_data = binstar_client.utils.get_config()
        config_data['verify_ssl'] = value
        try:
            binstar_client.utils.set_config(config_data)
            self._conda_api.config_set('ssl_verify', value).communicate()
        except Exception as e:
            logger.error('Could not write anaconda client configuration')
            msg_box = MessageBoxError(
                title='Anaconda Client configuration error',
                text='Anaconda Client configuration could not be updated.<br>'
                'This may result in Navigator not working properly.<br>',
                error=e,
                report=False,
                learn_more=None,
            )
            msg_box.exec_()

    def get_user_licenses(self, products=None):
        """Get user trial/paid licenses from anaconda.org."""
        logger.debug(str((products)))
        method = self._get_user_licenses
        return self._create_worker(method, products=products)

    def _get_api_info(self, url, proxy_servers=None, verify=True):
        """Callback. Query ``url`` for api info, with safe defaults."""
        proxy_servers = proxy_servers or {}
        data = {
            "api_url": url,
            "api_docs_url": "https://api.anaconda.org/docs",
            "brand": DEFAULT_BRAND,
            "conda_url": "https://conda.anaconda.org",
            "main_url": "https://anaconda.org",
            "pypi_url": "https://pypi.anaconda.org",
            "swagger_url": "https://api.anaconda.org/swagger.json",
        }
        try:
            r = requests.get(
                url,
                proxies=proxy_servers,
                verify=verify,
                timeout=self.DEFAULT_TIMEOUT,
            )
            content = to_text_string(r.content, encoding='utf-8')
            new_data = json.loads(content)

            # Enforce no trailing slash. Bug fix: endswith is safe on
            # empty strings, unlike value[-1] which raised IndexError.
            for key, value in new_data.items():
                if is_text_string(value):
                    data[key] = value[:-1] if value.endswith('/') else value

        except Exception as error:
            logger.error(str(error))
        return data

    def get_api_info(self, url, proxy_servers=None, verify=True):
        """Query anaconda api info."""
        logger.debug(str((url)))
        proxy_servers = proxy_servers or {}
        method = self._get_api_info
        return self._create_worker(method,
                                   url,
                                   proxy_servers=proxy_servers,
                                   verify=verify)