def __init__(self):
        super(QObject, self).__init__()
        self._conda_api = CondaAPI()
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()

        self._chunk_size = 1024
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)
Example #2
    def __init__(self, load_rc_func=None):
        """Download API based on requests."""
        super(_RequestsDownloadAPI, self).__init__()
        self._conda_api = CondaAPI()
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()

        self._load_rc_func = load_rc_func
        self._chunk_size = 1024
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)
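The `load_rc_func` hook is how proxy settings from the user's condarc reach the downloader (see the `proxy_servers` property in Example #7 below). A minimal construction sketch, mirroring how `_ManagerAPI` wires it in Example #5; the module name `download_api` and the standalone-script setup are assumptions, not taken from this listing:

# Hedged sketch: `download_api` is a hypothetical module exposing these classes.
from qtpy.QtCore import QCoreApplication   # assumption: qtpy bindings, as in Spyder
from download_api import CondaAPI, _RequestsDownloadAPI

app = QCoreApplication([])                 # Qt objects (QTimer) need an application
conda_api = CondaAPI()
# Pass the condarc loader so `proxy_servers` can read the user's proxy config.
requests_download_api = _RequestsDownloadAPI(load_rc_func=conda_api.load_rc)
print(requests_download_api.proxy_servers)  # {} when no proxies are configured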
    def __init__(self):
        super(QObject, self).__init__()
        self._conda_api = CondaAPI()
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()

        self._chunk_size = 1024
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)
    def __init__(self):
        super(QObject, self).__init__()
        self._anaconda_client_api = binstar_client.utils.get_server_api(
            log_level=logging.NOTSET)
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()
        self._conda_api = CondaAPI()

        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)
Example #5
    def __init__(self):
        """Anaconda Manager API process worker."""
        super(_ManagerAPI, self).__init__()

        # APIs
        self._conda_api = CondaAPI()
        self._client_api = ClientAPI()
        self._download_api = DownloadAPI(load_rc_func=self._conda_api.load_rc)
        self._requests_download_api = RequestsDownloadAPI(
            load_rc_func=self._conda_api.load_rc)
        self.ROOT_PREFIX = self._conda_api.ROOT_PREFIX

        # Vars
        self._checking_repos = None
        self._data_directory = None
        self._files_downloaded = None
        self._repodata_files = None
        self._valid_repos = None

        # Expose some methods for convenient access. Methods return a worker
        self.conda_create = self._conda_api.create
        self.conda_create_yaml = self._conda_api.create_from_yaml
        self.conda_clone = self._conda_api.clone_environment
        self.conda_dependencies = self._conda_api.dependencies
        self.conda_get_condarc_channels = self._conda_api.get_condarc_channels
        self.conda_install = self._conda_api.install
        self.conda_remove = self._conda_api.remove
        self.conda_terminate = self._conda_api.terminate_all_processes
        self.conda_config_add = self._conda_api.config_add
        self.conda_config_remove = self._conda_api.config_remove
        self.pip_list = self._conda_api.pip_list
        self.pip_remove = self._conda_api.pip_remove

        # No workers are returned for these methods
        self.conda_clear_lock = self._conda_api.clear_lock
        self.conda_environment_exists = self._conda_api.environment_exists
        self.conda_get_envs = self._conda_api.get_envs
        self.conda_linked = self._conda_api.linked
        self.conda_get_prefix_envname = self._conda_api.get_prefix_envname
        self.conda_package_version = self._conda_api.package_version
        self.conda_platform = self._conda_api.get_platform

        # These download methods return a worker
        get_api_info = self._requests_download_api.get_api_info
        is_valid_url = self._requests_download_api.is_valid_api_url
        is_valid_channel = self._requests_download_api.is_valid_channel
        terminate = self._requests_download_api.terminate
        self.download_requests = self._requests_download_api.download
        self.download_async = self._download_api.download
        self.download_async_terminate = self._download_api.terminate
        self.download_is_valid_url = self._requests_download_api.is_valid_url
        self.download_is_valid_api_url = is_valid_url
        self.download_get_api_info = lambda: get_api_info(
            self._client_api.get_api_url())
        self.download_is_valid_channel = is_valid_channel
        self.download_requests_terminate = terminate

        # These client methods return a worker
        self.client_store_token = self._client_api.store_token
        self.client_remove_token = self._client_api.remove_token
        self.client_login = self._client_api.login
        self.client_logout = self._client_api.logout
        self.client_load_repodata = self._client_api.load_repodata
        self.client_prepare_packages_data = self._client_api.prepare_model_data
        self.client_user = self._client_api.user
        self.client_domain = self._client_api.domain
        self.client_set_domain = self._client_api.set_domain
        self.client_packages = self._client_api.packages
        self.client_multi_packages = self._client_api.multi_packages
        self.client_organizations = self._client_api.organizations
        self.client_load_token = self._client_api.load_token
        self.client_get_api_url = self._client_api.get_api_url
        self.client_set_api_url = self._client_api.set_api_url
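The comments above spell out the calling convention: attributes in the "return a worker" groups hand back a worker whose `sig_finished` signal delivers `(worker, output, error)`, which is exactly how `_check_repos` and `_repos_checked` consume it in Example #6. A hedged usage sketch for one of the download helpers; the module name `manager_api` is a placeholder and a running Qt event loop is assumed:

# Hedged sketch: `manager_api` is a hypothetical module exposing _ManagerAPI.
import sys
from qtpy.QtWidgets import QApplication    # assumption: qtpy bindings, as in Spyder
from manager_api import _ManagerAPI

def on_finished(worker, output, error):
    # Same slot signature as `_repos_checked` in Example #6.
    print('valid url?', bool(output), 'error:', error)
    app.quit()

app = QApplication(sys.argv)
api = _ManagerAPI()
worker = api.download_is_valid_url('https://repo.continuum.io/pkgs/metadata.json')
worker.sig_finished.connect(on_finished)
sys.exit(app.exec_())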
Example #6
class _ManagerAPI(QObject):
    """Anaconda Manager API process worker."""

    sig_repodata_updated = Signal(object)
    sig_repodata_errored = Signal()

    def __init__(self):
        """Anaconda Manager API process worker."""
        super(_ManagerAPI, self).__init__()

        # APIs
        self._conda_api = CondaAPI()
        self._client_api = ClientAPI()
        self._download_api = DownloadAPI(load_rc_func=self._conda_api.load_rc)
        self._requests_download_api = RequestsDownloadAPI(
            load_rc_func=self._conda_api.load_rc)
        self.ROOT_PREFIX = self._conda_api.ROOT_PREFIX

        # Vars
        self._checking_repos = None
        self._data_directory = None
        self._files_downloaded = None
        self._repodata_files = None
        self._valid_repos = None

        # Expose some methods for convenient access. Methods return a worker
        self.conda_create = self._conda_api.create
        self.conda_create_yaml = self._conda_api.create_from_yaml
        self.conda_clone = self._conda_api.clone_environment
        self.conda_dependencies = self._conda_api.dependencies
        self.conda_get_condarc_channels = self._conda_api.get_condarc_channels
        self.conda_install = self._conda_api.install
        self.conda_remove = self._conda_api.remove
        self.conda_terminate = self._conda_api.terminate_all_processes
        self.conda_config_add = self._conda_api.config_add
        self.conda_config_remove = self._conda_api.config_remove
        self.pip_list = self._conda_api.pip_list
        self.pip_remove = self._conda_api.pip_remove

        # No workers are returned for these methods
        self.conda_clear_lock = self._conda_api.clear_lock
        self.conda_environment_exists = self._conda_api.environment_exists
        self.conda_get_envs = self._conda_api.get_envs
        self.conda_linked = self._conda_api.linked
        self.conda_get_prefix_envname = self._conda_api.get_prefix_envname
        self.conda_package_version = self._conda_api.package_version
        self.conda_platform = self._conda_api.get_platform

        # These download methods return a worker
        get_api_info = self._requests_download_api.get_api_info
        is_valid_url = self._requests_download_api.is_valid_api_url
        is_valid_channel = self._requests_download_api.is_valid_channel
        terminate = self._requests_download_api.terminate
        self.download_requests = self._requests_download_api.download
        self.download_async = self._download_api.download
        self.download_async_terminate = self._download_api.terminate
        self.download_is_valid_url = self._requests_download_api.is_valid_url
        self.download_is_valid_api_url = is_valid_url
        self.download_get_api_info = lambda: get_api_info(
            self._client_api.get_api_url())
        self.download_is_valid_channel = is_valid_channel
        self.download_requests_terminate = terminate

        # These client methods return a worker
        self.client_store_token = self._client_api.store_token
        self.client_remove_token = self._client_api.remove_token
        self.client_login = self._client_api.login
        self.client_logout = self._client_api.logout
        self.client_load_repodata = self._client_api.load_repodata
        self.client_prepare_packages_data = self._client_api.prepare_model_data
        self.client_user = self._client_api.user
        self.client_domain = self._client_api.domain
        self.client_set_domain = self._client_api.set_domain
        self.client_packages = self._client_api.packages
        self.client_multi_packages = self._client_api.multi_packages
        self.client_organizations = self._client_api.organizations
        self.client_load_token = self._client_api.load_token
        self.client_get_api_url = self._client_api.get_api_url
        self.client_set_api_url = self._client_api.set_api_url

    # --- Helper methods
    # -------------------------------------------------------------------------
    def _set_repo_urls_from_channels(self, channels):
        """
        Convert each channel into a platform-specific repodata URL.

        Channels are assumed to be in normalized URL form.
        """
        repos = []
        sys_platform = self._conda_api.get_platform()

        for channel in channels:
            url = '{0}/{1}/repodata.json.bz2'.format(channel, sys_platform)
            repos.append(url)

        return repos

    def _check_repos(self, repos):
        """Check if repodata urls are valid."""
        self._checking_repos = []
        self._valid_repos = []

        for repo in repos:
            worker = self.download_is_valid_url(repo)
            worker.sig_finished.connect(self._repos_checked)
            worker.repo = repo
            self._checking_repos.append(repo)

    def _repos_checked(self, worker, output, error):
        """Callback for _check_repos."""
        if worker.repo in self._checking_repos:
            self._checking_repos.remove(worker.repo)

        if output:
            self._valid_repos.append(worker.repo)

        if len(self._checking_repos) == 0:
            self._download_repodata(self._valid_repos)

    def _repo_url_to_path(self, repo):
        """Convert a `repo` url to a file path for local storage."""
        repo = repo.replace('http://', '')
        repo = repo.replace('https://', '')
        repo = repo.replace('/', '_')

        return os.sep.join([self._data_directory, repo])

    def _download_repodata(self, checked_repos):
        """Dowload repodata."""
        self._files_downloaded = []
        self._repodata_files = []
        self.__counter = -1

        if checked_repos:
            for repo in checked_repos:
                path = self._repo_url_to_path(repo)
                self._files_downloaded.append(path)
                self._repodata_files.append(path)
                worker = self.download_async(repo, path)
                worker.url = repo
                worker.path = path
                worker.sig_finished.connect(self._repodata_downloaded)
        else:
            # Empty, maybe there is no internet connection
            # Load information from conda-meta and save that file
            path = self._get_repodata_from_meta()
            self._repodata_files = [path]
            self._repodata_downloaded()

    def _get_repodata_from_meta(self):
        """Generate repodata from local meta files."""
        path = os.sep.join([self.ROOT_PREFIX, 'conda-meta'])
        packages = os.listdir(path)
        meta_repodata = {}
        for pkg in packages:
            if pkg.endswith('.json'):
                filepath = os.sep.join([path, pkg])
                with open(filepath, 'r') as f:
                    data = json.load(f)

                if 'files' in data:
                    data.pop('files')
                if 'icondata' in data:
                    data.pop('icondata')

                name = pkg.replace('.json', '')
                meta_repodata[name] = data

        meta_repodata_path = os.sep.join(
            [self._data_directory, 'offline.json'])
        repodata = {'info': [], 'packages': meta_repodata}

        with open(meta_repodata_path, 'w') as f:
            json.dump(repodata,
                      f,
                      sort_keys=True,
                      indent=4,
                      separators=(',', ': '))

        return meta_repodata_path

    def _repodata_downloaded(self, worker=None, output=None, error=None):
        """Callback for _download_repodata."""
        if worker:
            self._files_downloaded.remove(worker.path)

            if worker.path in self._files_downloaded:
                self._files_downloaded.remove(worker.path)

        if len(self._files_downloaded) == 0:
            self.sig_repodata_updated.emit(list(set(self._repodata_files)))

    # --- Public API
    # -------------------------------------------------------------------------
    def repodata_files(self, channels=None):
        """
        Return the repodata paths based on `channels` and the `data_directory`.

        There is no check for validity here.
        """
        if channels is None:
            channels = self.conda_get_condarc_channels()

        repodata_urls = self._set_repo_urls_from_channels(channels)

        repopaths = []

        for repourl in repodata_urls:
            fullpath = self._repo_url_to_path(repourl)
            repopaths.append(fullpath)

        return repopaths

    def set_data_directory(self, data_directory):
        """Set the directory where repodata and metadata are stored."""
        self._data_directory = data_directory

    def update_repodata(self, channels=None):
        """Update repodata from channels or use condarc channels if None."""
        norm_channels = self.conda_get_condarc_channels(channels=channels,
                                                        normalize=True)
        repodata_urls = self._set_repo_urls_from_channels(norm_channels)
        self._check_repos(repodata_urls)

    def update_metadata(self):
        """
        Update the metadata available for packages in repo.continuum.io.

        Returns a download worker.
        """
        if self._data_directory is None:
            raise Exception('Need to call `api.set_data_directory` first.')

        metadata_url = 'https://repo.continuum.io/pkgs/metadata.json'
        filepath = os.sep.join([self._data_directory, 'metadata.json'])
        worker = self.download_requests(metadata_url, filepath)
        return worker

    def check_valid_channel(self,
                            channel,
                            conda_url='https://conda.anaconda.org'):
        """Check if channel is valid."""
        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        if url[-1] == '/':
            url = url[:-1]
        plat = self.conda_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')
        worker = self.download_is_valid_url(repodata_url)
        worker.url = url
        return worker
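Putting the helpers above together: `update_repodata` builds repodata URLs from condarc channels, `_check_repos` validates them, `_download_repodata` fetches them (or falls back to conda-meta when offline), and `sig_repodata_updated` finally reports the local file paths. A sketch of driving that flow from the caller's side; the module name is hypothetical, and a working conda installation plus a Qt event loop are assumed:

# Hedged sketch: `manager_api` is a hypothetical module exposing _ManagerAPI.
import sys
import tempfile
from qtpy.QtWidgets import QApplication    # assumption: qtpy bindings, as in Spyder
from manager_api import _ManagerAPI

def on_repodata_updated(paths):
    # De-duplicated list emitted by `_repodata_downloaded`.
    print('repodata files:', paths)
    app.quit()

app = QApplication(sys.argv)
api = _ManagerAPI()
api.set_data_directory(tempfile.mkdtemp())      # required before any download
api.sig_repodata_updated.connect(on_repodata_updated)
api.update_repodata()                           # None -> use condarc channels
sys.exit(app.exec_())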
Example #7
class _RequestsDownloadAPI(QObject):
    """Download API based on requests."""

    _sig_download_finished = Signal(str, str)
    _sig_download_progress = Signal(str, str, int, int)

    def __init__(self, load_rc_func=None):
        """Download API based on requests."""
        super(_RequestsDownloadAPI, self).__init__()
        self._conda_api = CondaAPI()
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()

        self._load_rc_func = load_rc_func
        self._chunk_size = 1024
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)

    @property
    def proxy_servers(self):
        """Return the proxy servers available from the conda rc config file."""
        if self._load_rc_func is None:
            return {}
        else:
            return self._load_rc_func().get('proxy_servers', {})

    def _clean(self):
        """Check for inactive workers and remove their references."""
        if self._workers:
            for w in self._workers:
                if w.is_finished():
                    self._workers.remove(w)

        if self._threads:
            for t in self._threads:
                if t.isFinished():
                    self._threads.remove(t)
        else:
            self._timer.stop()

    def _start(self):
        """Start the next threaded worker in the queue."""
        if len(self._queue) == 1:
            thread = self._queue.popleft()
            thread.start()
            self._timer.start()

    def _create_worker(self, method, *args, **kwargs):
        """Create a new worker instance."""
        thread = QThread()
        worker = RequestsDownloadWorker(method, args, kwargs)
        worker.moveToThread(thread)
        worker.sig_finished.connect(self._start)
        self._sig_download_finished.connect(worker.sig_download_finished)
        self._sig_download_progress.connect(worker.sig_download_progress)
        worker.sig_finished.connect(thread.quit)
        thread.started.connect(worker.start)
        self._queue.append(thread)
        self._threads.append(thread)
        self._workers.append(worker)
        self._start()
        return worker

    def _download(self, url, path=None, force=False):
        """Callback for download."""
        if path is None:
            path = url.split('/')[-1]

        # Make dir if non existent
        folder = os.path.dirname(os.path.abspath(path))

        if not os.path.isdir(folder):
            os.makedirs(folder)

        # Start actual download
        try:
            r = requests.get(url, stream=True, proxies=self.proxy_servers)
        except Exception as error:
            logger.error(str(error))
            # Bail out early: `r` is undefined past this point, so signal
            # completion and return instead of falling through.
            self._sig_download_finished.emit(url, path)
            return path

        total_size = int(r.headers.get('Content-Length', 0))

        # Check if file exists
        if os.path.isfile(path) and not force:
            file_size = os.path.getsize(path)

            # Check if existing file matches size of requested file
            if file_size == total_size:
                self._sig_download_finished.emit(url, path)
                return path

        # File not found or file size did not match. Download file.
        progress_size = 0
        with open(path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=self._chunk_size):
                if chunk:
                    f.write(chunk)
                    progress_size += len(chunk)
                    self._sig_download_progress.emit(url, path,
                                                     progress_size,
                                                     total_size)
            self._sig_download_finished.emit(url, path)

        return path

    def _is_valid_url(self, url):
        """Callback for is_valid_url."""
        try:
            r = requests.head(url, proxies=self.proxy_servers)
            value = r.status_code in [200]
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_channel(self, channel,
                          conda_url='https://conda.anaconda.org'):
        """Callback for is_valid_channel."""
        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        if url[-1] == '/':
            url = url[:-1]

        plat = self._conda_api.get_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')

        try:
            r = requests.head(repodata_url, proxies=self.proxy_servers)
            value = r.status_code in [200]
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_api_url(self, url):
        """Callback for is_valid_api_url."""
        # Check response is a JSON with ok: 1
        data = {}
        try:
            r = requests.get(url, proxies=self.proxy_servers)
            content = to_text_string(r.content, encoding='utf-8')
            data = json.loads(content)
        except Exception as error:
            logger.error(str(error))

        return data.get('ok', 0) == 1

    # --- Public API
    # -------------------------------------------------------------------------
    def download(self, url, path=None, force=False):
        """Download file given by url and save it to path."""
        logger.debug(str((url, path, force)))
        method = self._download
        return self._create_worker(method, url, path=path, force=force)

    def terminate(self):
        """Terminate all workers and threads."""
        for t in self._threads:
            t.quit()
        self._threads = []
        self._workers = []

    def is_valid_url(self, url, non_blocking=True):
        """Check if url is valid."""
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_url(url)

    def is_valid_api_url(self, url, non_blocking=True):
        """Check if anaconda api url is valid."""
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_api_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_api_url(url=url)

    def is_valid_channel(self,
                         channel,
                         conda_url='https://conda.anaconda.org',
                         non_blocking=True):
        """Check if a conda channel is valid."""
        logger.debug(str((channel, conda_url)))
        if non_blocking:
            method = self._is_valid_channel
            return self._create_worker(method, channel, conda_url)
        else:
            return self._is_valid_channel(channel, conda_url=conda_url)

    def get_api_info(self, url):
        """Query anaconda api info."""
        data = {}
        try:
            r = requests.get(url, proxies=self.proxy_servers)
            content = to_text_string(r.content, encoding='utf-8')
            data = json.loads(content)
            if not data:
                data['api_url'] = url
            if 'conda_url' not in data:
                data['conda_url'] = 'https://conda.anaconda.org'
        except Exception as error:
            logger.error(str(error))

        return data
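In this class, `download()` queues a `RequestsDownloadWorker` on its own `QThread` and returns the worker right away; `_create_worker` forwards the API's internal `_sig_download_progress(url, path, progress, total)` and `_sig_download_finished(url, path)` signals onto the worker, so callers simply connect to the worker they get back. A hedged sketch (module name hypothetical, qtpy assumed):

# Hedged sketch: `download_api` is a hypothetical module exposing this class.
import sys
from qtpy.QtWidgets import QApplication        # assumption: qtpy bindings, as in Spyder
from download_api import _RequestsDownloadAPI

def on_progress(url, path, done, total):
    print('{0}: {1}/{2} bytes'.format(path, done, total))

def on_finished(url, path):
    print('saved', url, '->', path)
    app.quit()

app = QApplication(sys.argv)
api = _RequestsDownloadAPI()
worker = api.download('https://repo.continuum.io/pkgs/metadata.json',
                      path='metadata.json')
worker.sig_download_progress.connect(on_progress)
worker.sig_download_finished.connect(on_finished)
sys.exit(app.exec_())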
class _RequestsDownloadAPI(QObject):
    """
    """
    _sig_download_finished = Signal(str, str)
    _sig_download_progress = Signal(str, str, int, int)

    def __init__(self):
        super(_RequestsDownloadAPI, self).__init__()
        self._conda_api = CondaAPI()
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()

        self._chunk_size = 1024
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)

    def _clean(self):
        """
        Periodically check for inactive workers and remove their references.
        """
        if self._workers:
            for w in self._workers:
                if w.is_finished():
                    self._workers.remove(w)

        if self._threads:
            for t in self._threads:
                if t.isFinished():
                    self._threads.remove(t)
        else:
            self._timer.stop()

    def _start(self):
        """
        """
        if len(self._queue) == 1:
            thread = self._queue.popleft()
            thread.start()
            self._timer.start()

    def _create_worker(self, method, *args, **kwargs):
        """
        """
        # FIXME: this might be heavy...
        thread = QThread()
        worker = RequestsDownloadWorker(method, args, kwargs)
        worker.moveToThread(thread)
        worker.sig_finished.connect(self._start)
        self._sig_download_finished.connect(worker.sig_download_finished)
        self._sig_download_progress.connect(worker.sig_download_progress)
        worker.sig_finished.connect(thread.quit)
        thread.started.connect(worker.start)
        self._queue.append(thread)
        self._threads.append(thread)
        self._workers.append(worker)
        self._start()
        return worker

    def _download(self, url, path=None, force=False):
        """
        """
        if path is None:
            path = url.split('/')[-1]

        # Make dir if non existent
        folder = os.path.dirname(os.path.abspath(path))

        if not os.path.isdir(folder):
            os.makedirs(folder)

        # Start actual download
        try:
            r = requests.get(url, stream=True)
        except Exception as error:
            logger.error(str(error))
            # Bail out early: `r` is undefined past this point, so signal
            # completion and return instead of falling through.
            self._sig_download_finished.emit(url, path)
            return path

        total_size = int(r.headers.get('Content-Length', 0))

        # Check if file exists
        if os.path.isfile(path) and not force:
            file_size = os.path.getsize(path)

            # Check if existing file matches size of requested file
            if file_size == total_size:
                self._sig_download_finished.emit(url, path)
                return path

        # File not found or file size did not match. Download file.
        progress_size = 0
        with open(path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=self._chunk_size):
                if chunk:
                    f.write(chunk)
                    progress_size += len(chunk)
                    self._sig_download_progress.emit(url, path, progress_size,
                                                     total_size)
            self._sig_download_finished.emit(url, path)

        return path

    def _is_valid_url(self, url):
        try:
            r = requests.head(url)
            value = r.status_code in [200]
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_channel(self,
                          channel,
                          conda_url='https://conda.anaconda.org'):
        """
        """
        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        if url[-1] == '/':
            url = url[:-1]

        plat = self._conda_api.get_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')

        try:
            r = requests.head(repodata_url)
            value = r.status_code in [200]
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_api_url(self, url):
        """
        """
        # Check response is a JSON with ok: 1
        data = {}
        try:
            r = requests.get(url)
            content = to_text_string(r.content, encoding='utf-8')
            data = json.loads(content)
        except Exception as error:
            logger.error(str(error))

        return data.get('ok', 0) == 1

    def download(self, url, path=None, force=False):
        logger.debug(str((url, path, force)))
        method = self._download
        return self._create_worker(method, url, path=path, force=force)

    def terminate(self):
        for t in self._threads:
            t.quit()
        self._threads = []
        self._workers = []

    def is_valid_url(self, url, non_blocking=True):
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_url(url)

    def is_valid_api_url(self, url, non_blocking=True):
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_api_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_api_url(url=url)

    def is_valid_channel(self,
                         channel,
                         conda_url='https://conda.anaconda.org',
                         non_blocking=True):
        logger.debug(str((channel, conda_url)))
        if non_blocking:
            method = self._is_valid_channel
            return self._create_worker(method, channel, conda_url)
        else:
            return self._is_valid_channel(channel, conda_url=conda_url)
class _RequestsDownloadAPI(QObject):
    """
    """
    _sig_download_finished = Signal(str, str)
    _sig_download_progress = Signal(str, str, int, int)

    def __init__(self):
        super(_RequestsDownloadAPI, self).__init__()
        self._conda_api = CondaAPI()
        self._queue = deque()
        self._threads = []
        self._workers = []
        self._timer = QTimer()

        self._chunk_size = 1024
        self._timer.setInterval(1000)
        self._timer.timeout.connect(self._clean)

    def _clean(self):
        """
        Periodically check for inactive workers and remove their references.
        """
        if self._workers:
            for w in self._workers:
                if w.is_finished():
                    self._workers.remove(w)

        if self._threads:
            for t in self._threads:
                if t.isFinished():
                    self._threads.remove(t)
        else:
            self._timer.stop()

    def _start(self):
        """
        """
        if len(self._queue) == 1:
            thread = self._queue.popleft()
            thread.start()
            self._timer.start()

    def _create_worker(self, method, *args, **kwargs):
        """
        """
        # FIXME: this might be heavy...
        thread = QThread()
        worker = RequestsDownloadWorker(method, args, kwargs)
        worker.moveToThread(thread)
        worker.sig_finished.connect(self._start)
        self._sig_download_finished.connect(worker.sig_download_finished)
        self._sig_download_progress.connect(worker.sig_download_progress)
        worker.sig_finished.connect(thread.quit)
        thread.started.connect(worker.start)
        self._queue.append(thread)
        self._threads.append(thread)
        self._workers.append(worker)
        self._start()
        return worker

    def _download(self, url, path=None, force=False):
        """
        """
        if path is None:
            path = url.split('/')[-1]

        # Make dir if non existent
        folder = os.path.dirname(os.path.abspath(path))

        if not os.path.isdir(folder):
            os.makedirs(folder)

        # Start actual download
        try:
            r = requests.get(url, stream=True)
        except Exception as error:
            logger.error(str(error))
            # Bail out early: `r` is undefined past this point, so signal
            # completion and return instead of falling through.
            self._sig_download_finished.emit(url, path)
            return path

        total_size = int(r.headers.get('Content-Length', 0))

        # Check if file exists
        if os.path.isfile(path) and not force:
            file_size = os.path.getsize(path)

            # Check if existing file matches size of requested file
            if file_size == total_size:
                self._sig_download_finished.emit(url, path)
                return path

        # File not found or file size did not match. Download file.
        progress_size = 0
        with open(path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=self._chunk_size):
                if chunk:
                    f.write(chunk)
                    progress_size += len(chunk)
                    self._sig_download_progress.emit(url, path,
                                                     progress_size,
                                                     total_size)
            self._sig_download_finished.emit(url, path)

        return path

    def _is_valid_url(self, url):
        try:
            r = requests.head(url)
            value = r.status_code in [200]
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_channel(self, channel,
                          conda_url='https://conda.anaconda.org'):
        """
        """
        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        if url[-1] == '/':
            url = url[:-1]

        plat = self._conda_api.get_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')

        try:
            r = requests.head(repodata_url)
            value = r.status_code in [200]
        except Exception as error:
            logger.error(str(error))
            value = False

        return value

    def _is_valid_api_url(self, url):
        """
        """
        # Check response is a JSON with ok: 1
        data = {}
        try:
            r = requests.get(url)
            content = to_text_string(r.content, encoding='utf-8')
            data = json.loads(content)
        except Exception as error:
            logger.error(str(error))

        return data.get('ok', 0) == 1

    def download(self, url, path=None, force=False):
        logger.debug(str((url, path, force)))
        method = self._download
        return self._create_worker(method, url, path=path, force=force)

    def terminate(self):
        for t in self._threads:
            t.quit()
        self._threads = []
        self._workers = []

    def is_valid_url(self, url, non_blocking=True):
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_url(url)

    def is_valid_api_url(self, url, non_blocking=True):
        logger.debug(str((url)))
        if non_blocking:
            method = self._is_valid_api_url
            return self._create_worker(method, url)
        else:
            return self._is_valid_api_url(url=url)

    def is_valid_channel(self, channel,
                         conda_url='https://conda.anaconda.org',
                         non_blocking=True):
        logger.debug(str((channel, conda_url)))
        if non_blocking:
            method = self._is_valid_channel
            return self._create_worker(method, channel, conda_url)
        else:
            return self._is_valid_channel(channel, conda_url=conda_url)
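When no Qt event loop is running, the validation helpers can be called with `non_blocking=False`, which skips the worker machinery and returns a plain boolean from the underlying requests call. A short sketch (module name hypothetical, channel name only an example):

# Hedged sketch: `download_api` is a hypothetical module exposing this class.
from qtpy.QtCore import QCoreApplication       # assumption: qtpy bindings, as in Spyder
from download_api import _RequestsDownloadAPI

app = QCoreApplication([])                     # enough for QObject/QTimer construction
api = _RequestsDownloadAPI()

# Blocking calls: no QThread, no worker, just the boolean result.
print(api.is_valid_url('https://repo.continuum.io/pkgs/metadata.json',
                       non_blocking=False))
print(api.is_valid_channel('anaconda', non_blocking=False))   # example channel name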
    def __init__(self):
        """Anaconda Manager API process worker."""
        super(_ManagerAPI, self).__init__()

        # APIs
        self._conda_api = CondaAPI()
        self._client_api = ClientAPI()
        self._download_api = DownloadAPI(load_rc_func=self._conda_api.load_rc)
        self._requests_download_api = RequestsDownloadAPI(
            load_rc_func=self._conda_api.load_rc)
        self.ROOT_PREFIX = self._conda_api.ROOT_PREFIX

        # Vars
        self._checking_repos = None
        self._data_directory = None
        self._files_downloaded = None
        self._repodata_files = None
        self._valid_repos = None

        # Expose some methods for convenient access. Methods return a worker
        self.conda_create = self._conda_api.create
        self.conda_create_yaml = self._conda_api.create_from_yaml
        self.conda_clone = self._conda_api.clone_environment
        self.conda_dependencies = self._conda_api.dependencies
        self.conda_get_condarc_channels = self._conda_api.get_condarc_channels
        self.conda_install = self._conda_api.install
        self.conda_remove = self._conda_api.remove
        self.conda_terminate = self._conda_api.terminate_all_processes
        self.conda_config_add = self._conda_api.config_add
        self.conda_config_remove = self._conda_api.config_remove
        self.pip_list = self._conda_api.pip_list
        self.pip_remove = self._conda_api.pip_remove

        # No workers are returned for these methods
        self.conda_clear_lock = self._conda_api.clear_lock
        self.conda_environment_exists = self._conda_api.environment_exists
        self.conda_get_envs = self._conda_api.get_envs
        self.conda_linked = self._conda_api.linked
        self.conda_get_prefix_envname = self._conda_api.get_prefix_envname
        self.conda_package_version = self._conda_api.package_version
        self.conda_platform = self._conda_api.get_platform

        # These download methods return a worker
        get_api_info = self._requests_download_api.get_api_info
        is_valid_url = self._requests_download_api.is_valid_api_url
        is_valid_channel = self._requests_download_api.is_valid_channel
        terminate = self._requests_download_api.terminate
        self.download_requests = self._requests_download_api.download
        self.download_async = self._download_api.download
        self.download_async_terminate = self._download_api.terminate
        self.download_is_valid_url = self._requests_download_api.is_valid_url
        self.download_is_valid_api_url = is_valid_url
        self.download_get_api_info = lambda: get_api_info(
            self._client_api.get_api_url())
        self.download_is_valid_channel = is_valid_channel
        self.download_requests_terminate = terminate

        # These client methods return a worker
        self.client_store_token = self._client_api.store_token
        self.client_remove_token = self._client_api.remove_token
        self.client_login = self._client_api.login
        self.client_logout = self._client_api.logout
        self.client_load_repodata = self._client_api.load_repodata
        self.client_prepare_packages_data = self._client_api.prepare_model_data
        self.client_user = self._client_api.user
        self.client_domain = self._client_api.domain
        self.client_set_domain = self._client_api.set_domain
        self.client_packages = self._client_api.packages
        self.client_multi_packages = self._client_api.multi_packages
        self.client_organizations = self._client_api.organizations
        self.client_load_token = self._client_api.load_token
        self.client_get_api_url = self._client_api.get_api_url
        self.client_set_api_url = self._client_api.set_api_url
class _ManagerAPI(QObject):
    """Anaconda Manager API process worker."""

    sig_repodata_updated = Signal(object)
    sig_repodata_errored = Signal()

    def __init__(self):
        """Anaconda Manager API process worker."""
        super(_ManagerAPI, self).__init__()

        # APIs
        self._conda_api = CondaAPI()
        self._client_api = ClientAPI()
        self._download_api = DownloadAPI(load_rc_func=self._conda_api.load_rc)
        self._requests_download_api = RequestsDownloadAPI(
            load_rc_func=self._conda_api.load_rc)
        self.ROOT_PREFIX = self._conda_api.ROOT_PREFIX

        # Vars
        self._checking_repos = None
        self._data_directory = None
        self._files_downloaded = None
        self._repodata_files = None
        self._valid_repos = None

        # Expose some methods for convenient access. Methods return a worker
        self.conda_create = self._conda_api.create
        self.conda_create_yaml = self._conda_api.create_from_yaml
        self.conda_clone = self._conda_api.clone_environment
        self.conda_dependencies = self._conda_api.dependencies
        self.conda_get_condarc_channels = self._conda_api.get_condarc_channels
        self.conda_install = self._conda_api.install
        self.conda_remove = self._conda_api.remove
        self.conda_terminate = self._conda_api.terminate_all_processes
        self.conda_config_add = self._conda_api.config_add
        self.conda_config_remove = self._conda_api.config_remove
        self.pip_list = self._conda_api.pip_list
        self.pip_remove = self._conda_api.pip_remove

        # No workers are returned for these methods
        self.conda_clear_lock = self._conda_api.clear_lock
        self.conda_environment_exists = self._conda_api.environment_exists
        self.conda_get_envs = self._conda_api.get_envs
        self.conda_linked = self._conda_api.linked
        self.conda_get_prefix_envname = self._conda_api.get_prefix_envname
        self.conda_package_version = self._conda_api.package_version
        self.conda_platform = self._conda_api.get_platform

        # These download methods return a worker
        get_api_info = self._requests_download_api.get_api_info
        is_valid_url = self._requests_download_api.is_valid_api_url
        is_valid_channel = self._requests_download_api.is_valid_channel
        terminate = self._requests_download_api.terminate
        self.download_requests = self._requests_download_api.download
        self.download_async = self._download_api.download
        self.download_async_terminate = self._download_api.terminate
        self.download_is_valid_url = self._requests_download_api.is_valid_url
        self.download_is_valid_api_url = is_valid_url
        self.download_get_api_info = lambda: get_api_info(
            self._client_api.get_api_url())
        self.download_is_valid_channel = is_valid_channel
        self.download_requests_terminate = terminate

        # These client methods return a worker
        self.client_store_token = self._client_api.store_token
        self.client_remove_token = self._client_api.remove_token
        self.client_login = self._client_api.login
        self.client_logout = self._client_api.logout
        self.client_load_repodata = self._client_api.load_repodata
        self.client_prepare_packages_data = self._client_api.prepare_model_data
        self.client_user = self._client_api.user
        self.client_domain = self._client_api.domain
        self.client_set_domain = self._client_api.set_domain
        self.client_packages = self._client_api.packages
        self.client_multi_packages = self._client_api.multi_packages
        self.client_organizations = self._client_api.organizations
        self.client_load_token = self._client_api.load_token
        self.client_get_api_url = self._client_api.get_api_url
        self.client_set_api_url = self._client_api.set_api_url

    # --- Helper methods
    # -------------------------------------------------------------------------
    def _set_repo_urls_from_channels(self, channels):
        """
        Convert each channel into a platform-specific repodata URL.

        Channels are assumed to be in normalized URL form.
        """
        repos = []
        sys_platform = self._conda_api.get_platform()

        for channel in channels:
            url = '{0}/{1}/repodata.json.bz2'.format(channel, sys_platform)
            repos.append(url)

        return repos

    def _check_repos(self, repos):
        """Check if repodata urls are valid."""
        self._checking_repos = []
        self._valid_repos = []

        for repo in repos:
            worker = self.download_is_valid_url(repo)
            worker.sig_finished.connect(self._repos_checked)
            worker.repo = repo
            self._checking_repos.append(repo)

    def _repos_checked(self, worker, output, error):
        """Callback for _check_repos."""
        if worker.repo in self._checking_repos:
            self._checking_repos.remove(worker.repo)

        if output:
            self._valid_repos.append(worker.repo)

        if len(self._checking_repos) == 0:
            self._download_repodata(self._valid_repos)

    def _repo_url_to_path(self, repo):
        """Convert a `repo` url to a file path for local storage."""
        repo = repo.replace('http://', '')
        repo = repo.replace('https://', '')
        repo = repo.replace('/', '_')

        return os.sep.join([self._data_directory, repo])

    def _download_repodata(self, checked_repos):
        """Dowload repodata."""
        self._files_downloaded = []
        self._repodata_files = []
        self.__counter = -1

        if checked_repos:
            for repo in checked_repos:
                path = self._repo_url_to_path(repo)
                self._files_downloaded.append(path)
                self._repodata_files.append(path)
                worker = self.download_async(repo, path)
                worker.url = repo
                worker.path = path
                worker.sig_finished.connect(self._repodata_downloaded)
        else:
            # Empty, maybe there is no internet connection
            # Load information from conda-meta and save that file
            path = self._get_repodata_from_meta()
            self._repodata_files = [path]
            self._repodata_downloaded()

    def _get_repodata_from_meta(self):
        """Generate repodata from local meta files."""
        path = os.sep.join([self.ROOT_PREFIX, 'conda-meta'])
        packages = os.listdir(path)
        meta_repodata = {}
        for pkg in packages:
            if pkg.endswith('.json'):
                filepath = os.sep.join([path, pkg])
                with open(filepath, 'r') as f:
                    data = json.load(f)

                if 'files' in data:
                    data.pop('files')
                if 'icondata' in data:
                    data.pop('icondata')

                name = pkg.replace('.json', '')
                meta_repodata[name] = data

        meta_repodata_path = os.sep.join([self._data_directory,
                                          'offline.json'])
        repodata = {'info': [],
                    'packages': meta_repodata}

        with open(meta_repodata_path, 'w') as f:
            json.dump(repodata, f, sort_keys=True,
                      indent=4, separators=(',', ': '))

        return meta_repodata_path

    def _repodata_downloaded(self, worker=None, output=None, error=None):
        """Callback for _download_repodata."""
        if worker:
            self._files_downloaded.remove(worker.path)

            if worker.path in self._files_downloaded:
                self._files_downloaded.remove(worker.path)

        if len(self._files_downloaded) == 0:
            self.sig_repodata_updated.emit(list(set(self._repodata_files)))

    # --- Public API
    # -------------------------------------------------------------------------
    def repodata_files(self, channels=None):
        """
        Return the repodata paths based on `channels` and the `data_directory`.

        There is no check for validity here.
        """
        if channels is None:
            channels = self.conda_get_condarc_channels()

        repodata_urls = self._set_repo_urls_from_channels(channels)

        repopaths = []

        for repourl in repodata_urls:
            fullpath = self._repo_url_to_path(repourl)
            repopaths.append(fullpath)

        return repopaths

    def set_data_directory(self, data_directory):
        """Set the directory where repodata and metadata are stored."""
        self._data_directory = data_directory

    def update_repodata(self, channels=None):
        """Update repodata from channels or use condarc channels if None."""
        norm_channels = self.conda_get_condarc_channels(channels=channels,
                                                        normalize=True)
        repodata_urls = self._set_repo_urls_from_channels(norm_channels)
        self._check_repos(repodata_urls)

    def update_metadata(self):
        """
        Update the metadata available for packages in repo.continuum.io.

        Returns a download worker.
        """
        if self._data_directory is None:
            raise Exception('Need to call `api.set_data_directory` first.')

        metadata_url = 'https://repo.continuum.io/pkgs/metadata.json'
        filepath = os.sep.join([self._data_directory, 'metadata.json'])
        worker = self.download_requests(metadata_url, filepath)
        return worker

    def check_valid_channel(self,
                            channel,
                            conda_url='https://conda.anaconda.org'):
        """Check if channel is valid."""
        if channel.startswith('https://') or channel.startswith('http://'):
            url = channel
        else:
            url = "{0}/{1}".format(conda_url, channel)

        if url[-1] == '/':
            url = url[:-1]
        plat = self.conda_platform()
        repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')
        worker = self.download_is_valid_url(repodata_url)
        worker.url = url
        return worker
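`check_valid_channel` normalizes the channel into a platform-specific `repodata.json` URL and delegates to `download_is_valid_url`, so the result arrives through the returned worker's `sig_finished(worker, output, error)` signal. A closing sketch of that call (module name hypothetical, qtpy and a Qt event loop assumed):

# Hedged sketch: `manager_api` is a hypothetical module exposing _ManagerAPI.
import sys
from qtpy.QtWidgets import QApplication    # assumption: qtpy bindings, as in Spyder
from manager_api import _ManagerAPI

def on_checked(worker, output, error):
    # `worker.url` is attached by `check_valid_channel` before returning.
    print(worker.url, 'valid:', bool(output), 'error:', error)
    app.quit()

app = QApplication(sys.argv)
api = _ManagerAPI()
worker = api.check_valid_channel('anaconda')   # example channel on anaconda.org
worker.sig_finished.connect(on_checked)
sys.exit(app.exec_())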