def _run_multiple_actions(self, worker=None, output=None, error=None):
    """Run the next queued action and collect any conda errors."""
    logger.error(str(error))

    if output and isinstance(output, dict):
        conda_error_type = output.get('error_type', None)
        conda_error = output.get('error', None)

        if conda_error_type or conda_error:
            self.conda_errors.append((conda_error_type, conda_error))
            logger.error((conda_error_type, conda_error))

    if self._multiple_process:
        status, func = self._multiple_process.popleft()
        self.update_status(status)
        worker = func()
        worker.sig_finished.connect(self._run_multiple_actions)
        worker.sig_partial.connect(self._partial_output_ready)
    else:
        if self.conda_errors and self.message_box_error:
            text = "The following errors occurred:"
            error = ''
            for conda_error in self.conda_errors:
                error += str(conda_error[0]) + ':\n'
                error += str(conda_error[1]) + '\n\n'
            dlg = self.message_box_error(text=text, error=error,
                                         title='Conda process error')
            dlg.setMinimumWidth(400)
            dlg.exec_()

        self.update_status('', hide=False)
        self.setup()
def _conda_process_ready(self, worker, output, error):
    """Callback for when the conda process has finished."""
    if error is not None:
        status = _('there was an error')
        self.update_status(hide=False, message=status)
    else:
        self.update_status(hide=True)

    conda_error = None
    conda_error_type = None
    if output and isinstance(output, dict):
        conda_error_type = output.get('error_type')
        conda_error = output.get('error')

        if conda_error_type or conda_error:
            logger.error((conda_error_type, conda_error))

    dic = self._temporal_action_dic
    if dic['action'] == C.ACTION_CREATE:
        self.sig_environment_created.emit(conda_error, conda_error_type)
    elif dic['action'] == C.ACTION_CLONE:
        self.sig_environment_cloned.emit(conda_error, conda_error_type)
    elif dic['action'] == C.ACTION_REMOVE_ENV:
        self.sig_environment_removed.emit(conda_error, conda_error_type)

    self.setup()
def _user_private_packages_ready(self, worker, output, error):
    """Callback for the private packages query; chains a pip list request."""
    if error:
        logger.error(error)
    else:
        logger.debug('')

    packages = worker.packages
    apps = worker.apps
    worker = self.api.pip_list(prefix=self.prefix)
    worker.sig_finished.connect(self._pip_list_ready)
    worker.packages = packages
    worker.apps = apps

    # private_packages = {}
    # if output:
    #     all_private_packages = output
    #     for item in all_private_packages:
    #         name = item.get('name', '')
    #         public = item.get('public', True)
    #         package_types = item.get('package_types', [])
    #         latest_version = item.get('latest_version', '')
    #         if name and not public and 'conda' in package_types:
    #             private_packages[name] = {
    #                 'versions': item.get('versions', []),
    #                 'app_entry': {},
    #                 'type': {},
    #                 'size': {},
    #                 'latest_version': latest_version,
    #             }

    worker.private_packages = output
def _setup_packages(self, worker, data, error):
    """Callback for when package data is ready to populate the table."""
    if error:
        logger.error(error)
    else:
        logger.debug('')

    combobox_index = self.combobox_filter.currentIndex()
    status = C.PACKAGE_STATUS[combobox_index]
    packages = worker.packages

    # Remove blacklisted packages
    for package in self.package_blacklist:
        if package in packages:
            packages.pop(package)
            # Drop the matching rows without mutating the list
            # while iterating over it
            data = [row for row in data if row[C.COL_NAME] != package]

    self.table.setup_model(packages, data, self._metadata_links)
    self.combobox_filter.setCurrentIndex(combobox_index)
    self.filter_package(status)

    if self._current_model_index:
        self.table.setCurrentIndex(self._current_model_index)
        self.table.verticalScrollBar().setValue(self._current_table_scroll)

    if error:
        self.update_status(error, False)

    self.sig_packages_ready.emit()
    self.table.setFocus()
def _handle_proxy_auth(proxy, authenticator):
    """Callback for proxy authentication requests."""
    # authenticator.setUser('1')
    # authenticator.setPassword('1')
    logger.error(str(('Proxy authentication error. '
                      'Enter credentials in condarc', proxy, authenticator)))
def _is_valid_url(self, url):
    try:
        r = requests.head(url)
        value = r.status_code in [200]
    except Exception as error:
        logger.error(str(error))
        value = False

    return value
def _request_finished(self, reply):
    """Callback for download once the request has finished."""
    url = to_text_string(reply.url().toEncoded(), encoding='utf-8')

    if url in self._paths:
        path = self._paths[url]
    if url in self._workers:
        worker = self._workers[url]

    if url in self._head_requests:
        error = reply.error()
        # print(url, error)
        if error:
            logger.error(str(('Head Reply Error:', error)))
            worker.sig_download_finished.emit(url, path)
            worker.sig_finished.emit(worker, path, error)
            return

        self._head_requests.pop(url)
        start_download = not bool(error)
        header_pairs = reply.rawHeaderPairs()
        headers = {}

        for hp in header_pairs:
            headers[to_text_string(hp[0]).lower()] = to_text_string(hp[1])

        total_size = int(headers.get('content-length', 0))

        # Check if file exists
        if os.path.isfile(path):
            file_size = os.path.getsize(path)

            # Check if existing file matches size of requested file
            start_download = file_size != total_size

        if start_download:
            # File sizes don't match, hence download file
            qurl = QUrl(url)
            request = QNetworkRequest(qurl)
            self._get_requests[url] = request
            reply = self._manager.get(request)
            error = reply.error()
            if error:
                logger.error(str(('Reply Error:', error)))

            reply.downloadProgress.connect(
                lambda r, t, w=worker: self._progress(r, t, w))
        else:
            # File sizes match; don't download the file again (or error?)
            worker.finished = True
            worker.sig_download_finished.emit(url, path)
            worker.sig_finished.emit(worker, path, None)
    elif url in self._get_requests:
        data = reply.readAll()
        self._save(url, path, data)
def _is_valid_url(self, url):
    """Callback for is_valid_url."""
    try:
        r = requests.head(url, proxies=self.proxy_servers)
        value = r.status_code in [200]
    except Exception as error:
        logger.error(str(error))
        value = False

    return value
def communicate(self):
    """Retrieve the output and error streams once the process finishes."""
    self._communicate_first = True
    self._process.waitForFinished()

    if self._partial_stdout is None:
        raw_stdout = self._process.readAllStandardOutput()
        stdout = handle_qbytearray(raw_stdout, _CondaAPI.UTF8)
    else:
        stdout = self._partial_stdout

    raw_stderr = self._process.readAllStandardError()
    stderr = handle_qbytearray(raw_stderr, _CondaAPI.UTF8)
    result = [stdout.encode(_CondaAPI.UTF8), stderr.encode(_CondaAPI.UTF8)]

    # FIXME: Why does anaconda client print to stderr???
    if PY2:
        stderr = stderr.decode()
    if 'using anaconda cloud api site' not in stderr.lower():
        if stderr.strip() and self._conda:
            raise Exception('{0}:\n'
                            'STDERR:\n{1}\nEND'
                            ''.format(' '.join(self._cmd_list), stderr))
        # elif stderr.strip() and self._pip:
        #     raise PipError(self._cmd_list)
    else:
        result[-1] = ''

    if self._parse and stdout:
        try:
            result = json.loads(stdout), result[-1]
        except ValueError as error:
            result = stdout, error

        if 'error' in result[0]:
            error = '{0}: {1}'.format(" ".join(self._cmd_list),
                                      result[0]['error'])
            result = result[0], error

    if self._callback:
        result = (self._callback(result[0], result[-1],
                                 **self._extra_kwargs),
                  result[-1])

    self._result = result
    self.sig_finished.emit(self, result[0], result[-1])

    if result[-1]:
        logger.error(str(('error', result[-1])))

    self._fired = True

    return result
def _is_valid_api_url(self, url):
    """Callback for is_valid_api_url."""
    # Check response is a JSON with ok: 1
    data = {}
    try:
        r = requests.get(url, proxies=self.proxy_servers)
        content = to_text_string(r.content, encoding='utf-8')
        data = json.loads(content)
    except Exception as error:
        logger.error(str(error))

    return data.get('ok', 0) == 1
def get_api_info(self, url):
    """Query anaconda api info."""
    data = {}
    try:
        r = requests.get(url, proxies=self.proxy_servers)
        content = to_text_string(r.content, encoding='utf-8')
        data = json.loads(content)
        if not data:
            data['api_url'] = url
        if 'conda_url' not in data:
            data['conda_url'] = 'https://conda.anaconda.org'
    except Exception as error:
        logger.error(str(error))

    return data
def _download(self, url, path=None, force=False):
    """Callback for download."""
    if path is None:
        path = url.split('/')[-1]

    # Make dir if non-existent
    folder = os.path.dirname(os.path.abspath(path))

    if not os.path.isdir(folder):
        os.makedirs(folder)

    # Start actual download
    try:
        r = requests.get(url, stream=True)
    except Exception as error:
        logger.error(str(error))
        # Break if error found; otherwise `r` is undefined below
        self._sig_download_finished.emit(url, path)
        return path

    total_size = int(r.headers.get('Content-Length', 0))

    # Check if file exists
    if os.path.isfile(path) and not force:
        file_size = os.path.getsize(path)

        # Check if existing file matches size of requested file
        if file_size == total_size:
            self._sig_download_finished.emit(url, path)
            return path

    # File not found or file size did not match. Download file.
    progress_size = 0
    with open(path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=self._chunk_size):
            if chunk:
                f.write(chunk)
                progress_size += len(chunk)
                self._sig_download_progress.emit(url, path,
                                                 progress_size,
                                                 total_size)

    self._sig_download_finished.emit(url, path)

    return path
def _prepare_model_data(self, worker=None, output=None, error=None):
    """Callback to start assembling the packages model data."""
    if error:
        logger.error(error)
    else:
        logger.debug('')

    packages, apps = output

    # worker = self.api.pip_list(prefix=self.prefix)
    # worker.sig_finished.connect(self._pip_list_ready)

    logins = self.get_logged_user_list_channels()
    worker = self.api.client_multi_packages(logins=logins,
                                            access='private')
    worker.sig_finished.connect(self._user_private_packages_ready)
    worker.packages = packages
    worker.apps = apps
def _download(self, url, path=None, force=False):
    """Callback for download."""
    if path is None:
        path = url.split('/')[-1]

    # Make dir if non-existent
    folder = os.path.dirname(os.path.abspath(path))

    if not os.path.isdir(folder):
        os.makedirs(folder)

    # Start actual download
    try:
        r = requests.get(url, stream=True, proxies=self.proxy_servers)
    except Exception as error:
        logger.error(str(error))
        # Break if error found; otherwise `r` is undefined below
        self._sig_download_finished.emit(url, path)
        return path

    total_size = int(r.headers.get('Content-Length', 0))

    # Check if file exists
    if os.path.isfile(path) and not force:
        file_size = os.path.getsize(path)

        # Check if existing file matches size of requested file
        if file_size == total_size:
            self._sig_download_finished.emit(url, path)
            return path

    # File not found or file size did not match. Download file.
    progress_size = 0
    with open(path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=self._chunk_size):
            if chunk:
                f.write(chunk)
                progress_size += len(chunk)
                self._sig_download_progress.emit(url, path,
                                                 progress_size,
                                                 total_size)

    self._sig_download_finished.emit(url, path)

    return path
def _metadata_updated(self, worker, path, error):
    """Callback for when the metadata file has been downloaded."""
    if error:
        logger.error(error)
    else:
        logger.debug('')

    if path and osp.isfile(path):
        with open(path, 'r') as f:
            data = f.read()
        try:
            self._metadata = json.loads(data)
        except Exception:
            self._metadata = {}
    else:
        self._metadata = {}

    self.api.update_repodata(self._channels)
def _save(self, url, path, data):
    """Save `data` of downloaded `url` in `path`."""
    worker = self._workers[url]
    path = self._paths[url]

    if len(data):
        try:
            with open(path, 'wb') as f:
                f.write(data)
        except Exception:
            logger.error((url, path))

    # Clean up
    worker.finished = True
    worker.sig_download_finished.emit(url, path)
    worker.sig_finished.emit(worker, path, None)
    self._get_requests.pop(url)
    self._workers.pop(url)
    self._paths.pop(url)
def _is_valid_channel(self, channel,
                      conda_url='https://conda.anaconda.org'):
    """Callback for is_valid_channel."""
    if channel.startswith('https://') or channel.startswith('http://'):
        url = channel
    else:
        url = "{0}/{1}".format(conda_url, channel)

    if url[-1] == '/':
        url = url[:-1]

    plat = self._conda_api.get_platform()
    repodata_url = "{0}/{1}/{2}".format(url, plat, 'repodata.json')

    try:
        r = requests.head(repodata_url, proxies=self.proxy_servers)
        value = r.status_code in [200]
    except Exception as error:
        logger.error(str(error))
        value = False

    return value
def _load_repodata(self, filepaths, extra_data={}, metadata={}):
    """
    Load all the available packages information for downloaded repodata
    files (repo.continuum.io), additional data provided (anaconda cloud),
    and additional metadata, and merge them into a single set of packages
    and apps.
    """
    repodata = []
    for filepath in filepaths:
        compressed = filepath.endswith('.bz2')
        mode = 'rb' if filepath.endswith('.bz2') else 'r'

        if os.path.isfile(filepath):
            with open(filepath, mode) as f:
                raw_data = f.read()

            if compressed:
                data = bz2.decompress(raw_data)
            else:
                data = raw_data

            try:
                data = json.loads(to_text_string(data, 'UTF-8'))
            except Exception as error:
                logger.error(str(error))
                data = {}

            repodata.append(data)

    all_packages = {}
    for data in repodata:
        packages = data.get('packages', {})
        for canonical_name in packages:
            data = packages[canonical_name]
            name, version, build = tuple(canonical_name.rsplit('-', 2))

            if name not in all_packages:
                all_packages[name] = {'versions': set(),
                                      'size': {},
                                      'type': {},
                                      'app_entry': {},
                                      'app_type': {},
                                      }
            elif name in metadata:
                temp_data = all_packages[name]
                temp_data['home'] = metadata[name].get('home', '')
                temp_data['license'] = metadata[name].get('license', '')
                temp_data['summary'] = metadata[name].get('summary', '')
                temp_data['latest_version'] = metadata[name].get('version')
                all_packages[name] = temp_data

            all_packages[name]['versions'].add(version)
            all_packages[name]['size'][version] = data.get('size', '')

            # Only the latest builds will have the correct metadata for
            # apps, so only store apps that have the app metadata
            if data.get('type', None):
                all_packages[name]['type'][version] = data.get(
                    'type', None)
                all_packages[name]['app_entry'][version] = data.get(
                    'app_entry', None)
                all_packages[name]['app_type'][version] = data.get(
                    'app_type', None)

    all_apps = {}
    for name in all_packages:
        versions = sort_versions(list(all_packages[name]['versions']))
        all_packages[name]['versions'] = versions[:]

        for version in versions:
            has_type = all_packages[name].get('type', None)
            # Has type in this case implies being an app
            if has_type:
                all_apps[name] = all_packages[name].copy()
                # Remove all versions that are not apps!
                versions = all_apps[name]['versions'][:]
                types = all_apps[name]['type']
                app_versions = [v for v in versions if v in types]
                all_apps[name]['versions'] = app_versions

    return all_packages, all_apps
def _handle_ssl_errors(self, reply, errors):
    """Callback for SSL errors."""
    logger.error(str(('SSL Errors', errors)))
def _handle_ssl_errors(reply, errors):
    """Callback for ssl_errors."""
    logger.error(str(('SSL Errors', errors, reply)))
def _load_repodata(self, filepaths, extra_data={}, metadata={}):
    """
    Load all the available packages information for downloaded repodata
    files (repo.continuum.io), additional data provided (anaconda cloud),
    and additional metadata, and merge them into a single set of packages
    and apps.
    """
    repodata = []
    for filepath in filepaths:
        compressed = filepath.endswith('.bz2')
        mode = 'rb' if filepath.endswith('.bz2') else 'r'

        if os.path.isfile(filepath):
            with open(filepath, mode) as f:
                raw_data = f.read()

            if compressed:
                data = bz2.decompress(raw_data)
            else:
                data = raw_data

            try:
                data = json.loads(to_text_string(data, 'UTF-8'))
            except Exception as error:
                logger.error(str(error))
                data = {}

            repodata.append(data)

    all_packages = {}
    for data in repodata:
        packages = data.get('packages', {})
        for canonical_name in packages:
            data = packages[canonical_name]
            name, version, build = tuple(canonical_name.rsplit('-', 2))

            if name not in all_packages:
                all_packages[name] = {'versions': set(),
                                      'size': {},
                                      'type': {},
                                      'app_entry': {},
                                      'app_type': {},
                                      }
            elif name in metadata:
                temp_data = all_packages[name]
                temp_data['home'] = metadata[name].get('home', '')
                temp_data['license'] = metadata[name].get('license', '')
                temp_data['summary'] = metadata[name].get('summary', '')
                temp_data['latest_version'] = metadata[name].get('version')
                all_packages[name] = temp_data

            all_packages[name]['versions'].add(version)
            all_packages[name]['size'][version] = data.get('size', '')

            # Only the latest builds will have the correct metadata for
            # apps, so only store apps that have the app metadata
            if data.get('type', None):
                all_packages[name]['type'][version] = data.get(
                    'type', None)
                all_packages[name]['app_entry'][version] = data.get(
                    'app_entry', None)
                all_packages[name]['app_type'][version] = data.get(
                    'app_type', None)

    all_apps = {}
    for name in all_packages:
        versions = sort_versions(list(all_packages[name]['versions']))
        all_packages[name]['versions'] = versions[:]

        for version in versions:
            has_type = all_packages[name].get('type', None)
            # Has type in this case implies being an app
            if has_type:
                all_apps[name] = all_packages[name].copy()
                # Remove all versions that are not apps!
                versions = all_apps[name]['versions'][:]
                types = all_apps[name]['type']
                app_versions = [v for v in versions if v in types]
                all_apps[name]['versions'] = app_versions

    return all_packages, all_apps