def _on_downloaded_headers(self):
    """Handle the headers reply for a pending download.

    Compares the remote ``content-length`` header against the size of
    the local target file; if they match, the download is skipped and
    the next queued download is started, otherwise the real GET request
    is issued via ``self._get()``.
    """
    # Abort early on any network error reported by the reply.
    error_code = self._reply.error()
    if error_code > 0:
        self._on_errors(error_code)
        return None

    fullpath = osp.join(self._save_path, self._filename)
    headers = {}
    data = self._reply.rawHeaderPairs()
    for d in data:
        # On Qt/Python 3 header pairs arrive as QByteArray objects.
        if isinstance(d[0], QByteArray):
            d = [d[0].data(), d[1].data()]
        key = to_text_string(d[0], encoding='ascii')
        value = to_text_string(d[1], encoding='ascii')
        headers[key.lower()] = value

    if len(headers) != 0:
        # Robustness fix: the server may omit 'content-length' and the
        # local file may not exist yet.  The original code raised
        # KeyError/OSError in those cases; now both fall back to the
        # size-mismatch path and the file is simply (re)downloaded.
        header_filesize = int(headers.get('content-length', -1))
        if osp.isfile(fullpath):
            local_filesize = int(osp.getsize(fullpath))
        else:
            local_filesize = -2  # guaranteed not to equal header size
        if header_filesize == local_filesize:
            self._free = True
            self._start_next_download()
        else:
            self._get()
def setup_model(self, packages_names, packages_versions, row_data):
    """Create the source and proxy models for the packages table.

    All three arguments are handed to ``CondaPackagesModel``; the proxy
    model gets the text/status filter functions registered on it.
    """
    self.proxy_model = MultiColumnSortFilterProxy(self)
    self.source_model = CondaPackagesModel(self, packages_names,
                                           packages_versions, row_data)
    self.proxy_model.setSourceModel(self.source_model)
    self.setModel(self.proxy_model)

    # Custom Proxy Model setup
    self.proxy_model.setDynamicSortFilter(True)
    # Text filter: every whitespace-separated term must appear in the
    # row name (case-insensitive) or in the row description.
    filter_text = \
        (lambda row, text, status: (
         all([t in row[const.NAME].lower()
              for t in to_text_string(text).lower().split()]) or
         all([t in row[const.DESCRIPTION].lower()
              for t in to_text_string(text).split()])))
    # Status filter: the row status must be among the active statuses
    # (see filter_changed, which builds `status` as a joined string).
    filter_status = (lambda row, text, status:
                     to_text_string(row[const.STATUS]) in
                     to_text_string(status))
    self.model().add_filter_function('status-search', filter_status)
    self.model().add_filter_function('text-search', filter_text)

    # Signals and slots
    self.verticalScrollBar().valueChanged.connect(self.resize_rows)

    self.hide_columns()
    # self.resizeRowsToContents()
    self.resize_rows()
def _request_finished(self, reply):
    """Callback for download once the request has finished.

    Handles both phases of a download: the HEAD reply (to decide
    whether the local file is already up to date) and the GET reply
    (to save the downloaded payload).
    """
    url = to_text_string(reply.url().toEncoded(), encoding='utf-8')

    # NOTE(review): `path` and `worker` are only bound when `url` is
    # present in the corresponding dicts; the branches below assume
    # both lookups succeeded -- confirm the invariant holds upstream.
    if url in self._paths:
        path = self._paths[url]
    if url in self._workers:
        worker = self._workers[url]

    if url in self._head_requests:
        error = reply.error()
        # print(url, error)
        if error:
            # HEAD failed: report the download as finished with error.
            logger.error(str(('Head Reply Error:', error)))
            worker.sig_download_finished.emit(url, path)
            worker.sig_finished.emit(worker, path, error)
            return

        self._head_requests.pop(url)
        start_download = not bool(error)
        header_pairs = reply.rawHeaderPairs()
        headers = {}

        for hp in header_pairs:
            headers[to_text_string(hp[0]).lower()] = to_text_string(hp[1])

        total_size = int(headers.get('content-length', 0))

        # Check if file exists
        if os.path.isfile(path):
            file_size = os.path.getsize(path)

            # Check if existing file matches size of requested file
            start_download = file_size != total_size

        if start_download:
            # File sizes dont match, hence download file
            qurl = QUrl(url)
            request = QNetworkRequest(qurl)
            self._get_requests[url] = request
            reply = self._manager.get(request)
            error = reply.error()

            if error:
                logger.error(str(('Reply Error:', error)))

            reply.downloadProgress.connect(
                lambda r, t, w=worker: self._progress(r, t, w))
        else:
            # File sizes match, dont download file or error?
            worker.finished = True
            worker.sig_download_finished.emit(url, path)
            worker.sig_finished.emit(worker, path, None)
    elif url in self._get_requests:
        # GET reply: persist the downloaded payload to disk.
        data = reply.readAll()
        self._save(url, path, data)
def _call_conda_ready(self):
    """function called when QProcess in _call_conda finishes task

    Post-processes stdout/stderr according to which conda function was
    invoked, storing the result in ``self.output`` and any error text
    in ``self.error``.
    """
    function = self._function_called

    # Prefer already-captured stdout/stderr; otherwise drain QProcess.
    if self.stdout is None:
        stdout = to_text_string(self._process.readAllStandardOutput(),
                                encoding=CondaProcess.ENCODING)
    else:
        stdout = self.stdout

    if self.error is None:
        stderr = to_text_string(self._process.readAllStandardError(),
                                encoding=CondaProcess.ENCODING)
    else:
        stderr = self.error

    if function == 'get_conda_version':
        # The version string may be printed on stderr or stdout
        # depending on the conda release; try both.
        pat = re.compile(r'conda:?\s+(\d+\.\d\S+|unknown)')
        m = pat.match(stderr.strip())
        if m is None:
            m = pat.match(stdout.strip())
        if m is None:
            raise Exception('output did not match: %r' % stderr)
        self.output = m.group(1)
    # elif function == 'get_envs':
    #     info = self.output
    #     self.output = info['envs']
    # elif function == 'get_prefix_envname':
    #     name = self._name
    #     envs = self.output
    #     self.output = self._get_prefix_envname_helper(name, envs)
    #     self._name = None
    elif function == 'config_path':
        result = self.output
        self.output = result['rc_path']
    elif function == 'config_get':
        result = self.output
        self.output = result['get']
    elif (function == 'config_delete' or function == 'config_add' or
            function == 'config_set' or function == 'config_remove'):
        result = self.output
        self.output = result.get('warnings', [])
    elif function == 'pip':
        # Rewrite '<name> <version> <pip>' listing lines into
        # '<name>-<version>-pip' canonical names.
        result = []
        lines = self.output.split('\n')

        for line in lines:
            if '<pip>' in line:
                temp = line.split()[:-1] + ['pip']
                result.append('-'.join(temp))
        self.output = result

    if stderr.strip():
        self.error = stderr
        # raise Exception('conda %r:\nSTDERR:\n%s\nEND' % (extra_args,
        #                 stderr.decode()))

    self._parse = False
def filter_status_changed(self, text):
    """Update the status filter group and re-apply the table filter.

    ``text`` may arrive as a combobox index rather than a display
    string (Qt signals can emit either); in that case it is mapped
    through ``const.PACKAGE_STATUS`` first.
    """
    if text not in const.PACKAGE_STATUS:
        text = const.PACKAGE_STATUS[text]

    # Bug fix: `group` was previously left unbound (NameError) when no
    # combobox value matched `text`; default to the current filter so
    # an unmatched value leaves the filter unchanged.
    group = self._filterbox
    for key in const.COMBOBOX_VALUES:
        val = const.COMBOBOX_VALUES[key]
        if to_text_string(val) == to_text_string(text):
            group = val
            break

    self._filterbox = group
    self.filter_changed()
def filter_status_changed(self, text):
    """Update the status filter group and re-apply the table filter.

    ``text`` may arrive as a combobox index rather than a display
    string (Qt signals can emit either); in that case it is mapped
    through ``const.PACKAGE_STATUS`` first.
    """
    if text not in const.PACKAGE_STATUS:
        text = const.PACKAGE_STATUS[text]

    # Bug fix: `group` was previously left unbound (NameError) when no
    # combobox value matched `text`; default to the current filter so
    # an unmatched value leaves the filter unchanged.
    group = self._filterbox
    for key in const.COMBOBOX_VALUES:
        val = const.COMBOBOX_VALUES[key]
        if to_text_string(val) == to_text_string(text):
            group = val
            break

    self._filterbox = group
    self.filter_changed()
def show(self, dialog):
    """Generic method to show a non-modal dialog and keep reference to
    the Qt C++ object"""
    # If a dialog with the same window title is already tracked, raise
    # that one instead of showing a duplicate.
    for dlg in list(self.dialogs.values()):
        if to_text_string(dlg.windowTitle()) \
           == to_text_string(dialog.windowTitle()):
            dlg.show()
            dlg.raise_()
            break
    else:
        # No match found (for/else): show and start tracking it.  The
        # stored reference keeps the Python wrapper alive while the Qt
        # dialog is open; it is released in dialog_finished.
        dialog.show()
        self.dialogs[id(dialog)] = dialog
        dialog.accepted.connect(
            lambda eid=id(dialog): self.dialog_finished(eid))
        dialog.rejected.connect(
            lambda eid=id(dialog): self.dialog_finished(eid))
def _changed_version(self, version, dependencies=True):
    """Handle a version change: lock the GUI and refresh dependencies."""
    self._set_gui_disabled(True)
    # The signal payload is ignored; read the current selection and the
    # dependency checkbox directly from the widgets.
    selected = self.combobox_version.currentText()
    with_deps = self.checkbox.checkState() == Qt.Checked
    self._version_text = to_text_string(selected)
    self._get_dependencies(with_deps)
def _changed_version(self, version, dependencies=True):
    """React to a version selection change and recompute dependencies."""
    self._set_gui_disabled(True)
    # Ignore the signal argument; the widgets hold the current state.
    current = self.combobox_version.currentText()
    self._version_text = to_text_string(current)
    self._get_dependencies(self.checkbox.checkState() == Qt.Checked)
def download(self, url, path):
    """Download `url` and save the data to `path`.

    Returns the ``DownloadWorker`` in charge of the request.  If a
    worker for the (encoded) url already exists and has not finished,
    that worker is returned instead of starting a new request; the
    actual transfer begins with a HEAD request so the size of an
    existing local file can be compared first.
    """
    qurl = QUrl(url)
    url = to_text_string(qurl.toEncoded(), encoding='utf-8')
    logger.debug(str((url, path)))

    # Fix: the original used `while not ...: return ...`, a loop that
    # can only ever run its first iteration; an `if` states the intent.
    if url in self._workers:
        worker = self._workers[url]
        if not worker.finished:
            return worker

    worker = DownloadWorker(url, path)

    # Make sure the download folder exists.
    folder = os.path.dirname(os.path.abspath(path))
    if not os.path.isdir(folder):
        os.makedirs(folder)

    request = QNetworkRequest(qurl)
    self._head_requests[url] = request
    self._paths[url] = path
    self._workers[url] = worker
    self._manager.head(request)
    self._timer.start()

    return worker
def mimedata2url(source, extlist=None):
    """
    Extract url list from MIME data
    extlist: for example ('.py', '.pyw')
    """
    # Gather raw path candidates from urls if present, otherwise from
    # the plain-text payload (one path per line).
    if source.hasUrls():
        candidates = [to_text_string(u.toString()) for u in source.urls()]
    elif source.hasText():
        candidates = to_text_string(source.text()).splitlines()
    else:
        candidates = []

    processed = (_process_mime_path(c, extlist) for c in candidates)
    pathlist = [p for p in processed if p is not None]
    if pathlist:
        return pathlist
def get_coding(text):
    """
    Function to get the coding of a text.
    @param text text to inspect (string)
    @return coding string
    """
    # Per PEP 263 the coding cookie must appear on line 1 or 2.
    lines = text.splitlines()
    for line in lines[:2]:
        found = CODING_RE.search(to_text_string(line))
        if found is not None:
            return found.group(1)
    return None
def get_coding(text):
    """
    Function to get the coding of a text.
    @param text text to inspect (string)
    @return coding string
    """
    # Only the first two lines may carry the coding cookie (PEP 263).
    for candidate in text.splitlines()[:2]:
        hit = CODING_RE.search(to_text_string(candidate))
        if hit is not None:
            return hit.group(1)
    return None
def _is_valid_api_url(self, url):
    """Callback for is_valid_api_url."""
    # The endpoint is valid when it answers a JSON payload with ok: 1.
    payload = {}
    try:
        response = requests.get(url, proxies=self.proxy_servers)
        body = to_text_string(response.content, encoding='utf-8')
        payload = json.loads(body)
    except Exception as error:
        logger.error(str(error))

    return payload.get('ok', 0) == 1
def decode(text):
    """
    Function to decode a text.
    @param text text to decode (string)
    @return decoded text and encoding
    """
    try:
        # Bug fix: test the UTF-32 BOM *before* the UTF-16 BOM.  The
        # native-order UTF-32 BOM begins with the UTF-16 BOM bytes
        # (little-endian: ff fe 00 00 vs ff fe), so checking UTF-16
        # first misdetected UTF-32 text as UTF-16.
        if text.startswith(BOM_UTF8):
            # UTF-8 with BOM
            return to_text_string(text[len(BOM_UTF8):], 'utf-8'), 'utf-8-bom'
        elif text.startswith(BOM_UTF32):
            # UTF-32 with BOM
            return to_text_string(text[len(BOM_UTF32):], 'utf-32'), 'utf-32'
        elif text.startswith(BOM_UTF16):
            # UTF-16 with BOM
            return to_text_string(text[len(BOM_UTF16):], 'utf-16'), 'utf-16'
        coding = get_coding(text)
        if coding:
            return to_text_string(text, coding), coding
    except (UnicodeError, LookupError):
        pass
    # Assume UTF-8
    try:
        return to_text_string(text, 'utf-8'), 'utf-8-guessed'
    except (UnicodeError, LookupError):
        pass
    # Assume Latin-1 (behaviour before 3.7.1)
    return to_text_string(text, "latin-1"), 'latin-1-guessed'
def decode(text):
    """
    Function to decode a text.
    @param text text to decode (string)
    @return decoded text and encoding
    """
    try:
        # Bug fix: test the UTF-32 BOM *before* the UTF-16 BOM.  The
        # native-order UTF-32 BOM begins with the UTF-16 BOM bytes
        # (little-endian: ff fe 00 00 vs ff fe), so checking UTF-16
        # first misdetected UTF-32 text as UTF-16.
        if text.startswith(BOM_UTF8):
            # UTF-8 with BOM
            return to_text_string(text[len(BOM_UTF8):], 'utf-8'), 'utf-8-bom'
        elif text.startswith(BOM_UTF32):
            # UTF-32 with BOM
            return to_text_string(text[len(BOM_UTF32):], 'utf-32'), 'utf-32'
        elif text.startswith(BOM_UTF16):
            # UTF-16 with BOM
            return to_text_string(text[len(BOM_UTF16):], 'utf-16'), 'utf-16'
        coding = get_coding(text)
        if coding:
            return to_text_string(text, coding), coding
    except (UnicodeError, LookupError):
        pass
    # Assume UTF-8
    try:
        return to_text_string(text, 'utf-8'), 'utf-8-guessed'
    except (UnicodeError, LookupError):
        pass
    # Assume Latin-1 (behaviour before 3.7.1)
    return to_text_string(text, "latin-1"), 'latin-1-guessed'
def to_unicode(string):
    """Convert a string to unicode"""
    if not is_unicode(string):
        for codec in CODECS:
            try:
                unic = to_text_string(string, codec)
            except UnicodeError:
                # Wrong codec for these bytes: try the next candidate.
                pass
            except TypeError:
                # `string` cannot be decoded at all; give up and return
                # it unchanged below.
                break
            else:
                # try/else: decoding succeeded, return the first hit.
                return unic
    return string
def to_unicode(string):
    """Convert a string to unicode"""
    if not is_unicode(string):
        for codec in CODECS:
            try:
                unic = to_text_string(string, codec)
            except UnicodeError:
                # Wrong codec for these bytes: try the next candidate.
                pass
            except TypeError:
                # `string` cannot be decoded at all; give up and return
                # it unchanged below.
                break
            else:
                # try/else: decoding succeeded, return the first hit.
                return unic
    return string
def setup_model(self, packages, data, metadata_links=None):
    """Create the source/proxy models for the packages table.

    :param packages: dict mapping package name to its metadata dict
        (the 'size' entry is cached per package).
    :param data: row data handed to CondaPackagesModel.
    :param metadata_links: optional dict of extra metadata links,
        stored on the instance (defaults to an empty dict).
    """
    # Bug fix: avoid the mutable default argument ({}); use None and
    # normalize here so no dict instance is shared across calls.
    if metadata_links is None:
        metadata_links = {}

    self.proxy_model = MultiColumnSortFilterProxy(self)
    self.source_model = CondaPackagesModel(self, packages, data)
    self.proxy_model.setSourceModel(self.source_model)
    self.setModel(self.proxy_model)
    self.metadata_links = metadata_links

    # FIXME: packages sizes... move to a better place?
    packages_sizes = {}
    for name in packages:
        packages_sizes[name] = packages[name].get('size')
    self._packages_sizes = packages_sizes

    # Custom Proxy Model setup
    self.proxy_model.setDynamicSortFilter(True)
    # Text filter: every whitespace-separated term must appear in the
    # row name (case-insensitive) or in the row description.
    filter_text = \
        (lambda row, text, status: (
         all([t in row[const.COL_NAME].lower()
              for t in to_text_string(text).lower().split()]) or
         all([t in row[const.COL_DESCRIPTION].lower()
              for t in to_text_string(text).split()])))
    # Status filter: the row status must be among the active statuses.
    filter_status = (lambda row, text, status:
                     to_text_string(row[const.COL_STATUS]) in
                     to_text_string(status))
    self.model().add_filter_function('status-search', filter_status)
    self.model().add_filter_function('text-search', filter_text)

    # Signals and slots
    self.verticalScrollBar().valueChanged.connect(self.resize_rows)

    self.hide_columns()
    self.resize_rows()
    self.refresh_actions()
    self.source_model.update_style_palette(self._palette)
def setup_model(self, packages, data, metadata_links=None):
    """Create the source/proxy models for the packages table.

    :param packages: dict mapping package name to its metadata dict
        (the 'size' entry is cached per package).
    :param data: row data handed to CondaPackagesModel.
    :param metadata_links: optional dict of extra metadata links,
        stored on the instance (defaults to an empty dict).
    """
    # Bug fix: avoid the mutable default argument ({}); use None and
    # normalize here so no dict instance is shared across calls.
    if metadata_links is None:
        metadata_links = {}

    self.proxy_model = MultiColumnSortFilterProxy(self)
    self.source_model = CondaPackagesModel(self, packages, data)
    self.proxy_model.setSourceModel(self.source_model)
    self.setModel(self.proxy_model)
    self.metadata_links = metadata_links

    # FIXME: packages sizes... move to a better place?
    packages_sizes = {}
    for name in packages:
        packages_sizes[name] = packages[name].get('size')
    self._packages_sizes = packages_sizes

    # Custom Proxy Model setup
    self.proxy_model.setDynamicSortFilter(True)
    # Text filter: every whitespace-separated term must appear in the
    # row name (case-insensitive) or in the row description.
    filter_text = \
        (lambda row, text, status: (
         all([t in row[const.COL_NAME].lower()
              for t in to_text_string(text).lower().split()]) or
         all([t in row[const.COL_DESCRIPTION].lower()
              for t in to_text_string(text).split()])))
    # Status filter: the row status must be among the active statuses.
    filter_status = (lambda row, text, status:
                     to_text_string(row[const.COL_STATUS]) in
                     to_text_string(status))
    self.model().add_filter_function('status-search', filter_status)
    self.model().add_filter_function('text-search', filter_text)

    # Signals and slots
    self.verticalScrollBar().valueChanged.connect(self.resize_rows)

    self.hide_columns()
    self.resize_rows()
    self.refresh_actions()
    self.source_model.update_style_palette(self._palette)
def to_unicode_from_fs(string):
    """
    Return a unicode version of string decoded using the file system
    encoding.
    """
    if not is_string(string):
        # string is a QString: go through its UTF-8 representation.
        return to_text_string(string.toUtf8(), 'utf-8')
    if is_binary_string(string):
        try:
            return string.decode(FS_ENCODING)
        except (UnicodeError, TypeError):
            # Fall through and return the original bytes unchanged.
            pass
    return string
def get_api_info(self, url):
    """Query anaconda api info.

    Returns the parsed JSON payload from `url` (empty dict on any
    request/parsing error), guaranteeing a 'conda_url' key.
    """
    data = {}
    try:
        r = requests.get(url, proxies=self.proxy_servers)
        content = to_text_string(r.content, encoding='utf-8')
        data = json.loads(content)
        # NOTE(review): 'api_url' is only recorded when the endpoint
        # returned an *empty* payload -- confirm this is intentional
        # and not meant to be set unconditionally.
        if not data:
            data['api_url'] = url
        if 'conda_url' not in data:
            data['conda_url'] = 'https://conda.anaconda.org'
    except Exception as error:
        # Best-effort: log and return whatever was collected so far.
        logger.error(str(error))

    return data
def to_unicode_from_fs(string):
    """
    Return a unicode version of string decoded using the file system
    encoding.
    """
    if is_string(string):
        if is_binary_string(string):
            try:
                decoded = string.decode(FS_ENCODING)
            except (UnicodeError, TypeError):
                # Undecodable: hand back the original object below.
                pass
            else:
                return decoded
        return string
    # Not a plain string: it is a QString, convert via UTF-8 bytes.
    return to_text_string(string.toUtf8(), 'utf-8')
def _load_repodata(self, filepaths, extra_data={}, metadata={}):
    """
    Load all the available pacakges information for downloaded repodata
    files (repo.continuum.io), additional data provided (anaconda cloud),
    and additional metadata and merge into a single set of packages and
    apps.

    Returns a tuple ``(all_packages, all_apps)`` of dicts keyed by
    package name.
    """
    # NOTE(review): mutable default arguments ({}) -- harmless here
    # since they are never mutated, but `None` defaults would be safer.
    # NOTE(review): `extra_data` is accepted but never used in this body.
    repodata = []
    for filepath in filepaths:
        compressed = filepath.endswith('.bz2')
        mode = 'rb' if filepath.endswith('.bz2') else 'r'
        if os.path.isfile(filepath):
            with open(filepath, mode) as f:
                raw_data = f.read()
            if compressed:
                data = bz2.decompress(raw_data)
            else:
                data = raw_data
            try:
                data = json.loads(to_text_string(data, 'UTF-8'))
            except Exception as error:
                # Corrupt/partial repodata: log and treat as empty.
                logger.error(str(error))
                data = {}
            repodata.append(data)

    all_packages = {}
    for data in repodata:
        packages = data.get('packages', {})
        for canonical_name in packages:
            data = packages[canonical_name]
            # Canonical names look like '<name>-<version>-<build>'.
            name, version, b = tuple(canonical_name.rsplit('-', 2))
            if name not in all_packages:
                all_packages[name] = {'versions': set(),
                                      'size': {},
                                      'type': {},
                                      'app_entry': {},
                                      'app_type': {},
                                      }
            elif name in metadata:
                temp_data = all_packages[name]
                temp_data['home'] = metadata[name].get('home', '')
                temp_data['license'] = metadata[name].get('license', '')
                temp_data['summary'] = metadata[name].get('summary', '')
                temp_data['latest_version'] = metadata[name].get('version')
                all_packages[name] = temp_data

            all_packages[name]['versions'].add(version)
            all_packages[name]['size'][version] = data.get('size', '')

            # Only the latest builds will have the correct metadata for
            # apps, so only store apps that have the app metadata
            if data.get('type', None):
                all_packages[name]['type'][version] = data.get(
                    'type', None)
                all_packages[name]['app_entry'][version] = data.get(
                    'app_entry', None)
                all_packages[name]['app_type'][version] = data.get(
                    'app_type', None)

    all_apps = {}
    for name in all_packages:
        versions = sort_versions(list(all_packages[name]['versions']))
        all_packages[name]['versions'] = versions[:]

        # NOTE(review): `has_type` does not depend on `version`, so this
        # loop re-evaluates the same condition; structure preserved as-is.
        for version in versions:
            has_type = all_packages[name].get('type', None)
            # Has type in this case implies being an app
            if has_type:
                all_apps[name] = all_packages[name].copy()
                # Remove all versions that are not apps!
                versions = all_apps[name]['versions'][:]
                types = all_apps[name]['type']
                app_versions = [v for v in versions if v in types]
                all_apps[name]['versions'] = app_versions

    return all_packages, all_apps
def filter_changed(self):
    """Trigger the filter"""
    group = self._filterbox
    text = self._searchbox

    # Expand the selected group into the concatenated string of status
    # values that the 'status-search' filter tests membership against.
    if group in [const.ALL]:
        group = ''.join([to_text_string(const.INSTALLED),
                         to_text_string(const.UPGRADABLE),
                         to_text_string(const.NOT_INSTALLED),
                         to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    elif group in [const.INSTALLED]:
        group = ''.join([to_text_string(const.INSTALLED),
                         to_text_string(const.UPGRADABLE),
                         to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    elif group in [const.UPGRADABLE]:
        group = ''.join([to_text_string(const.UPGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    elif group in [const.DOWNGRADABLE]:
        group = ''.join([to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    else:
        group = to_text_string(group)

    if self.proxy_model is not None:
        self.proxy_model.set_filter(text, group)
        self.resize_rows()

    # Update label count
    count = self.verticalHeader().count()
    if count == 0:
        count_text = _("0 packages available ")
    elif count == 1:
        count_text = _("1 package available ")
    elif count > 1:
        count_text = to_text_string(count) + _(" packages available ")

    if text != '':
        count_text = count_text + _('matching "{0}"').format(text)

    self.sig_status_updated.emit(count_text, False, [0, 0], True)
def search_string_changed(self, text):
    """Store the new search text and re-apply the table filter."""
    self._searchbox = to_text_string(text)
    self.filter_changed()
def handle_qbytearray(obj, encoding):
    """Qt/Python3 compatibility helper."""
    # QByteArray exposes its raw bytes via .data(); anything else is
    # passed straight to the text conversion.
    raw = obj.data() if isinstance(obj, QByteArray) else obj
    return to_text_string(raw, encoding=encoding)
def _changed_version(self, version, dependencies=True):
    """Handle a version change: lock the GUI and refresh dependencies.

    :param version: version text delivered by the signal.
    :param dependencies: unused here; the dependency flag is read from
        the checkbox state instead.
    """
    self._set_gui_disabled(True)
    # NOTE(review): 2 is the integer value of Qt.Checked; sibling
    # implementations compare against Qt.Checked -- consider aligning
    # if Qt is imported in this module.
    install_dependencies = (self.checkbox.checkState() == 2)
    self._version_text = to_text_string(version)
    self._get_dependencies(install_dependencies)
def _load_repodata(self, filepaths, extra_data={}, metadata={}):
    """
    Load all the available pacakges information for downloaded repodata
    files (repo.continuum.io), additional data provided (anaconda cloud),
    and additional metadata and merge into a single set of packages and
    apps.

    Returns a tuple ``(all_packages, all_apps)`` of dicts keyed by
    package name.
    """
    # NOTE(review): mutable default arguments ({}) -- harmless here
    # since they are never mutated, but `None` defaults would be safer.
    # NOTE(review): `extra_data` is accepted but never used in this body.
    repodata = []
    for filepath in filepaths:
        compressed = filepath.endswith('.bz2')
        mode = 'rb' if filepath.endswith('.bz2') else 'r'
        if os.path.isfile(filepath):
            with open(filepath, mode) as f:
                raw_data = f.read()
            if compressed:
                data = bz2.decompress(raw_data)
            else:
                data = raw_data
            try:
                data = json.loads(to_text_string(data, 'UTF-8'))
            except Exception as error:
                # Corrupt/partial repodata: log and treat as empty.
                logger.error(str(error))
                data = {}
            repodata.append(data)

    all_packages = {}
    for data in repodata:
        packages = data.get('packages', {})
        for canonical_name in packages:
            data = packages[canonical_name]
            # Canonical names look like '<name>-<version>-<build>'.
            name, version, b = tuple(canonical_name.rsplit('-', 2))
            if name not in all_packages:
                all_packages[name] = {
                    'versions': set(),
                    'size': {},
                    'type': {},
                    'app_entry': {},
                    'app_type': {},
                    }
            elif name in metadata:
                temp_data = all_packages[name]
                temp_data['home'] = metadata[name].get('home', '')
                temp_data['license'] = metadata[name].get('license', '')
                temp_data['summary'] = metadata[name].get('summary', '')
                temp_data['latest_version'] = metadata[name].get('version')
                all_packages[name] = temp_data

            all_packages[name]['versions'].add(version)
            all_packages[name]['size'][version] = data.get('size', '')

            # Only the latest builds will have the correct metadata for
            # apps, so only store apps that have the app metadata
            if data.get('type', None):
                all_packages[name]['type'][version] = data.get(
                    'type', None)
                all_packages[name]['app_entry'][version] = data.get(
                    'app_entry', None)
                all_packages[name]['app_type'][version] = data.get(
                    'app_type', None)

    all_apps = {}
    for name in all_packages:
        versions = sort_versions(list(all_packages[name]['versions']))
        all_packages[name]['versions'] = versions[:]

        # NOTE(review): `has_type` does not depend on `version`, so this
        # loop re-evaluates the same condition; structure preserved as-is.
        for version in versions:
            has_type = all_packages[name].get('type', None)
            # Has type in this case implies being an app
            if has_type:
                all_apps[name] = all_packages[name].copy()
                # Remove all versions that are not apps!
                versions = all_apps[name]['versions'][:]
                types = all_apps[name]['type']
                app_versions = [v for v in versions if v in types]
                all_apps[name]['versions'] = app_versions

    return all_packages, all_apps
def handle_qbytearray(obj, encoding):
    """Convert a QByteArray (or plain bytes/str) to text via `encoding`."""
    if isinstance(obj, QByteArray):
        # Unwrap the Qt container to its underlying bytes first.
        obj = obj.data()

    return to_text_string(obj, encoding=encoding)
def filter_changed(self):
    """Trigger the filter"""
    group = self._filterbox
    text = self._searchbox

    # Expand the selected group into the concatenated string of status
    # values that the 'status-search' filter tests membership against.
    if group in [const.ALL]:
        group = ''.join([to_text_string(const.INSTALLED),
                         to_text_string(const.UPGRADABLE),
                         to_text_string(const.NOT_INSTALLED),
                         to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE),
                         to_text_string(const.NOT_INSTALLABLE)])
    elif group in [const.INSTALLED]:
        group = ''.join([to_text_string(const.INSTALLED),
                         to_text_string(const.UPGRADABLE),
                         to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    elif group in [const.UPGRADABLE]:
        group = ''.join([to_text_string(const.UPGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    elif group in [const.DOWNGRADABLE]:
        group = ''.join([to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    elif group in [const.ALL_INSTALLABLE]:
        group = ''.join([to_text_string(const.INSTALLED),
                         to_text_string(const.UPGRADABLE),
                         to_text_string(const.NOT_INSTALLED),
                         to_text_string(const.DOWNGRADABLE),
                         to_text_string(const.MIXGRADABLE)])
    else:
        group = to_text_string(group)

    if self.proxy_model is not None:
        self.proxy_model.set_filter(text, group)
        self.resize_rows()

    # Update label count
    count = self.verticalHeader().count()
    if count == 0:
        count_text = _("0 packages available ")
    elif count == 1:
        count_text = _("1 package available ")
    elif count > 1:
        count_text = to_text_string(count) + _(" packages available ")

    if text != '':
        count_text = count_text + _('matching "{0}"').format(text)

    self._parent._update_status(status=count_text, hide=False, env=True)
def search_string_changed(self, text):
    """Record the search box text and trigger filtering."""
    new_text = to_text_string(text)
    self._searchbox = new_text
    self.filter_changed()