def get_package(self, pref, dest_folder, remote, output, recorder):
    """Download a binary package from *remote* into *dest_folder*.

    Fires the pre/post ``download_package`` hooks, updates the cached
    package metadata (revisions + checksums), and records the download
    in *recorder*.  On any failure the partially-downloaded
    *dest_folder* is removed before re-raising.

    :param pref: PackageReference of the package to fetch — presumably
        may lack a revision, which is resolved against the remote (TODO
        confirm against callers).
    :param dest_folder: local folder that will receive the package files.
    :param remote: remote to download from.
    :param output: user-facing output object (info/success/error/warn).
    :param recorder: action recorder notified on successful download.
    :raises PackageNotFoundException: if the remote reports the package
        missing or its snapshot is incomplete.
    :raises ConanException: if cleanup of *dest_folder* fails after an error.
    """
    conanfile_path = self._cache.package_layout(pref.ref).conanfile()
    self._hook_manager.execute("pre_download_package", conanfile_path=conanfile_path,
                               reference=pref.ref, package_id=pref.id, remote=remote)
    output.info("Retrieving package %s from remote '%s' " % (pref.id, remote.name))
    rm_conandir(dest_folder)  # Remove first the destination folder
    t1 = time.time()
    try:
        # Pin the latest package revision before asking for its files
        pref = self._resolve_latest_pref(pref, remote)
        snapshot = self._call_remote(remote, "get_package_snapshot", pref)
        # An incomplete snapshot means a broken upload: treat as not found
        if not is_package_snapshot_complete(snapshot):
            raise PackageNotFoundException(pref)
        zipped_files = self._call_remote(remote, "get_package", pref, dest_folder)
        # Persist revisions and checksums of the downloaded files
        package_checksums = calc_files_checksum(zipped_files)
        with self._cache.package_layout(pref.ref).update_metadata() as metadata:
            metadata.packages[pref.id].revision = pref.revision
            metadata.packages[pref.id].recipe_revision = pref.ref.revision
            metadata.packages[pref.id].checksums = package_checksums
        duration = time.time() - t1
        log_package_download(pref, duration, remote, zipped_files)
        unzip_and_get_files(zipped_files, dest_folder, PACKAGE_TGZ_NAME,
                            output=self._output)
        # Issue #214 https://github.com/conan-io/conan/issues/214
        touch_folder(dest_folder)
        if get_env("CONAN_READ_ONLY_CACHE", False):
            make_read_only(dest_folder)
        recorder.package_downloaded(pref, remote.url)
        output.success('Package installed %s' % pref.id)
    except NotFoundException:
        raise PackageNotFoundException(pref)
    except BaseException as e:
        # BaseException so KeyboardInterrupt also triggers folder cleanup
        output.error("Exception while getting package: %s" % str(pref.id))
        output.error("Exception: %s %s" % (type(e), str(e)))
        try:
            output.warn("Trying to remove package folder: %s" % dest_folder)
            rmdir(dest_folder)
        except OSError as e:
            raise ConanException("%s\n\nCouldn't remove folder '%s', might be busy or open. "
                                 "Close any app using it, and retry" % (str(e), dest_folder))
        raise
    self._hook_manager.execute("post_download_package", conanfile_path=conanfile_path,
                               reference=pref.ref, package_id=pref.id, remote=remote)
def _get_package(self, layout, pref, remote, output, recorder, info):
    """Download a package into *layout*'s download folder, then uncompress
    it into the final package folder.

    Unlike a direct-to-destination download, files land first in
    ``layout.download_package(pref)`` and the tgz is uncompressed into
    ``layout.package(pref)``; loose files (CONANINFO, CONANMANIFEST) are
    moved over afterwards.  Metadata (revisions, checksums, remote name)
    is updated under the layout's metadata lock.

    :param layout: cache package layout used for folders and metadata.
    :param info: request info turned into extra HTTP headers — semantics
        of its contents depend on ``_headers_for_info`` (not visible here).
    :raises PackageNotFoundException: missing package or incomplete snapshot.
    """
    t1 = time.time()
    try:
        headers = _headers_for_info(info)
        # Pin the latest package revision before asking for its files
        pref = self._resolve_latest_pref(pref, remote, headers=headers)
        snapshot = self._call_remote(remote, "get_package_snapshot", pref)
        # An incomplete snapshot means a broken upload: treat as not found
        if not is_package_snapshot_complete(snapshot):
            raise PackageNotFoundException(pref)
        download_pkg_folder = layout.download_package(pref)
        # Download files to the pkg_tgz folder, not to the final one
        zipped_files = self._call_remote(remote, "get_package", pref,
                                         download_pkg_folder)
        # Compute and update the package metadata
        package_checksums = calc_files_checksum(zipped_files)
        with layout.update_metadata() as metadata:
            metadata.packages[pref.id].revision = pref.revision
            metadata.packages[pref.id].recipe_revision = pref.ref.revision
            metadata.packages[pref.id].checksums = package_checksums
            metadata.packages[pref.id].remote = remote.name
        duration = time.time() - t1
        log_package_download(pref, duration, remote, zipped_files)
        tgz_file = zipped_files.pop(PACKAGE_TGZ_NAME, None)
        check_compressed_files(PACKAGE_TGZ_NAME, zipped_files)
        package_folder = layout.package(pref)
        if tgz_file:  # This must happen always, but just in case
            # TODO: The output could be changed to the package one, but
            uncompress_file(tgz_file, package_folder, output=self._output)
        mkdir(package_folder)  # Just in case it doesn't exist, because uncompress did nothing
        for file_name, file_path in zipped_files.items():  # copy CONANINFO and CONANMANIFEST
            shutil.move(file_path, os.path.join(package_folder, file_name))
        # Issue #214 https://github.com/conan-io/conan/issues/214
        touch_folder(package_folder)
        if get_env("CONAN_READ_ONLY_CACHE", False):
            make_read_only(package_folder)
        recorder.package_downloaded(pref, remote.url)
        output.success('Package installed %s' % pref.id)
        output.info("Downloaded package revision %s" % pref.revision)
    except NotFoundException:
        raise PackageNotFoundException(pref)
    except BaseException as e:
        # BaseException so even KeyboardInterrupt is reported before re-raise
        output.error("Exception while getting package: %s" % str(pref.id))
        output.error("Exception: %s %s" % (type(e), str(e)))
        raise
def package_revision(self, pref):
    """Return the revision recorded in this layout's metadata for *pref*.

    :raises PackageNotFoundException: when the package id has no entry.
    """
    assert isinstance(pref, PackageReference)
    assert pref.ref.copy_clear_rev() == self._ref.copy_clear_rev()
    packages = self.load_metadata().packages
    if pref.id in packages:
        return packages[pref.id].revision
    raise PackageNotFoundException(pref)
def package_summary_hash(self, pref):
    """Return the summary hash of the manifest stored in the package folder.

    :raises PackageNotFoundException: if the manifest cannot be read.
    """
    folder = self.package(pref)
    try:
        manifest = FileTreeManifest.load(folder)
    except IOError:
        raise PackageNotFoundException(pref)
    return manifest.summary_hash
def get_package_file_list(self, pref, auth_user):
    """Return ``{"files": {name: {}}}`` for every file of the package.

    :raises PackageNotFoundException: when the store reports no files.
    """
    self._authorizer.check_read_conan(auth_user, pref.ref)
    names = self._server_store.get_package_file_list(pref)
    if not names:
        raise PackageNotFoundException(pref, print_rev=True)
    # Send speculative metadata (empty) for files (non breaking future changes)
    files = {}
    for name in names:
        files[name] = {}
    return {"files": files}
def _get_package_urls(self, pref):
    """Return a {filename: download_url} dict for the package files."""
    endpoint = self.router.package_download_urls(pref)
    file_urls = self._get_file_to_url_dict(endpoint)
    if not file_urls:
        raise PackageNotFoundException(pref)
    return file_urls
def get_package_revisions(self, pref):
    """Return the revision entries for *pref*.

    When *pref* carries a concrete revision, answer with just that entry
    (as a one-element list); raise if the server does not list it.
    """
    url = self.router.package_revisions(pref)
    revisions = self.get_json(url)["revisions"]
    if not pref.revision:
        return revisions
    matches = [entry for entry in revisions
               if entry["revision"] == pref.revision]
    if not matches:
        raise PackageNotFoundException(pref, print_rev=True)
    return matches[:1]
def remove_packages(self, ref, package_ids):
    """ Remove any packages specified by package_ids

    With no *package_ids*, every package of the recipe is removed; a 404
    from the server is tolerated when a follow-up search confirms the
    recipe simply has no packages.  With ids, each package's revisions
    are enumerated and deleted one by one.

    :param ref: recipe reference; if it has no revision, the removal is
        applied to every known recipe revision.
    :param package_ids: iterable of package ids, or falsy for "all".
    :raises PackageNotFoundException: 404 deleting a specific package.
    """
    self.check_credentials()
    if ref.revision is None:
        # Remove the packages from all the RREVs
        revisions = self.get_recipe_revisions(ref)
        refs = [ref.copy_with_rev(rev["revision"]) for rev in revisions]
    else:
        refs = [ref]
    for ref in refs:
        assert ref.revision is not None, "remove_packages needs RREV"
        if not package_ids:
            # Bulk removal of every package of this recipe revision
            url = self.router.remove_all_packages(ref)
            response = self.requester.delete(url, auth=self.auth, verify=self.verify_ssl,
                                             headers=self.custom_headers)
            if response.status_code == 404:
                # Double check if it is a 404 because there are no packages
                try:
                    package_search_url = self.router.search_packages(ref)
                    if not self.get_json(package_search_url):
                        # Nothing to delete: the 404 is benign, stop here
                        return
                except Exception as e:
                    # Best-effort check only: log and fall through to the
                    # generic error handling below
                    logger.warning("Unexpected error searching {} packages"
                                   " in remote {}: {}".format(ref, self.remote_url, e))
            if response.status_code != 200:  # Error message is text
                # To be able to access ret.text (ret.content are bytes)
                response.charset = "utf-8"
                raise get_exception_from_error(response.status_code)(response.text)
        else:
            for pid in package_ids:
                pref = PackageReference(ref, pid)
                # Delete every known revision of this package id
                revisions = self.get_package_revisions(pref)
                prefs = [pref.copy_with_revs(ref.revision, rev["revision"])
                         for rev in revisions]
                for pref in prefs:
                    url = self.router.remove_package(pref)
                    response = self.requester.delete(url, auth=self.auth,
                                                     headers=self.custom_headers,
                                                     verify=self.verify_ssl)
                    if response.status_code == 404:
                        raise PackageNotFoundException(pref)
                    if response.status_code != 200:  # Error message is text
                        # To be able to access ret.text (ret.content are bytes)
                        response.charset = "utf-8"
                        raise get_exception_from_error(response.status_code)(response.text)
def get_package_revisions(self, pref):
    """Query the repository index for the revisions of *pref*.

    :return: list of _RevisionEntry, newest data as served by the index.
    :raises PackageNotFoundException: when the index lists no revisions.
    """
    # Index appears to not being updated immediately
    time.sleep(0.1)
    url = "{}/{}".format(self._repo_url, self._pref_index(pref))
    response = requests.get(url, auth=self._auth)
    response.raise_for_status()
    revisions = response.json()["revisions"]
    if not revisions:
        raise PackageNotFoundException(pref)
    return [_RevisionEntry(item["revision"], item["time"])
            for item in revisions]
def get_package_upload_urls(self, pref, filesizes):
    """
    :param pref: PackageReference
    :param filesizes: {filepath: bytes}
    :return {filepath: url}
    """
    latest = self._get_latest_pref(pref)
    # The recipe must already exist server-side before uploading packages
    try:
        self._server_store.get_recipe_snapshot(latest.ref)
    except NotFoundException:
        raise PackageNotFoundException(latest)
    self._authorizer.check_write_package(self._auth_user, latest)
    return self._server_store.get_upload_package_urls(latest, filesizes,
                                                      self._auth_user)
def get_package_revisions(self, pref):
    """Returns a RevisionList"""
    assert pref.ref.revision is not None, "BUG: server store needs PREV get_package_revisions"
    if pref.revision:
        # A concrete revision was requested: answer with just that one
        single = RevisionList()
        single.add_revision(pref.revision)
        return single.as_list()
    revisions_file = self._package_revisions_file(pref)
    revisions = self._get_revisions_list(revisions_file).as_list()
    if not revisions:
        raise PackageNotFoundException(pref, print_rev=True)
    return revisions
def get_package_info(self, pref):
    """Fetch and parse the CONANINFO file of a package, in memory."""
    # Revisions are stripped: the info endpoint is revision-agnostic here
    bare_pref = pref.copy_with_revs(None, None)
    endpoint = self.router.package_download_urls(bare_pref)
    file_urls = self._get_file_to_url_dict(endpoint)
    if not file_urls:
        raise PackageNotFoundException(bare_pref)
    if CONANINFO not in file_urls:
        raise NotFoundException("Package %s doesn't have the %s file!"
                                % (bare_pref, CONANINFO))
    # Download only CONANINFO, keeping the contents in memory
    downloaded = dict(self._download_files({CONANINFO: file_urls[CONANINFO]},
                                           quiet=True))
    # Unroll generator and decode shas (plain text)
    text = decode_text(downloaded[CONANINFO])
    return ConanInfo.loads(text)
def remove_packages(self, ref, package_ids=None):
    """ Remove any packages specified by package_ids

    With no *package_ids*, every package of the recipe is removed in one
    server call.  With ids, each package's revisions are enumerated and
    deleted one by one.

    :param ref: recipe reference; if it has no revision, the removal is
        applied to every known recipe revision.
    :param package_ids: iterable of package ids, or None/empty for "all".
    :raises PackageNotFoundException: 404 deleting a specific package.
    """
    self.check_credentials()
    if ref.revision is None:
        # Remove the packages from all the RREVs
        revisions = self.get_recipe_revisions(ref)
        refs = [ref.copy_with_rev(rev["revision"]) for rev in revisions]
    else:
        refs = [ref]
    for ref in refs:
        assert ref.revision is not None, "remove_packages needs RREV"
        if not package_ids:
            # Bulk removal of every package of this recipe revision
            url = self.router.remove_all_packages(ref)
            response = self.requester.delete(url, auth=self.auth,
                                             headers=self.custom_headers,
                                             verify=self.verify_ssl)
            if response.status_code != 200:  # Error message is text
                # To be able to access ret.text (ret.content are bytes)
                response.charset = "utf-8"
                raise get_exception_from_error(response.status_code)(response.text)
        else:
            for pid in package_ids:
                pref = PackageReference(ref, pid)
                # Delete every known revision of this package id
                revisions = self.get_package_revisions(pref)
                prefs = [pref.copy_with_revs(ref.revision, rev["revision"])
                         for rev in revisions]
                for pref in prefs:
                    url = self.router.remove_package(pref)
                    response = self.requester.delete(url, auth=self.auth,
                                                     headers=self.custom_headers,
                                                     verify=self.verify_ssl)
                    if response.status_code == 404:
                        raise PackageNotFoundException(pref)
                    if response.status_code != 200:  # Error message is text
                        # To be able to access ret.text (ret.content are bytes)
                        response.charset = "utf-8"
                        raise get_exception_from_error(response.status_code)(response.text)
def remove_packages(self, package_layout, ids_filter=None):
    """Remove binary packages from the cache layout.

    Without *ids_filter* every package (and the recipe-level system_reqs
    file) is removed; otherwise only the listed package ids, failing if
    any of them does not exist.
    """
    if ids_filter:
        # Remove just the specified packages
        for package_id in ids_filter:
            pref = PackageReference(package_layout.ref, package_id)
            if not package_layout.package_exists(pref):
                raise PackageNotFoundException(pref)
            package_layout.package_remove(pref)
            self._remove_file(package_layout.system_reqs_package(pref),
                              package_layout.ref,
                              "%s/%s" % (package_id, SYSTEM_REQS))
    else:
        # Remove all
        path = package_layout.packages()
        # Necessary for short_paths removal
        for package_id in package_layout.package_ids():
            pref = PackageReference(package_layout.ref, package_id)
            package_layout.package_remove(pref)
        self._remove(path, package_layout.ref, "packages")
        self._remove_file(package_layout.system_reqs(),
                          package_layout.ref, SYSTEM_REQS)
def get_package_download_urls(name, version, username, channel, package_id,
                              auth_user):
    """ Get a dict with all packages files and the download url for each one """
    service = ConanService(app.authorizer, app.server_store, auth_user)
    recipe_ref = ConanFileReference(name, version, username, channel)
    pref = PackageReference(recipe_ref, package_id)
    try:
        raw_urls = service.get_package_download_urls(pref)
    except NotFoundException:
        raise PackageNotFoundException(pref)
    # Normalize Windows separators so clients always see forward slashes
    return {fname.replace("\\", "/"): url
            for fname, url in raw_urls.items()}
def get_package_manifest_url(name, version, username, channel, package_id,
                             auth_user):
    """ Get a dict with all files and the download url """
    service = ConanService(app.authorizer, app.server_store, auth_user)
    recipe_ref = ConanFileReference(name, version, username, channel)
    pref = PackageReference(recipe_ref, package_id)
    urls = service.get_package_download_urls(pref, [CONAN_MANIFEST])
    if not urls:
        raise PackageNotFoundException(pref)
    # Normalize Windows separators so clients always see forward slashes
    normalized = {}
    for filename, url in urls.items():
        normalized[filename.replace("\\", "/")] = url
    return normalized
def remove_packages(self, package_layout, ids_filter=None):
    """Remove package folders (plus dirty flags and system_reqs files)
    from the cache layout.

    Without *ids_filter* every package folder is removed; otherwise only
    the listed ids, failing if any of them does not exist.
    """
    if ids_filter:
        # Remove just the specified packages
        for package_id in ids_filter:
            pref = PackageReference(package_layout.ref, package_id)
            if not package_layout.package_exists(pref):
                raise PackageNotFoundException(pref)
            folder = package_layout.package(pref)
            self._remove(folder, package_layout.ref,
                         "package:%s" % package_id)
            self._remove_file(folder + ".dirty", package_layout.ref,
                              "dirty flag")
            self._remove_file(package_layout.system_reqs_package(pref),
                              package_layout.ref,
                              "%s/%s" % (package_id, SYSTEM_REQS))
    else:
        # Remove all
        path = package_layout.packages()
        # Necessary for short_paths removal
        for package in package_layout.conan_packages():
            self._remove(os.path.join(path, package), package_layout.ref,
                         "package folder:%s" % package)
        self._remove(path, package_layout.ref, "packages")
        self._remove_file(package_layout.system_reqs(),
                          package_layout.ref, SYSTEM_REQS)
def get_latest_package_revision(self, pref, auth_user):
    """Return the most recent revision recorded for *pref*.

    :raises PackageNotFoundException: when no revision is recorded.
    """
    self._authorizer.check_read_conan(auth_user, pref.ref)
    latest = self._server_store.get_last_package_revision(pref)
    if latest:
        return latest
    raise PackageNotFoundException(pref, print_rev=True)