def parse_download_link(self, line, in_download):
    """Scan the release assets for the Linux tarball matching the current arch.

    Returns (url, in_download); the last matching asset wins."""
    wanted = "linux-{}.tar.gz".format(self.arch_trans[get_current_arch()])
    url = None
    for asset in line["assets"]:
        asset_url = asset["browser_download_url"]
        if wanted in asset_url:
            in_download = True
            url = asset_url
    return (url, in_download)
def get_metadata_and_check_license(self, result):
    """Extract the download URL from the fetched release JSON, then start install.

    Errors out through the UI on a download failure or an unparsable page."""
    logger.debug("Fetched download page, parsing.")
    page = result[self.download_page]
    error_msg = page.error
    if error_msg:
        logger.error("An error occurred while downloading {}: {}".format(
            self.download_page, error_msg))
        UI.return_main_screen(status_code=1)
    try:
        assets = json.loads(page.buffer.read().decode())["assets"]
        wanted = "linux-{}".format(self.arch_trans[get_current_arch()])
        candidates = [asset["browser_download_url"] for asset in assets
                      if wanted in asset["browser_download_url"]]
        # Keep the last matching asset; an empty list raises IndexError,
        # which is treated as "page not parsable" below.
        download_url = candidates[-1]
    except (json.JSONDecodeError, IndexError):
        logger.error("Can't parse the download URL from the download page.")
        UI.return_main_screen(status_code=1)
    logger.debug("Found download URL: " + download_url)
    self.download_requests.append(DownloadItem(download_url, None))
    self.start_download_and_install()
def is_bucket_uptodate(self, bucket):
    """Check if the bucket is installed and up to date

    The bucket is a list of packages to check if installed.

    Returns True only when every package of the bucket is installed and
    none of them has a pending upgrade.
    """
    logger.debug("Check if {} is uptodate".format(bucket))
    is_installed_and_uptodate = True
    for pkg_name in bucket:
        # "a | b" entries are alternatives: replace the entry with the first
        # alternative available on this platform.
        # NOTE(review): bucket is mutated (remove/append) while being
        # iterated — relies on list-iterator semantics; confirm before
        # refactoring this loop.
        if ' | ' in pkg_name:
            for package in pkg_name.split(' | '):
                if self.is_bucket_available([package]):
                    bucket.remove(pkg_name)
                    bucket.append(package)
                    pkg_name = package
                    break
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't
        # understand that. strip :arch then
        if ":" in pkg_name:
            (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = pkg_without_arch_name
        if pkg_name not in self.cache or not self.cache[pkg_name].is_installed:
            logger.info("{} isn't installed".format(pkg_name))
            is_installed_and_uptodate = False
        elif self.cache[pkg_name].is_upgradable:
            logger.info("We can update {}".format(pkg_name))
            is_installed_and_uptodate = False
    return is_installed_and_uptodate
def parse_download_link(self, line, in_download):
    """Parse Flutter SDK download links"""
    sdk_file = "dartsdk-linux-{}-release.zip".format(
        self.arch_trans[get_current_arch()])
    url = ("https://storage.googleapis.com/dart-archive/channels/stable/"
           "release/{}/sdk/{}".format(line["version"], sdk_file))
    return (url, True)
def post_install(self):
    """Add rust necessary env variables"""
    rustc_bin = os.path.join(self.install_path, "rustc", "bin")
    cargo_bin = os.path.join(self.install_path, "cargo", "bin")
    add_env_to_user(self.name, {
        "PATH": {"value": "{}:{}:{}".format(rustc_bin, cargo_bin,
                                            "$HOME/.cargo/bin")}})
    # adjust for rust: some symlinks magic to have stdlib craft available
    arch_lib_folder = '{}-unknown-linux-gnu'.format(
        self.arch_trans[get_current_arch()])
    lib_folder = os.path.join(self.install_path,
                              'rust-std-{}'.format(arch_lib_folder),
                              'lib', 'rustlib', arch_lib_folder, 'lib')
    arch_dest_lib_folder = os.path.join(self.install_path, 'rustc', 'lib',
                                        'rustlib', arch_lib_folder, 'lib')
    os.mkdir(arch_dest_lib_folder)
    for entry in os.listdir(lib_folder):
        os.symlink(os.path.join(lib_folder, entry),
                   os.path.join(arch_dest_lib_folder, entry))
    UI.delayed_display(DisplayMessage(self.RELOGIN_REQUIRE_MSG.format(self.name)))
def get_sha_and_start_download(self, download_result):
    """Extract checksum and tarball URL from the checksum page, then download.

    Also makes sure the current user is in the arduino group before starting."""
    content = download_result[self.checksum_url].buffer.getvalue().decode()
    pattern = r'.*linux{}.tar.xz'.format(self.arch_trans[get_current_arch()])
    line = re.search(pattern, content).group(0)
    # you get and store url and checksum
    checksum = line.split()[0]
    url = os.path.join(self.checksum_url.rpartition('/')[0], line.split()[1])
    if url is None:
        logger.error("Download page changed its syntax or is not parsable (missing url)")
        UI.return_main_screen(status_code=1)
    if checksum is None:
        logger.error("Download page changed its syntax or is not parsable (missing sha512)")
        UI.return_main_screen(status_code=1)
    logger.debug("Found download link for {}, checksum: {}".format(url, checksum))
    self.download_requests.append(
        DownloadItem(url, Checksum(self.checksum_type, checksum)))
    # add the user to arduino group
    if not self.was_in_arduino_group:
        with futures.ProcessPoolExecutor(max_workers=1) as executor:
            group_job = executor.submit(_add_to_group, self._current_user,
                                        self.ARDUINO_GROUP)
            if not group_job.result():
                UI.return_main_screen(status_code=1)
    self.start_download_and_install()
def get_sha_and_start_download(self, download_result):
    """Locate the arch-specific tarball entry, then hand url + checksum over."""
    page_text = download_result[self.new_download_url].buffer.getvalue().decode()
    match = re.search(
        r'.*linux{}.tar.xz'.format(self.arch_trans[get_current_arch()]),
        page_text)
    line = match.group(0)
    # you get and store url and checksum
    checksum = line.split()[0]
    url = os.path.join(self.new_download_url.rpartition('/')[0],
                       line.split()[1])
    self.check_data_and_start_download(url, checksum)
def is_bucket_installed(self, bucket):
    """Check if the bucket is installed

    The bucket is a list of packages to check if installed.

    Returns True only when every package of the bucket is installed.
    """
    logger.debug("Check if {} is installed".format(bucket))
    is_installed = True
    for pkg_name in bucket:
        # "a | b" entries are alternatives: substitute the first one that is
        # installed for the combined entry.
        # NOTE(review): bucket is mutated (remove/append) while being
        # iterated — relies on list-iterator semantics; confirm before
        # refactoring.
        if ' | ' in pkg_name:
            for package in pkg_name.split(' | '):
                if self.is_bucket_installed([package]):
                    bucket.remove(pkg_name)
                    bucket.append(package)
                    pkg_name = package
                    break
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't
        # understand that. strip :arch then
        if ":" in pkg_name:
            (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = pkg_without_arch_name
        if pkg_name not in self.cache or not self.cache[pkg_name].is_installed:
            logger.info("{} isn't installed".format(pkg_name))
            is_installed = False
    return is_installed
def is_bucket_uptodate(self, bucket):
    """Check if the bucket is installed and up to date

    The bucket is a list of packages to check if installed.

    Returns True when every package of the bucket is installed with no
    pending upgrade, or when a java-equivalent package is found (see below).
    """
    logger.debug("Check if {} is up to date".format(bucket))
    is_installed_and_uptodate = True
    for pkg_name in bucket:
        # "a | b" entries are alternatives: replace the entry with the first
        # alternative available on this platform.
        # NOTE(review): bucket is mutated (remove/append) while being
        # iterated — relies on list-iterator semantics; confirm before
        # refactoring.
        if ' | ' in pkg_name:
            for package in pkg_name.split(' | '):
                if self.is_bucket_available([package]):
                    bucket.remove(pkg_name)
                    bucket.append(package)
                    pkg_name = package
                    break
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't
        # understand that. strip :arch then
        if ":" in pkg_name:
            (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = pkg_without_arch_name
        if pkg_name not in self.cache or not self.cache[pkg_name].is_installed:
            logger.info("{} isn't installed".format(pkg_name))
            is_installed_and_uptodate = False
        elif self.cache[pkg_name].is_upgradable:
            logger.info("We can update {}".format(pkg_name))
            is_installed_and_uptodate = False
        if "openjdk" in pkg_name:
            # NOTE(review): a java-equivalent package flips the whole result
            # back to True, even if an earlier package in the bucket was
            # reported missing or outdated — confirm this override is
            # intentional before relying on it.
            if self.check_java_equiv(pkg_name):
                is_installed_and_uptodate = True
    return is_installed_and_uptodate
def is_installable(self):
    """Return if the framework can be installed on that arch

    Checks, in order: removal-only frameworks, arch restrictions, distro
    restrictions (ubuntu id and version) and package-requirements
    availability. Any detection error unregisters the framework.
    """
    if self.only_for_removal:
        return False
    try:
        if len(self.only_on_archs) > 0:
            # we have some restricted archs, check we support it
            current_arch = get_current_arch()
            if current_arch not in self.only_on_archs:
                logger.debug(
                    "{} only supports {} archs and you are on {}.".format(
                        self.name, self.only_on_archs, current_arch))
                return False
        if self.only_ubuntu:
            # set framework installable only on ubuntu
            if get_current_distro_id() != "ubuntu":
                return False
        if len(self.only_ubuntu_version) > 0:
            current_version = get_current_distro_version()
            if current_version not in self.only_ubuntu_version:
                logger.debug(
                    "{} only supports {} and you are on {}.".format(
                        self.name, self.only_ubuntu_version, current_version))
                return False
        if not RequirementsHandler().is_bucket_available(
                self.packages_requirements):
            return False
    except Exception:
        # except Exception instead of a bare except: so SystemExit and
        # KeyboardInterrupt still propagate instead of being swallowed here.
        logger.error(
            "An error occurred when detecting platform, don't register {}".
            format(self.name))
        return False
    return True
def is_bucket_available(self, bucket):
    """Check if bucket available on the platform

    Returns True when every package of the bucket is either in the apt
    cache, or a foo:arch package whose arch isn't enabled yet (it may
    become available once the foreign arch is added).
    """
    all_in_cache = True
    for pkg_name in bucket:
        # "a | b" entries are alternatives: substitute the first available
        # one for the combined entry.
        # NOTE(review): bucket is mutated (remove/append) while being
        # iterated — relies on list-iterator semantics; confirm before
        # refactoring.
        if ' | ' in pkg_name:
            for package in pkg_name.split(' | '):
                if self.is_bucket_available([package]):
                    bucket.remove(pkg_name)
                    bucket.append(package)
                    pkg_name = package
                    break
        if pkg_name not in self.cache:
            # this can be also a foo:arch and we don't have <arch> added. Tell is may be available
            if ":" in pkg_name:
                # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't
                # understand that. strip :arch then
                (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
                if arch == get_current_arch() and pkg_without_arch_name in self.cache:
                    # false positive, available
                    continue
                elif arch not in get_foreign_archs():
                    # relax the constraint
                    logger.info(
                        "{} isn't available on this platform, but {} isn't enabled. So it may be available "
                        "later on".format(pkg_name, arch))
                    continue
            logger.info(
                "{} isn't available on this platform".format(pkg_name))
            all_in_cache = False
    return all_in_cache
def is_bucket_available(self, bucket):
    """Check if bucket available on the platform

    A package is considered available when it is in the apt cache, or when
    it is a foo:arch entry whose foreign arch isn't enabled yet (it may
    appear once that arch is added).
    """
    all_in_cache = True
    for pkg_name in bucket:
        # "a | b" entries are alternatives: substitute the first available
        # one for the combined entry.
        # NOTE(review): bucket is mutated (remove/append) while being
        # iterated — relies on list-iterator semantics; confirm before
        # refactoring.
        if ' | ' in pkg_name:
            for package in pkg_name.split(' | '):
                if self.is_bucket_available([package]):
                    bucket.remove(pkg_name)
                    bucket.append(package)
                    pkg_name = package
                    break
        if pkg_name not in self.cache:
            # this can be also a foo:arch and we don't have <arch> added. Tell is may be available
            if ":" in pkg_name:
                # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't
                # understand that. strip :arch then
                (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
                if arch == get_current_arch() and pkg_without_arch_name in self.cache:  # false positive, available
                    continue
                elif arch not in get_foreign_archs():  # relax the constraint
                    logger.info("{} isn't available on this platform, but {} isn't enabled. So it may be available "
                                "later on".format(pkg_name, arch))
                    continue
            logger.info("{} isn't available on this platform".format(pkg_name))
            all_in_cache = False
    return all_in_cache
def parse_download_link(self, line, in_download):
    """Parse Twine download links"""
    token = 'linux{}'.format(self.arch_trans[get_current_arch()])
    url = None
    for asset in line["assets"]:
        candidate = asset["browser_download_url"]
        if token in candidate:
            in_download = True
            url = candidate
    return (url, in_download)
def parse_download_link(self, line, in_download):
    """Parse SublimeText download links"""
    url = None
    arch_token = '{}.tar.bz2'.format(self.arch_trans[get_current_arch()])
    if arch_token in line:
        match = re.search(r'also available as a <a href="(.*.tar.bz2)"', line)
        with suppress(AttributeError):
            url = match.group(1)
    return ((url, None), in_download)
def test_is_bucket_uptodate_multi_arch_current_arch(self):
    """Installed bucket should return as being uptodate even if contains multi-arch part with current package"""
    self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
    self.wait_for_callback(self.done_callback)
    multi_arch_name = "testpackage:{}".format(tools.get_current_arch())
    self.assertTrue(self.handler.is_bucket_uptodate([multi_arch_name]))
def test_install_multi_arch_current_arch(self):
    """We install a multi_arch package corresponding to current arch"""
    pkg = "testpackage:{}".format(tools.get_current_arch())
    self.handler.install_bucket([pkg], lambda x: "", self.done_callback)
    self.wait_for_callback(self.done_callback)
    result = self.done_callback.call_args[0][0]
    self.assertEqual(result.bucket, [pkg])
    self.assertIsNone(result.error)
    self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
def parse_download_link(self, line, in_download):
    """Parse Rust download link, expect to find a url"""
    url = None
    marker = '{}-unknown-linux-gnu.tar.gz">'.format(
        self.arch_trans[get_current_arch()])
    if marker in line:
        match = re.search(r'href="(.*)">', line)
        with suppress(AttributeError):
            url = match.group(1)
            logger.debug("Found link: {}".format(url))
    return ((url, None), in_download)
def _really_install_bucket(self, current_bucket):
    """Really install current bucket and bind signals

    Enables any foreign dpkg architecture referenced by a "pkg:arch" entry,
    marks each package of the bucket for install/upgrade, then commits the
    apt cache as root. Returns True on success (or when the bucket is
    already up to date). Raises BaseException when a package can't be
    marked for install.
    """
    bucket = current_bucket["bucket"]
    logger.debug("Starting {} installation".format(bucket))
    # exchange file output for apt and dpkg after the fork() call (open it empty)
    self.apt_fd = tempfile.NamedTemporaryFile(delete=False)
    self.apt_fd.close()
    if self.is_bucket_uptodate(bucket):
        return True
    # Enable any missing foreign architectures before resolving packages.
    need_cache_reload = False
    for pkg_name in bucket:
        if ":" in pkg_name:
            arch = pkg_name.split(":", -1)[-1]
            need_cache_reload = need_cache_reload or add_foreign_arch(arch)
    if need_cache_reload:
        # A new arch was added: refresh the apt cache (requires root).
        with as_root():
            self._force_reload_apt_cache()
            self.cache.update()
            self._force_reload_apt_cache()
    # mark for install and so on
    for pkg_name in bucket:
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't understand that
        # strip :arch then
        if ":" in pkg_name:
            (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = pkg_without_arch_name
        try:
            pkg = self.cache[pkg_name]
            if pkg.is_installed and pkg.is_upgradable:
                logger.debug("Marking {} for upgrade".format(pkg_name))
                pkg.mark_upgrade()
            else:
                logger.debug("Marking {} for install".format(pkg_name))
                pkg.mark_install(auto_fix=False)
        except Exception as msg:
            message = "Can't mark for install {}: {}".format(pkg_name, msg)
            raise BaseException(message)
    # this can raise on installedArchives() exception if the commit() fails
    with as_root():
        self.cache.commit(fetch_progress=self._FetchProgress(
            current_bucket, self.STATUS_DOWNLOADING,
            current_bucket["progress_callback"]),
            install_progress=self._InstallProgress(
                current_bucket, self.STATUS_INSTALLING,
                current_bucket["progress_callback"],
                self._force_reload_apt_cache, self.apt_fd.name))
    return True
def _really_install_bucket(self, current_bucket):
    """Really install current bucket and bind signals

    Steps: skip if already up to date; add any foreign arch found in
    "pkg:arch" entries (refreshing the apt cache as root if one was added);
    mark every package for install or upgrade; finally commit the cache
    as root with progress callbacks. Returns True on success; raises
    BaseException when a package can't be marked.
    """
    bucket = current_bucket["bucket"]
    logger.debug("Starting {} installation".format(bucket))
    # exchange file output for apt and dpkg after the fork() call (open it empty)
    self.apt_fd = tempfile.NamedTemporaryFile(delete=False)
    self.apt_fd.close()
    if self.is_bucket_uptodate(bucket):
        return True
    need_cache_reload = False
    for pkg_name in bucket:
        if ":" in pkg_name:
            arch = pkg_name.split(":", -1)[-1]
            need_cache_reload = need_cache_reload or add_foreign_arch(arch)
    if need_cache_reload:
        # A new foreign arch was enabled: reload + update the cache as root.
        with as_root():
            self._force_reload_apt_cache()
            self.cache.update()
            self._force_reload_apt_cache()
    # mark for install and so on
    for pkg_name in bucket:
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't understand that
        # strip :arch then
        if ":" in pkg_name:
            (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = pkg_without_arch_name
        try:
            pkg = self.cache[pkg_name]
            if pkg.is_installed and pkg.is_upgradable:
                logger.debug("Marking {} for upgrade".format(pkg_name))
                pkg.mark_upgrade()
            else:
                logger.debug("Marking {} for install".format(pkg_name))
                pkg.mark_install(auto_fix=False)
        except Exception as msg:
            message = "Can't mark for install {}: {}".format(pkg_name, msg)
            raise BaseException(message)
    # this can raise on installedArchives() exception if the commit() fails
    with as_root():
        self.cache.commit(fetch_progress=self._FetchProgress(current_bucket,
                                                             self.STATUS_DOWNLOADING,
                                                             current_bucket["progress_callback"]),
                          install_progress=self._InstallProgress(current_bucket,
                                                                 self.STATUS_INSTALLING,
                                                                 current_bucket["progress_callback"],
                                                                 self._force_reload_apt_cache,
                                                                 self.apt_fd.name))
    return True
def parse_download_link(self, line, in_download):
    """Parse SublimeText download links"""
    url = None
    if '.tar.xz' in line:
        pattern = r'href="([^<]*{}.tar.xz)"'.format(
            self.arch_trans[get_current_arch()])
        match = re.search(pattern, line)
        with suppress(AttributeError):
            url = match.group(1)
    return ((url, None), in_download)
def parse_download_link(self, line, in_download):
    """Parse Twine download links"""
    url = None
    # 32-bit download on i386, 64-bit otherwise.
    suffix = "linux32" if get_current_arch() == "i386" else "linux64"
    match = re.search(r'href="(.*)" .*{}'.format(suffix), line)
    with suppress(AttributeError):
        url = match.group(1)
    return ((url, None), False)
def parse_download_link(self, line, in_download):
    """Parse Rust download link, expect to find a url"""
    url = None
    needle = '{}-unknown-linux-gnu.tar.gz">'.format(
        self.arch_trans[get_current_arch()])
    if needle in line:
        found = re.search(r'href="(\S+)">', line)
        with suppress(AttributeError):
            url = found.group(1)
            logger.debug("Found link: {}".format(url))
    return ((url, None), in_download)
def get_checksum_and_start_download(self, download_result):
    """Find the checksum matching the current-arch tarball and start download.

    Fix: the previous loop fell through silently when no line matched (using
    the first token of the *last* line read as checksum, or raising NameError
    on an empty page). We now error out explicitly in that case.
    """
    arch_token = '{}.tar.bz2'.format(self.arch_trans[get_current_arch()])
    for raw_line in download_result[self.checksum_url].buffer:
        line = raw_line.decode()
        if arch_token in line:
            break
    else:
        # No matching line (or empty page): the checksum page layout changed.
        logger.error("Download page changed its syntax or is not parsable (missing checksum)")
        UI.return_main_screen(status_code=1)
    checksum = line.split()[0]
    self.download_requests.append(
        DownloadItem(self.url, Checksum(self.checksum_type, checksum)))
    self.start_download_and_install()
def parse_download_link(self, line, in_download):
    """Parse Terraform download links"""
    url = None
    tag_name = line["tag_name"]
    if tag_name:
        # Tags look like "vX.Y.Z": drop the leading "v" for the release URL.
        url = ("https://releases.hashicorp.com/terraform/{version}/"
               "terraform_{version}_linux_{arch}.zip").format(
                   version=tag_name[1:],
                   arch=self.arch_trans[get_current_arch()])
    return url, in_download
def __init__(self, category):
    """Register the Rust framework entry (i386/amd64 only)."""
    super().__init__(
        name="Rust Lang",
        description=_("The official Rust distribution"),
        is_category_default=True,
        category=category,
        only_on_archs=['i386', 'amd64'],
        download_page="https://www.rust-lang.org/en-US/other-installers.html",
        dir_to_decompress_in_tarball="rust-*")
    # Cache the detected arch for later download-link selection.
    self.arch = get_current_arch()
def parse_download_link(self, line, in_download):
    """Parse Nodejs download link, expect to find a sha1 and a url"""
    url, shasum = (None, None)
    arch = get_current_arch()
    if "linux-{}.tar.xz".format(self.arch_trans[arch]) in line:
        in_download = True
    if in_download:
        # download_page points at .../SHASUMS256.txt.asc; drop that suffix to
        # get the release directory.
        # Fix: the previous code used str.strip("SHASUMS256.txt.asc"), which
        # strips a *character set* from both ends, not the suffix string —
        # it only worked by accident on these URLs.
        suffix = "SHASUMS256.txt.asc"
        base = self.download_page
        if base.endswith(suffix):
            base = base[:-len(suffix)]
        url = base + line.split()[1].rstrip()
        shasum = line.split()[0]
    if url is None and shasum is None:
        return (None, in_download)
    return ((url, shasum), in_download)
def parse_download_link(self, line, in_download):
    """Parse Blender download links"""
    url = None
    if '.tar.bz2' in line:
        # Match the www.blender.org landing link for the current-arch tarball.
        p = re.search(r'href=\"(https://www\.blender\.org/[^<]*{}\.tar\.bz2)/?"'.format(
            self.arch_trans[get_current_arch()]), line)
        with suppress(AttributeError):
            url = p.group(1)
            # Build the md5 file name from the version embedded in the URL,
            # with the dots stripped (e.g. "2.79" -> "release279.md5").
            filename = 'release' + re.search('blender-(.*)-linux', url).group(1).replace('.', '') + '.md5'
            # The checksum file lives on download.blender.org under /release/.
            self.checksum_url = os.path.join(os.path.dirname(url), filename).replace(
                'download', 'release').replace('www', 'download')
            # Rewrite the landing URL into the direct mirror download URL.
            url = url.replace('www.blender.org/download', 'download.blender.org/release')
    return ((url, None), in_download)
def parse_download_link(self, line, in_download):
    """Parse Godot download links

    Also records the unpacked binary name (the zip's basename) in
    required_files_path.
    """
    url = None
    if '{}.zip'.format(self.arch_trans[get_current_arch()]) in line:
        in_download = True
        p = re.search(r'href=\"(.*\.zip)\"', line)
        with suppress(AttributeError):
            url = p.group(1)
            # Renamed from `bin`: don't shadow the builtin of that name.
            binary_match = re.search(r'(Godot.*)\.zip', url)
            self.required_files_path[0] = binary_match.group(1)
    if url is None:
        return (None, in_download)
    return ((url, None), in_download)
def parse_download_link(self, line, in_download):
    """Parse PhantomJS download link, expect to find a sha and a url"""
    url = None
    marker = 'linux-{}.tar.bz2">'.format(self.arch_trans[get_current_arch()])
    if marker in line:
        in_download = True
    if in_download is True:
        match = re.search(r'href="(.*)">', line)
        with suppress(AttributeError):
            url = match.group(1)
    if url is None:
        return (None, in_download)
    return ((url, None), in_download)
def post_install(self):
    """Add rust necessary env variables"""
    rustc_bin = os.path.join(self.install_path, "rustc", "bin")
    cargo_bin = os.path.join(self.install_path, "cargo", "bin")
    add_env_to_user(self.name, {
        "PATH": {"value": "{}:{}".format(rustc_bin, cargo_bin)},
        "LD_LIBRARY_PATH": {"value": os.path.join(self.install_path, "rustc", "lib")}})
    # adjust for rust: some symlinks magic to have stdlib craft available
    arch_lib_folder = '{}-unknown-linux-gnu'.format(
        self.arch_trans[get_current_arch()])
    lib_folder = os.path.join(self.install_path,
                              'rust-std-{}'.format(arch_lib_folder),
                              'lib', 'rustlib', arch_lib_folder, 'lib')
    dest_folder = os.path.join(self.install_path, 'rustc', 'lib', 'rustlib',
                               arch_lib_folder, 'lib')
    for entry in os.listdir(lib_folder):
        os.symlink(os.path.join(lib_folder, entry),
                   os.path.join(dest_folder, entry))
    UI.delayed_display(DisplayMessage(self.RELOGIN_REQUIRE_MSG.format(self.name)))
def get_metadata_and_check_license(self, result):
    """Override this so we can use BS and fetch the checksum separately."""
    logger.debug("Fetched download page, parsing.")
    page = result[self.download_page]
    error_msg = page.error
    if error_msg:
        # NOTE(review): logs self.download_page_url while the page itself was
        # fetched via self.download_page — verify the attribute exists on
        # this class, or this error path raises AttributeError.
        logger.error("An error occurred while downloading {}: {}".format(
            self.download_page_url, error_msg))
        UI.return_main_screen(status_code=1)
    # Navigate the install table: the "Linux (.tar.gz)" row holds one link
    # per architecture; pick the one whose text matches the current arch.
    soup = BeautifulSoup(page.buffer, 'html.parser')
    link = (soup.find('div', class_="install").find(
        'td', class_="inst-type", text="Linux (.tar.gz)").parent.find(
            text=self.arch_trans[get_current_arch()]).parent.parent)
    if link is None:
        logger.error(
            "Can't parse the download URL from the download page.")
        UI.return_main_screen(status_code=1)
    download_url = link.attrs['href']
    checksum_url = download_url + '.sha256'
    logger.debug("Found download URL: " + download_url)
    logger.debug("Downloading checksum first, from " + checksum_url)

    def checksum_downloaded(results):
        # Once the .sha256 companion file is fetched, queue the real download
        # with the checksum attached, then start installing.
        checksum_result = next(iter(
            results.values()))  # Just get the first.
        if checksum_result.error:
            logger.error(checksum_result.error)
            UI.return_main_screen(status_code=1)
        checksum = checksum_result.buffer.getvalue().decode(
            'utf-8').split()[0]
        logger.info('Obtained SHA256 checksum: ' + checksum)
        self.download_requests.append(
            DownloadItem(download_url,
                         checksum=Checksum(ChecksumType.sha256, checksum),
                         ignore_encoding=True))
        self.start_download_and_install()

    DownloadCenter([DownloadItem(checksum_url)],
                   on_done=checksum_downloaded, download=False)
def is_bucket_installed(self, bucket):
    """Check if the bucket is installed

    The bucket is a list of packages to check if installed."""
    logger.debug("Check if {} is installed".format(bucket))
    is_installed = True
    for pkg_name in bucket:
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't
        # understand that. strip :arch then
        if ":" in pkg_name:
            (base_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = base_name
        installed = pkg_name in self.cache and self.cache[pkg_name].is_installed
        if not installed:
            logger.info("{} isn't installed".format(pkg_name))
            is_installed = False
    return is_installed
def parse_download_link(self, line, in_download):
    """Parse Superpowers download links. We parse from the beginning to the end,
    we will always have the latest download link"""
    url = None
    if "-linux-" in line:
        in_download = True
    if in_download:
        match = re.search(
            r'href="(.*-{}.zip)"'.format(self.arch_trans[get_current_arch()]),
            line)
        with suppress(AttributeError):
            relative = match.group(1)
            # Rebuild an absolute URL from the site root of the download page.
            prefix = self.download_page[:self.download_page.find("superpowers/") - 1]
            url = "{}{}".format(prefix, relative)
    return ((url, None), False)
def parse_download_link(self, line, in_download):
    """Parse Go download link, expect to find a sha1 and a url"""
    url, sha1 = (None, None)
    arch_token = "linux-{}".format(get_current_arch().replace("i386", "386"))
    if arch_token in line:
        in_download = True
    if in_download:
        href = re.search(r'href="(.*)">', line)
        with suppress(AttributeError):
            url = href.group(1)
        cell = re.search(r'<td><tt>(\w+)</tt></td>', line)
        with suppress(AttributeError):
            sha1 = cell.group(1)
    # The table row closes: stop capturing.
    if "</tr>" in line:
        in_download = False
    if url is None and sha1 is None:
        return (None, in_download)
    return ((url, sha1), in_download)
def parse_download_link(self, line, in_download):
    """Parse Blender download links"""
    url = None
    if '.tar.bz2' in line:
        # Match the download.blender.org link for the current-arch tarball.
        p = re.search(
            r'href="(http://download.blender.org/[^<]*{}.tar.bz2)"'.format(
                self.arch_trans[get_current_arch()]), line)
        with suppress(AttributeError):
            url = p.group(1)
            # Build the md5 file name from the version embedded in the URL,
            # with the dots stripped (e.g. "2.79" -> "release279.md5"),
            # located next to the tarball.
            filename = 'release' + re.search('blender-(.*)-linux', url).group(1).replace(
                '.', '') + '.md5'
            self.checksum_url = os.path.join(os.path.dirname(url), filename)
            # Fetch the checksum file asynchronously; the actual download is
            # started from its callback.
            DownloadCenter(urls=[DownloadItem(self.checksum_url, None)],
                           on_done=self.get_checksum_and_start_download,
                           download=False)
    return (url, in_download)
def parse_download_link(self, line, in_download):
    """Parse Stencyl download links"""
    url, md5sum = (None, None)
    if ">Linux <" in line:
        in_download = True
    if in_download:
        # 32-bit build on i386, 64-bit otherwise.
        bits = "32" if get_current_arch() == "i386" else "64"
        match = re.search(r'href="(.*)"><.*{}-'.format(bits), line)
        with suppress(AttributeError):
            url = match.group(1)
    if '<div class="spacer"><br/><br/>' in line:
        in_download = False
    if url is None:
        return (None, in_download)
    return ((url, None), in_download)
def parse_download_link(self, line, in_download):
    """Parse Go download link, expect to find a sha and a url"""
    url, sha = (None, None)
    wanted = "linux-{}".format(get_current_arch().replace("i386", "386"))
    if wanted in line:
        in_download = True
    if in_download:
        link_match = re.search(r'href="(.*)">', line)
        if link_match is not None:
            url = link_match.group(1)
        sha_match = re.search(r'<td><tt>(\w+)</tt></td>', line)
        if sha_match is not None:
            sha = sha_match.group(1)
    # The table row closes: stop capturing.
    if "</tr>" in line:
        in_download = False
    if url is None and sha is None:
        return (None, in_download)
    return ((url, sha), in_download)
def get_metadata_and_check_license(self, result):
    """Override this so we can use BS and fetch the checksum separately."""
    logger.debug("Fetched download page, parsing.")
    page = result[self.download_page]
    error_msg = page.error
    if error_msg:
        # NOTE(review): this logs self.download_page_url but the page was
        # fetched via self.download_page — confirm the attribute exists, or
        # this error path raises AttributeError.
        logger.error("An error occurred while downloading {}: {}".format(self.download_page_url, error_msg))
        UI.return_main_screen(status_code=1)
    # Navigate the install table: the "Linux (.tar.gz)" row holds one link
    # per architecture; pick the one whose text matches the current arch.
    soup = BeautifulSoup(page.buffer, 'html.parser')
    link = (soup.find('div', class_="install")
            .find('td', class_="inst-type", text="Linux (.tar.gz)")
            .parent
            .find(text=self.arch_trans[get_current_arch()])
            .parent
            .parent)
    if link is None:
        logger.error("Can't parse the download URL from the download page.")
        UI.return_main_screen(status_code=1)
    download_url = link.attrs['href']
    checksum_url = download_url + '.sha256'
    logger.debug("Found download URL: " + download_url)
    logger.debug("Downloading checksum first, from " + checksum_url)

    def checksum_downloaded(results):
        # Queue the real download with the fetched checksum attached.
        checksum_result = next(iter(results.values()))  # Just get the first.
        if checksum_result.error:
            logger.error(checksum_result.error)
            UI.return_main_screen(status_code=1)
        checksum = checksum_result.buffer.getvalue().decode('utf-8').split()[0]
        logger.info('Obtained SHA256 checksum: ' + checksum)
        self.download_requests.append(DownloadItem(download_url,
                                                   checksum=Checksum(ChecksumType.sha256, checksum),
                                                   ignore_encoding=True))
        self.start_download_and_install()

    DownloadCenter([DownloadItem(checksum_url)], on_done=checksum_downloaded, download=False)
def parse_download_link(self, line, in_download):
    """Parse Rust download link, expect to find a url"""
    url, sha1 = (None, None)
    arch = get_current_arch()
    if "{}-unknown-linux-gnu.tar.gz".format(self.arch_trans[arch]) in line:
        in_download = True
    if in_download:
        link = re.search(r'href="(.*)">', line)
        if link is not None:
            url = link.group(1)
        digest = re.search(r'<td><tt>(\w+)</tt></td>', line)
        if digest is not None:
            sha1 = digest.group(1)
    # The table row closes: stop capturing.
    if "</tr>" in line:
        in_download = False
    if url is None and sha1 is None:
        return (None, in_download)
    return ((url, sha1), in_download)
def get_metadata_and_check_license(self, result):
    """Extract download URL from the release JSON and start the install.

    Fix: download_url is now initialized and validated — previously a page
    with no matching asset left it unbound, raising NameError (which the
    `except (JSONDecodeError, IndexError)` clause did not catch). This now
    matches the sibling implementation's handling.
    """
    logger.debug("Fetched download page, parsing.")
    page = result[self.download_page]
    error_msg = page.error
    if error_msg:
        logger.error("An error occurred while downloading {}: {}".format(self.download_page, error_msg))
        UI.return_main_screen(status_code=1)
    try:
        assets = json.loads(page.buffer.read().decode())["assets"]
        download_url = None
        for asset in assets:
            if "linux-{}".format(self.arch_trans[get_current_arch()]) in asset["browser_download_url"]:
                download_url = asset["browser_download_url"]
        if not download_url:
            # No matching asset: route to the parse-error path below.
            raise IndexError
    except (json.JSONDecodeError, IndexError):
        logger.error("Can't parse the download URL from the download page.")
        UI.return_main_screen(status_code=1)
    logger.debug("Found download URL: " + download_url)
    self.download_requests.append(DownloadItem(download_url, None))
    self.start_download_and_install()
def is_installable(self):
    """Return if the framework can be installed on that arch

    Checks arch restrictions, ubuntu-version restrictions and
    package-requirements availability; any detection error unregisters
    the framework.
    """
    try:
        if len(self.only_on_archs) > 0:
            # we have some restricted archs, check we support it
            current_arch = get_current_arch()
            if current_arch not in self.only_on_archs:
                logger.debug("{} only supports {} archs and you are on {}.".format(
                    self.name, self.only_on_archs, current_arch))
                return False
        if len(self.only_ubuntu_version) > 0:
            current_version = get_current_ubuntu_version()
            if current_version not in self.only_ubuntu_version:
                logger.debug("{} only supports {} and you are on {}.".format(
                    self.name, self.only_ubuntu_version, current_version))
                return False
        if not RequirementsHandler().is_bucket_available(self.packages_requirements):
            return False
    except Exception:
        # except Exception instead of a bare except: so SystemExit and
        # KeyboardInterrupt still propagate instead of being swallowed here.
        logger.error("An error occurred when detecting platform, don't register {}".format(self.name))
        return False
    return True
def _really_install_bucket(self, current_bucket):
    """Really install current bucket and bind signals

    Adds any missing foreign dpkg architecture referenced by a "pkg:arch"
    entry (as root, via dpkg --add-architecture), marks every package of
    the bucket for install/upgrade, then commits the apt cache as root.
    Returns True on success; raises BaseException on arch-add or marking
    failure.

    Fix: the "Can't add foreign foreign architecture" error message had a
    duplicated word.
    """
    bucket = current_bucket["bucket"]
    logger.debug("Starting {} installation".format(bucket))

    # exchange file output for apt and dpkg after the fork() call (open it empty)
    self.apt_fd = tempfile.NamedTemporaryFile(delete=False)
    self.apt_fd.close()

    if self.is_bucket_uptodate(bucket):
        return True

    for pkg_name in bucket:
        if ":" in pkg_name:
            arch = pkg_name.split(":", -1)[-1]
            # try to add the arch
            if arch not in get_foreign_archs() and arch != get_current_arch():
                logger.info("Adding foreign arch: {}".format(arch))
                with open(os.devnull, "w") as f:
                    try:
                        os.seteuid(0)
                        os.setegid(0)
                        if subprocess.call(["dpkg", "--add-architecture", arch],
                                           stdout=f) != 0:
                            msg = "Can't add foreign architecture {}".format(arch)
                            raise BaseException(msg)
                        self.cache.update()
                    finally:
                        # Always drop root privileges again.
                        switch_to_current_user()
                self._force_reload_apt_cache()

    # mark for install and so on
    for pkg_name in bucket:
        # /!\ danger: if current arch == ':appended_arch', on a non multiarch system, dpkg doesn't understand that
        # strip :arch then
        if ":" in pkg_name:
            (pkg_without_arch_name, arch) = pkg_name.split(":", -1)
            if arch == get_current_arch():
                pkg_name = pkg_without_arch_name
        try:
            pkg = self.cache[pkg_name]
            if pkg.is_installed and pkg.is_upgradable:
                logger.debug("Marking {} for upgrade".format(pkg_name))
                pkg.mark_upgrade()
            else:
                logger.debug("Marking {} for install".format(pkg_name))
                pkg.mark_install(auto_fix=False)
        except Exception as msg:
            message = "Can't mark for install {}: {}".format(pkg_name, msg)
            raise BaseException(message)

    # this can raise on installedArchives() exception if the commit() fails
    try:
        os.seteuid(0)
        os.setegid(0)
        self.cache.commit(fetch_progress=self._FetchProgress(
            current_bucket, self.STATUS_DOWNLOADING,
            current_bucket["progress_callback"]),
            install_progress=self._InstallProgress(
                current_bucket, self.STATUS_INSTALLING,
                current_bucket["progress_callback"],
                self._force_reload_apt_cache, self.apt_fd.name))
    finally:
        # Always drop root privileges again.
        switch_to_current_user()
    return True
def parse_download_link(self, line, in_download):
    """Parse Dart SDK download links"""
    in_download = False
    if '(stable)' in line:
        version_match = re.search(r"([\d\.]+)( )*\(stable\)", line)
        if version_match is not None:
            in_download = True
            # Record the checksum URL for the stable SDK of this arch.
            self.new_download_url = (
                "https://storage.googleapis.com/dart-archive/channels/stable/"
                "release/{}/sdk/"
                "dartsdk-linux-{}-release.zip"
                ".sha256sum".format(version_match.group(1),
                                    self.arch_trans[get_current_arch()]))
    return ((None, None), in_download)
def parse_download_link(self, line, in_download):
    """We have persistent links for Stencyl, return it right away"""
    permanent_url = self.PERM_DOWNLOAD_LINKS[get_current_arch()]
    return ((permanent_url, None), in_download)
def test_get_current_arch(self):
    """Current arch is reported"""
    with self.create_dpkg("echo fooarch"):
        reported = get_current_arch()
        self.assertEqual(reported, "fooarch")
def test_get_current_arch_twice(self):
    """Current arch is reported twice and the same"""
    with self.create_dpkg("echo fooarch"):
        # Second call exercises the cached path and must agree with the first.
        for _ in range(2):
            self.assertEqual(get_current_arch(), "fooarch")
def test_is_bucket_uptodate_multi_arch_current_arch(self):
    """Installed bucket should return as being uptodate even if contains multi-arch part with current package"""
    self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
    self.wait_for_callback(self.done_callback)
    pkg = "testpackage:{}".format(tools.get_current_arch())
    self.assertTrue(self.handler.is_bucket_uptodate([pkg]))
def test_is_bucket_available_multi_arch_current_arch(self):
    """We return a package is available on the current platform"""
    bucket = ['testpackage:{}'.format(tools.get_current_arch())]
    self.assertTrue(self.handler.is_bucket_available(bucket))