def download(self, filepath: str = None) -> str:
    """Download the source, serving it from the checksum cache when possible.

    :param filepath: optional destination path; when None, a file named after
        the source URL is created inside ``self.source_dir``.
    :return: the path of the downloaded (or cache-copied) file.
    """
    if filepath is None:
        self.file = os.path.join(self.source_dir, os.path.basename(self.source))
    else:
        self.file = filepath

    # First check if we already have the source file cached.
    file_cache = FileCache()
    if self.source_checksum:
        # Renamed from "hash" to "expected_digest": avoids shadowing the
        # builtin and makes clear this is the value declared in the recipe.
        algorithm, expected_digest = split_checksum(self.source_checksum)
        cache_file = file_cache.get(algorithm=algorithm, hash=expected_digest)
        if cache_file:
            # We make this copy as the provisioning logic can delete
            # this file and we don't want that.
            shutil.copy2(cache_file, self.file)
            return self.file

    # If not we download and store.
    if snapcraft.internal.common.get_url_scheme(self.source) == "ftp":
        download_urllib_source(self.source, self.file)
    else:
        request = requests.get(self.source, stream=True, allow_redirects=True)
        request.raise_for_status()
        download_requests_stream(request, self.file)

    # We verify the file if source_checksum is defined
    # and we cache the file for future reuse.
    if self.source_checksum:
        algorithm, digest = verify_checksum(self.source_checksum, self.file)
        # Bug fix: cache under the digest actually verified against the
        # downloaded file, not the pre-split value (which also left "digest"
        # unused).  The two are equal when verification passes, but keying on
        # the verified digest is the correct contract.
        file_cache.cache(filename=self.file, algorithm=algorithm, hash=digest)
    return self.file
def download(self):
    """Download the source, serving it from the checksum cache when possible.

    :return: the path of the downloaded (or cache-copied) file.
    """
    # First check if we already have the source file cached.
    file_cache = FileCache()
    if self.source_checksum:
        # Renamed from "hash" to "expected_digest": avoids shadowing the
        # builtin and makes clear this is the value declared in the recipe.
        algorithm, expected_digest = split_checksum(self.source_checksum)
        cache_file = file_cache.get(algorithm=algorithm, hash=expected_digest)
        if cache_file:
            self.file = os.path.join(
                self.source_dir, os.path.basename(cache_file))
            # We make this copy as the provisioning logic can delete
            # this file and we don't want that.
            shutil.copy2(cache_file, self.file)
            return self.file

    # If not we download and store.
    self.file = os.path.join(self.source_dir, os.path.basename(self.source))
    if snapcraft.internal.common.get_url_scheme(self.source) == "ftp":
        download_urllib_source(self.source, self.file)
    else:
        request = requests.get(self.source, stream=True, allow_redirects=True)
        request.raise_for_status()
        download_requests_stream(request, self.file)

    # We verify the file if source_checksum is defined
    # and we cache the file for future reuse.
    if self.source_checksum:
        algorithm, digest = verify_checksum(self.source_checksum, self.file)
        # Bug fix: cache under the digest actually verified against the
        # downloaded file, not the pre-split value (which also left "digest"
        # unused).
        file_cache.cache(filename=self.file, algorithm=algorithm, hash=digest)
    return self.file
def download(self):
    """Fetch ``self.source`` into the source directory.

    FTP URLs go through urllib; every other scheme is streamed via
    ``requests``.  The destination path is recorded on ``self.file``.
    """
    self.file = os.path.join(
        self.source_dir, os.path.basename(self.source))
    if common.get_url_scheme(self.source) == 'ftp':
        # requests has no FTP support, so fall back to urllib.
        download_urllib_source(self.source, self.file)
        return
    response = requests.get(
        self.source, stream=True, allow_redirects=True)
    response.raise_for_status()
    download_requests_stream(response, self.file)
def download(self):
    """Fetch ``self.source`` into the source directory.

    FTP URLs go through urllib; every other scheme is streamed via
    ``requests``.  The destination path is recorded on ``self.file``.
    """
    destination = os.path.join(
        self.source_dir, os.path.basename(self.source))
    self.file = destination
    scheme = snapcraft.internal.common.get_url_scheme(self.source)
    if scheme == 'ftp':
        # requests has no FTP support, so fall back to urllib.
        download_urllib_source(self.source, destination)
    else:
        response = requests.get(
            self.source, stream=True, allow_redirects=True)
        response.raise_for_status()
        download_requests_stream(response, destination)
def pull(self):
    """Run the base pull, then fetch the reference initrd snap.

    If ``self.snap_url`` is a valid URL it is downloaded directly;
    otherwise the initrd snap is fetched from the snap store.
    """
    super().pull()
    logger.info("Using reference initrd: {}".format(self.snap_url))
    # TODO: this should be eventually pulled from snap store
    # for now check if url is valid and use it
    # If not we try to download it from store
    if snapcraft.internal.common.isurl(self.snap_url):
        download_urllib_source(self.snap_url, self.vanilla_initrd_snap)
    else:
        snapcraft.download(
            _INITRD_SNAP_NAME,
            risk="stable",
            track=self.uc_series,
            download_path=self.vanilla_initrd_snap,
            arch=self.initrd_arch,
        )
def test_download_urllib_source(self):
    """download_urllib_source should create the requested destination file."""
    indicators.download_urllib_source(self.source, self.dest_file)
    file_was_created = os.path.exists(self.dest_file)
    self.assertTrue(file_was_created)