def test_download_existing_file(self, url, content):
    """Verify an existing remote file downloads and its first line matches."""
    target = 'local_file'
    DownloadHelper.download_file(url, target)
    assert os.path.isfile(target)
    with open(target) as fp:
        first_line = fp.readline().strip()
    assert first_line == content
def download_remote_sources(self):
    """
    Download every source that is specified as a URL.

    First makes a best-effort attempt to fetch old sources from the
    Fedora lookaside cache (failures are only logged), then downloads
    each URL source into the sources location unless a file of the
    same name is already present.

    :return: None
    """
    try:
        # best effort: pre-populate sources from the Fedora lookaside cache
        LookasideCacheHelper.download('fedpkg', os.path.dirname(self.path),
                                      self.get_package_name())
    except LookasideCacheError as e:
        logger.debug("Downloading sources from lookaside cache failed. "
                     "Reason: '{}'.".format(str(e)))
    # keep only sources that carry a URL scheme
    urls = [src for src in self.sources if urllib.parse.urlparse(src).scheme]
    for url in urls:
        destination = os.path.join(self.sources_location, os.path.basename(url))
        if os.path.isfile(destination):
            continue
        logger.debug("File '%s' doesn't exist locally, downloading it.", destination)
        try:
            DownloadHelper.download_file(url, destination)
        except DownloadError as e:
            raise RebaseHelperError("Failed to download file from URL {}. "
                                    "Reason: '{}'. ".format(url, str(e)))
def download_remote_sources(self):
    """
    Download all sources that contain a URL instead of a plain file name.

    Old sources are first requested from the Fedora lookaside cache; an
    error there is logged and ignored. Each remaining URL source is then
    fetched into the sources location if it is not already on disk.

    :return: None
    """
    try:
        # try to download old sources from Fedora lookaside cache
        LookasideCacheHelper.download('fedpkg', os.path.dirname(self.path),
                                      self.get_package_name())
    except LookasideCacheError as e:
        logger.debug("Downloading sources from lookaside cache failed. "
                     "Reason: '{}'.".format(str(e)))
    for source in self.sources:
        # skip sources that are not URLs
        if not urllib.parse.urlparse(source).scheme:
            continue
        local_file = os.path.join(self.sources_location, os.path.basename(source))
        if os.path.isfile(local_file):
            continue
        logger.debug("File '%s' doesn't exist locally, downloading it.", local_file)
        try:
            DownloadHelper.download_file(source, local_file)
        except DownloadError as e:
            raise RebaseHelperError("Failed to download file from URL {}. "
                                    "Reason: '{}'. ".format(source, str(e)))
def _get_initial_sources_list(self):
    """Return all sources mentioned in the SPEC file.

    :return: tuple (list of paths of all sources, list of paths of archive sources)
    """
    sources = []
    tar_sources = []
    # type 1 entries are regular Sources (as opposed to patches)
    sources_list = [x for x in self.spc.sources if x[2] == 1]
    remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')
    # src is a tuple of (source, index of source, type of source (patch, source));
    # the unused enumerate() index was removed
    for src in sorted(sources_list, key=lambda source: source[1]):
        abs_path = os.path.join(self.sources_location, os.path.basename(src[0]).strip())
        sources.append(abs_path)
        archive = [x for x in Archive.get_supported_archives() if src[0].endswith(x)]
        # if the source is a remote archive missing locally, download it
        if archive:
            if remote_files_re.search(src[0]) and self.download and not os.path.isfile(abs_path):
                logger.debug("Tarball is not in absolute path {} "
                             "trying to download one from URL {}".format(abs_path, src[0]))
                logger.info("Tarball is not in absolute path. Trying to download it from URL")
                try:
                    DownloadHelper.download_file(src[0], abs_path)
                except DownloadError as e:
                    raise RebaseHelperError("Failed to download file from URL {}. "
                                            "Reason: '{}'. ".format(src[0], str(e)))
            tar_sources.append(abs_path)
    return sources, tar_sources
def test_progress_float(self, monkeypatch):
    """Check that download progress renders correctly for float size arguments."""
    captured = StringIO()
    monkeypatch.setattr("sys.stdout", captured)
    # freeze the clock so the ETA computation is deterministic
    monkeypatch.setattr("time.time", lambda: 10.0)
    DownloadHelper.progress(100.0, 25.0, 0.0)
    assert captured.getvalue() == "\r 25%[=======> ] 25.00 eta 00:00:30 "
def test_progress_unknown_total_size(self, monkeypatch):
    """Check that download progress renders correctly when total size is unknown."""
    captured = StringIO()
    monkeypatch.setattr("sys.stdout", captured)
    # freeze the clock so the elapsed-time output is deterministic
    monkeypatch.setattr("time.time", lambda: 10.0)
    DownloadHelper.progress(-1, 1024 * 1024, 0.0)
    assert captured.getvalue() == "\r [ <=> ] 1.00M in 00:00:10 "
def test_progress(self, total, downloaded, output, monkeypatch):
    """Check that download progress renders correctly for integer size arguments."""
    captured = StringIO()
    monkeypatch.setattr('sys.stdout', captured)
    # freeze the clock so the rendered times are deterministic
    monkeypatch.setattr('time.time', lambda: 10.0)
    DownloadHelper.progress(total, downloaded, 0.0)
    assert captured.getvalue() == output
def test_progress_float(self, monkeypatch):
    """Check the progress line produced when the size arguments are floats."""
    out = StringIO()
    monkeypatch.setattr('sys.stdout', out)
    # pin time.time() so the remaining-time estimate is stable
    monkeypatch.setattr('time.time', lambda: 10.0)
    DownloadHelper.progress(100.0, 25.0, 0.0)
    assert out.getvalue() == ' 25% (00:00:30 remaining)\r'
def test_progress_float(self, monkeypatch):
    """Check the progress bar produced when the size arguments are floats."""
    out = StringIO()
    monkeypatch.setattr('sys.stdout', out)
    # pin time.time() so the ETA is stable
    monkeypatch.setattr('time.time', lambda: 10.0)
    DownloadHelper.progress(100.0, 25.0, 0.0)
    expected = '\r 25%[=======> ] 25.00 eta 00:00:30 '
    assert out.getvalue() == expected
def test_progress_unknown_total_size(self, monkeypatch):
    """Check the progress bar produced when the total download size is unknown."""
    out = StringIO()
    monkeypatch.setattr('sys.stdout', out)
    # pin time.time() so the elapsed time is stable
    monkeypatch.setattr('time.time', lambda: 10.0)
    DownloadHelper.progress(-1, 1024 * 1024, 0.0)
    expected = '\r [ <=> ] 1.00M in 00:00:10 '
    assert out.getvalue() == expected
def test_download_non_existing_file_FTP(self):
    """Downloading a nonexistent FTP file raises DownloadError and leaves no file."""
    url = "ftp://ftp.isc.org/isc/bind9/9.10.3-P5/srcid"
    local = os.path.basename(url)
    with pytest.raises(DownloadError):
        DownloadHelper.download_file(url, local)
    assert not os.path.isfile(local)
def test_download_non_existing_file_FTP(self):
    """A missing FTP file must raise DownloadError and must not leave a partial file."""
    missing_url = 'ftp://ftp.isc.org/isc/bind9/9.10.3-P5/srcid'
    destination = os.path.basename(missing_url)
    with pytest.raises(DownloadError):
        DownloadHelper.download_file(missing_url, destination)
    assert not os.path.isfile(destination)
def test_download_existing_file_FTP(self):
    """Downloading an existing FTP file stores the expected content locally."""
    url = "ftp://ftp.isc.org/isc/bind9/9.10.4-P1/srcid"
    local = os.path.basename(url)
    expected = "SRCID=adfc588"
    DownloadHelper.download_file(url, local)
    assert os.path.isfile(local)
    with open(local) as fp:
        assert fp.read().strip() == expected
def test_download_existing_file_HTTP(self):
    """Downloading an existing HTTP file stores the expected content locally."""
    url = "http://fedoraproject.org/static/hotspot.txt"
    local = os.path.basename(url)
    expected = "OK"
    DownloadHelper.download_file(url, local)
    assert os.path.isfile(local)
    with open(local) as fp:
        assert fp.read().strip() == expected
def test_download_non_existing_file_HTTPS(self):
    """
    Test downloading NON existing file via HTTPS

    :return:
    """
    KNOWN_URL = 'https://ftp.isc.org/isc/bind9/9.10.3-P5/srcid'
    LOCAL_FILE = os.path.basename(KNOWN_URL)
    # the unused KNOWN_URL_CONTENT constant was removed: a failed download
    # has no content to compare against
    with pytest.raises(DownloadError):
        DownloadHelper.download_file(KNOWN_URL, LOCAL_FILE)
    # a failed download must not leave a partial file behind
    assert not os.path.isfile(LOCAL_FILE)
def test_download_existing_file_HTTP(self):
    """An existing HTTP file downloads successfully with the known content."""
    source_url = 'http://fedoraproject.org/static/hotspot.txt'
    destination = os.path.basename(source_url)
    DownloadHelper.download_file(source_url, destination)
    assert os.path.isfile(destination)
    with open(destination) as fp:
        content = fp.read().strip()
    assert content == 'OK'
def test_download_existing_file_FTP(self):
    """An existing FTP file downloads successfully with the known content."""
    source_url = 'ftp://ftp.isc.org/isc/bind9/9.10.4-P1/srcid'
    destination = os.path.basename(source_url)
    DownloadHelper.download_file(source_url, destination)
    assert os.path.isfile(destination)
    with open(destination) as fp:
        content = fp.read().strip()
    assert content == 'SRCID=adfc588'
def test_download_existing_file_HTTPS(self):
    """Downloading an existing file via HTTPS stores the expected content locally."""
    url = 'https://ftp.isc.org/isc/bind9/9.10.4-P1/srcid'
    local = os.path.basename(url)
    expected = 'SRCID=adfc588'
    DownloadHelper.download_file(url, local)
    assert os.path.isfile(local)
    with open(local) as fp:
        assert fp.read().strip() == expected
def test_download_non_existing_file_HTTPS(self):
    """
    Test downloading NON existing file via HTTPS

    :return:
    """
    KNOWN_URL = "https://ftp.isc.org/isc/bind9/9.10.3-P5/srcid"
    LOCAL_FILE = os.path.basename(KNOWN_URL)
    # the unused KNOWN_URL_CONTENT constant was removed: a failed download
    # has no content to compare against
    with pytest.raises(DownloadError):
        DownloadHelper.download_file(KNOWN_URL, LOCAL_FILE)
    # a failed download must not leave a partial file behind
    assert not os.path.isfile(LOCAL_FILE)
def test_download_existing_file_of_unknown_length_FTP(self):
    """A file served without a length header still downloads correctly over FTP."""
    source_url = 'ftp://ftp.gnupg.org/README'
    destination = os.path.basename(source_url)
    DownloadHelper.download_file(source_url, destination)
    assert os.path.isfile(destination)
    with open(destination) as fp:
        first_line = fp.readline().strip()
    assert first_line == 'Welcome hacker!'
def test_download_existing_file_of_unknown_length_FTP(self):
    """Downloading an FTP file of unknown length stores the expected content."""
    url = "ftp://ftp.gnupg.org/README"
    local = os.path.basename(url)
    expected = "Welcome hacker!"
    DownloadHelper.download_file(url, local)
    assert os.path.isfile(local)
    with open(local) as fp:
        assert fp.readline().strip() == expected
def test_download_existing_file_of_unknown_length_HTTPS(self):
    """Downloading an HTTPS resource of unknown length stores the expected content."""
    commit = "cf5ae2989a32c391d7769933e0267e6fbfae8e14"
    url = "https://git.kernel.org/cgit/linux/kernel/git/stable/linux-stable.git/patch/?id={}".format(commit)
    local = "{}.patch".format(commit)
    expected = "From {} Mon Sep 17 00:00:00 2001".format(commit)
    DownloadHelper.download_file(url, local)
    assert os.path.isfile(local)
    with open(local) as fp:
        assert fp.readline().strip() == expected
def _get_commit_hash_from_github(cls, spec_file):
    """
    Tries to find a commit using Github API

    :param spec_file: SPEC file to base the search on
    :return: SHA of a commit, or None
    """
    m = re.match(r'^https?://github\.com/(?P<owner>[\w-]+)/(?P<project>[\w-]+)/.*$',
                 spec_file.sources[0])
    if not m:
        return None
    baseurl = 'https://api.github.com/repos/{owner}/{project}'.format(**m.groupdict())
    # try to get tag name from a release matching version
    r = DownloadHelper.request('{}/releases'.format(baseurl))
    if r is None:
        return None
    if not r.ok:
        if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
            logger.warning("Rate limit exceeded on Github API! Try again later.")
        return None
    data = r.json()
    version = spec_file.get_version()
    tag_name = None
    for release in data:
        # 'name' can be null or missing in the API response;
        # default to '' to avoid "in None" raising TypeError
        if version in (release.get('name') or ''):
            tag_name = release.get('tag_name')
            break
    r = DownloadHelper.request('{}/tags'.format(baseurl))
    if r is None:
        return None
    if not r.ok:
        if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
            logger.warning("Rate limit exceeded on Github API! Try again later.")
        return None
    data = r.json()
    for tag in data:
        name = tag.get('name')
        if tag_name:
            if name != tag_name:
                continue
        else:
            # no specific tag name, try common tag names
            if name not in [version, 'v{}'.format(version)]:
                continue
        commit = tag.get('commit')
        if commit:
            return commit.get('sha')
    return None
def test_download_existing_file_of_unknown_length_HTTPS(self):
    """An HTTPS patch served without a length header downloads with the expected header line."""
    sha = 'cf5ae2989a32c391d7769933e0267e6fbfae8e14'
    source_url = 'https://git.kernel.org/cgit/linux/kernel/git/stable/linux-stable.git/patch/?id={}'.format(sha)
    destination = '{}.patch'.format(sha)
    DownloadHelper.download_file(source_url, destination)
    assert os.path.isfile(destination)
    with open(destination) as fp:
        first_line = fp.readline().strip()
    assert first_line == 'From {} Mon Sep 17 00:00:00 2001'.format(sha)
def _get_version(cls, package_name):
    """Query PyPI for the latest version of the given package.

    If the first lookup fails, retries with any 'python-'/'python2-'/
    'python3-' distribution prefix stripped from the name.

    :return: version string, or None on failure
    """
    response = DownloadHelper.request('{}/{}/json'.format(cls.API_URL, package_name))
    if response is None or not response.ok:
        # retry without the python prefix used by distribution packaging
        package_name = re.sub(r'^python[23]?-', '', package_name)
        response = DownloadHelper.request('{}/{}/json'.format(cls.API_URL, package_name))
        if response is None or not response.ok:
            return None
    payload = response.json()
    try:
        return payload['info']['version']
    except KeyError:
        return None
def _get_version(cls, package_name):
    """Query rubygems.org for the latest version of the given gem.

    If the first lookup fails, retries with the 'rubygem-' distribution
    prefix stripped from the name.

    :return: version string, or None on failure
    """
    # special-case "ruby", as https://rubygems.org/api/v1/gems/ruby.json returns nonsense
    if package_name == 'ruby':
        return None
    response = DownloadHelper.request('{}/{}.json'.format(cls.API_URL, package_name))
    if response is None or not response.ok:
        # retry without the rubygem prefix used by distribution packaging
        package_name = re.sub(r'^rubygem-', '', package_name)
        response = DownloadHelper.request('{}/{}.json'.format(cls.API_URL, package_name))
        if response is None or not response.ok:
            return None
    return response.json().get('version')
def test_keyboard_interrupt_situation(self, monkeypatch):
    """The partially downloaded file must be removed when KeyboardInterrupt fires mid-download."""
    url = 'https://ftp.isc.org/isc/bind9/9.10.4-P1/srcid'
    local = os.path.basename(url)

    def interrupter(*args, **kwargs):
        raise KeyboardInterrupt

    # patch a call made inside the actual download loop so the
    # interrupt is raised while the transfer is in progress
    monkeypatch.setattr('time.time', interrupter)
    with pytest.raises(KeyboardInterrupt):
        DownloadHelper.download_file(url, local)
    assert not os.path.exists(local)
def test_keyboard_interrupt_situation(self, monkeypatch):
    """An interrupted download must clean up the local file and re-raise KeyboardInterrupt."""
    source_url = "https://ftp.isc.org/isc/bind9/9.10.4-P1/srcid"
    destination = os.path.basename(source_url)

    def raise_interrupt(*args, **kwargs):
        raise KeyboardInterrupt

    # time.time() is called inside the download loop, so patching it
    # triggers the interrupt while the transfer is running
    monkeypatch.setattr("time.time", raise_interrupt)
    with pytest.raises(KeyboardInterrupt):
        DownloadHelper.download_file(source_url, destination)
    assert not os.path.exists(destination)
def _get_version_using_distro_api(cls, package_name):
    """Look up the upstream version of a Fedora package via the distro-mapping API.

    :return: version string, or None on failure
    """
    response = DownloadHelper.request('{}/project/Fedora/{}'.format(cls.API_URL,
                                                                    package_name))
    if response is None or not response.ok:
        return None
    return response.json().get('version')
def _get_initial_sources_list(self):
    """Return all sources mentioned in the SPEC file.

    :return: tuple (list of paths of all sources, list of paths of archive sources)
    """
    sources = []
    tar_sources = []
    # type 1 entries are regular Sources (as opposed to patches)
    sources_list = [x for x in self.spc.sources if x[2] == 1]
    remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')
    # src is a tuple of (source, index of source, type of source (patch, source));
    # the unused enumerate() index was removed
    for src in sorted(sources_list, key=lambda source: source[1]):
        abs_path = os.path.join(self.sources_location, os.path.basename(src[0]).strip())
        sources.append(abs_path)
        archive = [x for x in Archive.get_supported_archives() if src[0].endswith(x)]
        # if the source is a remote archive, download it
        if archive:
            if remote_files_re.search(src[0]) and self.download:
                DownloadHelper.download_file(src[0], abs_path)
            tar_sources.append(abs_path)
    return sources, tar_sources
def _get_initial_sources_list(self):
    """
    Return all regular sources mentioned in the SPEC file.

    Remote sources are downloaded (when downloading is enabled) and
    Source0 is always placed first in the resulting list.

    :return: list of absolute paths of all regular sources
    """
    sources = []
    # type 1 entries are regular Sources (as opposed to patches)
    sources_list = [x for x in self.spc.sources if x[2] == 1]
    remote_files_re = re.compile(r'(http:|https:|ftp:)//.*')
    # the unused enumerate() index was removed
    for src in sources_list:
        abs_path = os.path.join(self.sources_location, os.path.basename(src[0]).strip())
        # if the source is a remote file, download it
        if remote_files_re.search(src[0]) and self.download:
            DownloadHelper.download_file(src[0], abs_path)
        # the Source0 has to be at the beginning!
        if src[1] == 0:
            sources.insert(0, abs_path)
        else:
            sources.append(abs_path)
    return sources
def _get_version(cls, package_name):
    """Query Hackage for the preferred version of the given package.

    Strips a leading 'ghc-' prefix used by distribution packaging.

    :return: version string, or None on failure
    """
    if package_name.startswith('ghc-'):
        package_name = package_name.replace('ghc-', '', 1)
    r = DownloadHelper.request('{}/package/{}/preferred'.format(cls.API_URL, package_name),
                               headers={'Accept': 'application/json'})
    if r is None or not r.ok:
        return None
    data = r.json()
    versions = data.get('normal-version')
    # guard against a missing or empty 'normal-version' list, which
    # previously raised TypeError/IndexError on versions[0]
    if not versions:
        return None
    return versions[0]
def _get_version(cls, package_name):
    """Query the npm registry for the latest published version of a package.

    Strips a leading 'nodejs-' prefix used by distribution packaging.

    :return: version string, or None on failure
    """
    # gets the package name format needed in npm registry
    if package_name.startswith('nodejs-'):
        package_name = package_name.replace('nodejs-', '')
    r = DownloadHelper.request('{}/{}'.format(cls.API_URL, package_name))
    if r is None or not r.ok:
        return None
    data = r.json()
    try:
        return data.get('dist-tags').get('latest')
    except AttributeError:
        # a missing 'dist-tags' key yields None, and None.get() raises
        # AttributeError (the previous `except TypeError` never matched)
        return None
def _download_scratch_build(cls, task_list, dir_name):
    """Download all output files of the given Koji scratch-build tasks.

    :param task_list: iterable of Koji task IDs
    :param dir_name: directory to store the downloaded files in
    :return: tuple (list of downloaded RPM paths, list of downloaded build.log paths)
    """
    session = cls._session_maker()
    rpms = []
    logs = []
    for task_id in task_list:
        # lazy %-args: the message is only formatted if the record is emitted
        logger.info('Downloading packaged for %i taskID', task_id)
        # the pointless single-element `tasks = [task]` wrapper loop was removed
        task = session.getTaskInfo(task_id)
        base_path = koji.pathinfo.taskrelpath(task_id)
        output = session.listTaskOutput(task['id'])
        for filename in output:
            logger.info('Downloading file %s', filename)
            downloaded_file = os.path.join(dir_name, filename)
            DownloadHelper.download_file(cls.scratch_url + base_path + '/' + filename,
                                         downloaded_file)
            if filename.endswith('.rpm'):
                rpms.append(downloaded_file)
            if filename.endswith('build.log'):
                logs.append(downloaded_file)
    session.logout()
    return rpms, logs
def _get_version(cls, package_name):
    """Query metacpan for the latest version of a Perl distribution.

    Converts a Fedora 'perl-Foo-Bar' package name into the 'Foo::Bar'
    module name format used by metacpan.

    :return: version string, or None if not found or not the latest release
    """
    if package_name.startswith('perl-'):
        package_name = package_name.replace('perl-', '', 1)
        package_name = package_name.replace('-', '::')
    response = DownloadHelper.request('{}/download_url/{}'.format(cls.API_URL, package_name))
    if response is None or not response.ok:
        return None
    data = response.json()
    # only report a version when metacpan marks it as the latest release
    return data.get('version') if data.get('status') == 'latest' else None
def _get_version_using_pattern_api(cls, package_name):
    """Query the release-monitoring pattern API for the package version.

    Several projects may match the pattern; the highest version among
    exact-name matches is returned.

    :return: version string, or None on failure
    """
    response = DownloadHelper.request('{}/projects'.format(cls.API_URL),
                                      params=dict(pattern=package_name))
    if response is None or not response.ok:
        return None
    data = response.json()
    try:
        candidates = [p['version'] for p in data['projects']
                      if p['name'] == package_name and p['version']]
    except KeyError:
        return None
    if not candidates:
        return None
    # multiple projects can match; pick the highest version of all of them
    return max(candidates, key=parse_version)
def test_download_source(self):
    """The known source URL downloads and matches the known content exactly."""
    DownloadHelper.download_file(self.KNOWN_URL, self.LOCAL_FILE)
    assert os.path.isfile(self.LOCAL_FILE)
    with open(self.LOCAL_FILE) as fp:
        content = fp.read()
    assert content == self.KNOWN_URL_CONTENT
def test_download_non_existing_file(self, url):
    """Downloading a nonexistent URL raises DownloadError and leaves no file behind."""
    target = 'local_file'
    with pytest.raises(DownloadError):
        DownloadHelper.download_file(url, target)
    assert not os.path.isfile(target)