def mock_root(self):
    """Return a memoized Root resource backed by the on-disk JSON fixture.

    Patches ``Root._request`` so that, instead of talking to Jenkins,
    every request loads the fixture file located by
    ``self._get_fixture_path([])``.  The mocked root is built once and
    cached on ``self.root``.
    """
    if not self.root:
        def _request_from_fixture(resource):
            # Use a context manager so the fixture file handle is
            # closed; the original lambda leaked an open file per call.
            with open(self._get_fixture_path([])) as f:
                return json.loads(f.read())

        Root._request = _request_from_fixture
        self.root = Root('')
    return self.root
def init(self):
    """Populate ``self.cached_settings`` with job names per Jenkins URL.

    On the first run, reads the bundled ``fget.yaml`` settings file,
    asks every configured Jenkins instance for its jobs, and writes the
    result to a plain-text cache file under ``self.cache_dir``.  On
    later runs the cache file is parsed instead of hitting the network.

    Cache file format: a line starting with ``http://`` opens a new
    URL section; every following line is one job name in that section.
    """
    settings_filename = 'fget.yaml'
    cached_filename = 'fget.jobs'
    cached_settings_file = os.path.join(self.cache_dir, cached_filename)
    self.cached_settings = {}
    if not os.path.isfile(cached_settings_file):
        fgetprint('Initiating. Please wait...')
        settings_file = \
            pkg_resources.resource_filename('fget', settings_filename)
        with open(settings_file) as f:
            # safe_load: the settings file only needs plain YAML;
            # yaml.load() would allow arbitrary object construction.
            settings = yaml.safe_load(f.read())
        for url in settings.get('JENKINS_URLS', []):
            url = url.strip('/')
            fgetprint('Retrieving jobs from {0}'.format(url))
            root_resource = Root(url)
            for job in root_resource.get_jobs():
                # setdefault replaces the explicit membership check.
                self.cached_settings.setdefault(url, []).append(
                    str(job['name']))
        with open(cached_settings_file, 'w') as f:
            for key in self.cached_settings:
                f.write(key + '\n')
                for value in self.cached_settings[key]:
                    f.write(value + '\n')
        fgetprint('Initiating. Finished.')
    else:
        with open(cached_settings_file) as f:
            for line in f:
                if line.startswith('http://'):
                    # New URL section starts here.
                    url = line.strip()
                    self.cached_settings[url] = []
                    continue
                self.cached_settings[url].append(line.strip())
import json import os import pycurl import cStringIO from fget.resource.root import Root parser = ArgumentParser(description='Fuel artifact downloader') parser.add_argument('-u', '--url', required=True) parser.add_argument('-d', '--dir', required=True) args = parser.parse_args() print 'Generating root fixtures...' root = Root(args.url) with open(os.path.join(args.dir, 'root.json'), 'w') as f: f.write(json.dumps(root._request(), sort_keys=True, indent=4)) print 'Generating job fixtures...' max_jobs = 20 max_builds = 40 i = 0 for job in root.get_jobs(): if ('8.0' not in job.get_name() or any(k in job.get_name() for k in ('system', 'proposed', 'test'))): continue print 'job: {0}'.format(job.get_name())
from argparse import ArgumentParser import json import os import pycurl import cStringIO from fget.resource.root import Root parser = ArgumentParser(description='Fuel artifact downloader') parser.add_argument('-u', '--url', required=True) parser.add_argument('-d', '--dir', required=True) args = parser.parse_args() print 'Generating root fixtures...' root = Root(args.url) with open(os.path.join(args.dir, 'root.json'), 'w') as f: f.write(json.dumps(root._request(), sort_keys=True, indent=4)) print 'Generating job fixtures...' max_jobs = 20 max_builds = 40 i = 0 for job in root.get_jobs(): if ('8.0' not in job.get_name() or any(k in job.get_name() for k in ('system', 'proposed', 'test'))): continue
def main(job, build=None, iso=False, author=None):
    """Download build artifacts (and optionally the ISO) for a Jenkins job.

    :param job: Jenkins job name; must appear in the cached settings.
    :param build: build number to fetch; defaults to the last
        successful build when neither ``build`` nor ``author`` is given.
    :param iso: when True, also download the ISO referenced by any
        ``*.iso.data.txt`` artifact via its ``HTTP_LINK`` line.
    :param author: when given, pick the build triggered by this author
        (takes precedence over ``build``).

    Exits with status 1 when the job, the build, or any artifacts
    cannot be found.
    """
    artifacts_dir = os.path.join(cache_dir, job)
    if not os.path.isdir(artifacts_dir):
        os.makedirs(artifacts_dir)
    url = None
    for u, jobs in settings.get_settings().items():
        if job in jobs:
            url = u
            break
    else:
        fgetprint('Job not found!', error=True)
        sys.exit(1)
    # BUG FIX: the original assigned ``build = None`` here, clobbering
    # the caller-supplied build number and making the ``elif build:``
    # branch below unreachable.  The assignment is removed.
    root_resource = Root(url)
    job = root_resource.get_job(job)
    if author:
        build = job.get_build_by_author(author)
    elif build:
        build = job.get_build(build)
    else:
        build = job.get_last_successful_build()
    if not build:
        fgetprint('Build not found!', error=True)
        sys.exit(1)
    artifacts = build.get_artifacts()
    build_dir = os.path.join(artifacts_dir, str(build.get_number()))
    if not os.path.exists(build_dir):
        fgetprint('Creating directory: {0}'.format(build_dir))
        os.makedirs(build_dir)
    if artifacts:
        fgetprint(
            'Downloading artifacts from Jenkins to {0}'.format(build_dir))
        for artifact in artifacts:
            filename = os.path.join(build_dir, artifact.get_filename())
            fgetprint(
                'Downloading artifact: {0}'.format(
                    os.path.basename(filename)))
            artifact.download(filename)
    else:
        fgetprint('No artifacts were found', error=True)
        sys.exit(1)
    if iso:
        glob_path = os.path.join(build_dir, '*.iso.data.txt')
        for data_file in glob.glob(glob_path):
            with open(data_file) as f:
                # Iterate the file directly instead of readlines().
                for line in f:
                    if 'HTTP_LINK' in line:
                        # maxsplit=1 so a '=' inside the URL does not
                        # break the unpack.
                        _, http_link = line.split('=', 1)
                        http_link = http_link.strip()
                        filename = http_link.split('/')[-1]
                        fgetprint(
                            'Downloading iso-file: {0}'.format(filename))
                        output = os.path.join(build_dir, filename)
                        download_iso(http_link, output)