def validate_test_requirements(self, test_name, spec, metric_labels):
    """Determine whether or not the test requirements are satisfied.

    If not, record the reason a skip or failure.
    This may throw exceptions, which are immediate failure.

    Args:
      test_name: [string] The name of the test.
      spec: [dict] The profile specification containing requirements.
          This argument will be pruned as values are consumed from it.
      metric_labels: [dict] Labels attached to the metrics recorded for
          skips and for the service-availability wait.

    Returns:
      True if requirements are satisfied, False if not.
    """
    # The 'api' entry is mandatory; it names the service the test talks to.
    if not 'api' in spec:
        raise_and_log_error(
            UnexpectedError('Test "{name}" is missing an "api" spec.'.format(
                name=test_name)))

    # pop() prunes consumed values so leftovers can be flagged below.
    requires = spec.pop('requires', {})
    configuration = requires.pop('configuration', {})
    our_config = vars(self.options)

    # Every required configuration key must exist in our options and match
    # the required value; otherwise the test is skipped (not failed).
    for key, value in configuration.items():
        if key not in our_config:
            message = ('Unknown configuration key "{0}" for test "{1}"'
                       .format(key, test_name))
            raise_and_log_error(ConfigError(message))
        if value != our_config[key]:
            reason = ('Skipped test {name} because {key}={want} != {have}'
                      .format(name=test_name, key=key, want=value,
                              have=our_config[key]))
            # Recording a skip mutates shared state, so guard with the lock.
            with self.__lock:
                self.__record_skip_test(test_name, reason,
                                        'IncompatableConfig', metric_labels)
            return False

    # The set of services to wait on is the required services (with HA
    # substitutions applied) plus the API service itself.
    services = set(replace_ha_services(
        requires.pop('services', []), self.options))
    services.add(self.__replace_ha_api_service(
        spec.pop('api'), self.options))

    # Anything still present was not consumed above and is unrecognized.
    if requires:
        raise_and_log_error(
            ConfigError('Unexpected fields in {name}.requires: {remaining}'
                        .format(name=test_name, remaining=requires)))
    if spec:
        raise_and_log_error(
            ConfigError('Unexpected fields in {name} specification: {remaining}'
                        .format(name=test_name, remaining=spec)))

    def wait_on_services(services):
        # Wait on all services concurrently; one worker per service.
        thread_pool = ThreadPool(len(services))
        thread_pool.map(self.wait_on_service, services)
        thread_pool.terminate()

    # Track how long we block waiting for the services to become available.
    self.__deployer.metrics.track_and_time_call(
        'WaitingOnServiceAvailability', metric_labels,
        self.__deployer.metrics.default_determine_outcome_labels,
        wait_on_services, services)

    return True
def __init__(self, factory, options, **kwargs):
    """Construct the halyard publish command.

    Works on a copy of the options so that BOM/branch settings can be
    forced without disturbing the caller's options object.

    Args:
      factory: [CommandFactory] Factory creating this command.
      options: [Namespace] Parsed command-line options; must include a
          halyard_version of the form X.Y.Z-<buildnum>.

    Raises:
      ConfigError: --halyard_version is malformed.
    """
    options_copy = copy.copy(options)
    # Publishing halyard is decoupled from any BOM; pin the branch/host.
    options_copy.bom_path = None
    options_copy.bom_version = None
    options_copy.git_branch = 'master'
    options_copy.github_hostname = 'github.com'
    # Overrides later if --git_allow_publish_master_branch is false
    super(PublishHalyardCommand, self).__init__(
        factory, options_copy, **kwargs)

    check_options_set(options, ['halyard_version'])
    # Validate X.Y.Z-<buildnum> and derive the stable (un-suffixed) version.
    match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', options.halyard_version)
    if match is None:
        raise_and_log_error(
            ConfigError('--halyard_version={version} is not X.Y.Z-<buildnum>'
                        .format(version=options.halyard_version)))
    self.__stable_version = '{major}.{minor}.{patch}'.format(
        major=match.group(1), minor=match.group(2), patch=match.group(3))

    self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options_copy)
    self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
    self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

    # Strip the -<buildnum> suffix to derive release branch/tag names.
    dash = self.options.halyard_version.find('-')
    semver_str = self.options.halyard_version[0:dash]
    semver_parts = semver_str.split('.')
    if len(semver_parts) != 3:
        raise_and_log_error(
            ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
    # Release branches track major.minor only (e.g. release-1.2.x).
    self.__release_branch = 'release-{maj}.{min}.x'.format(
        maj=semver_parts[0], min=semver_parts[1])
    self.__release_tag = 'version-' + semver_str
    self.__release_version = semver_str
def determine_commit(self, repository):
    """Determine the commit_id that we want to publish.

    Args:
      repository: [RepositorySpec-like] Must be the 'halyard' repository.

    Returns:
      The commit id string recorded for options.halyard_version.

    Raises:
      ConfigError: the repository is not halyard, or the halyard version
          is not present in the version/commit mapping.
    """
    if repository.name != 'halyard':
        raise_and_log_error(
            ConfigError('Unexpected repository "%s"' % repository.name))

    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
        # BUGFIX: the template was '{base}/(unknown)' and silently ignored
        # the 'filename' keyword; join base and filename as intended.
        versions_url = '{base}/{filename}'.format(
            base=options.halyard_bucket_base_url,
            filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    # The "url" may be a local file path (tests/dev) or a GCS url.
    if os.path.exists(versions_url):
        logging.debug('Loading halyard version info from file %s',
                      versions_url)
        with open(versions_url, 'r') as stream:
            version_data = stream.read()
    else:
        logging.debug('Loading halyard version info from bucket %s',
                      versions_url)
        version_data = check_subprocess(
            'gsutil cat {url}'.format(url=versions_url))

    # BUGFIX: use safe_load -- yaml.load without an explicit Loader is
    # deprecated and can construct arbitrary objects; this is plain data.
    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
        raise_and_log_error(
            ConfigError(
                'Unknown halyard version "{version}" in "{url}"'.format(
                    version=options.halyard_version, url=versions_url)))
    # BUGFIX: the computed commit was never returned to the caller.
    return commit
def push_tag_and_branch(self, repository):
    """Pushes a stable branch and git version tag to the origin repository."""
    # Nothing to push if the promote step found spin unchanged since the
    # last tag.
    if self.__no_changes:
        logging.info(
            'No changes in spin since last tag, skipping branch and tag push.'
        )
        return

    git_dir = repository.git_dir
    git = self.__scm.git

    # Validate the gate version format. The match groups themselves are
    # not used here; this is purely an early sanity check.
    match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', self.__gate_version)
    if match is None:
        raise_and_log_error(
            ConfigError(
                'gate version {version} is not X.Y.Z-<buildnum>'.format(
                    version=self.__gate_version)))
    semver_parts = self.__spinnaker_version.split('.')
    if len(semver_parts) != 3:
        raise_and_log_error(
            ConfigError(
                'Expected spinnaker version in the form X.Y.Z-N, got {}'.
                format(self.__spinnaker_version)))
    # Release branch follows spinnaker major.minor; the tag uses the
    # stable version computed earlier during promotion.
    release_branch = 'release-{maj}.{min}.x'.format(maj=semver_parts[0],
                                                    min=semver_parts[1])
    release_tag = 'version-' + self.__stable_version

    logging.info('Pushing branch=%s and tag=%s to %s', release_branch,
                 release_tag, repository.origin)
    # Create the branch locally, push it, then tag and push the tag.
    git.check_run_sequence(git_dir, [
        'checkout -b ' + release_branch,
        'push origin ' + release_branch,
        'tag ' + release_tag,
        'push origin ' + release_tag
    ])
def consider_debian_on_bintray(self, repository, build_version=None,
                               build_number=None):
    """Check whether desired version already exists on bintray.

    Args:
      repository: [RepositorySpec-like] The repository being built.
      build_version: [string] Optional version to look for.
      build_number: [string] Optional build number to look for.

    Returns:
      True if the artifact already exists and we should skip the build,
      False if the build should proceed (including after deleting an
      existing version when --delete_existing is set).

    Raises:
      ConfigError: the version exists and neither --skip_existing nor
          --delete_existing allow us to continue.
    """
    options = self.__options
    exists = []
    missing = []

    # technically we publish to both maven and debian repos.
    # we can be in a state where we are in one but not the other.
    # let's not worry about this for now.
    for bintray_repo in [options.bintray_debian_repository]:  #,
        # options.bintray_jar_repository]:
        # Debian package names carry a 'spinnaker-' prefix; monitoring
        # is special-cased to its daemon package.
        package_name = repository.name
        if bintray_repo == options.bintray_debian_repository:
            if package_name == 'spinnaker-monitoring':
                package_name = 'spinnaker-monitoring-daemon'
            elif not package_name.startswith('spinnaker'):
                package_name = 'spinnaker-' + package_name

        if self.bintray_repo_has_version(bintray_repo, package_name,
                                         repository,
                                         build_version=build_version,
                                         build_number=build_number):
            exists.append(bintray_repo)
        else:
            missing.append(bintray_repo)

    if exists:
        if options.skip_existing:
            # A partially-published artifact cannot simply be skipped.
            if missing:
                raise_and_log_error(
                    ConfigError(
                        'Have {name} version for {exists} but not {missing}'
                        .format(name=repository.name,
                                exists=exists[0], missing=missing[0])))
            logging.info('Already have %s -- skipping build',
                         repository.name)
            labels = {'repository': repository.name, 'artifact': 'debian'}
            self.__metrics.inc_counter(
                'ReuseArtifact', labels,
                'Kept existing desired debian package version.')
            return True

        if options.delete_existing:
            for repo in exists:
                self.bintray_repo_delete_version(
                    repo, package_name, repository,
                    build_version=build_version)
        else:
            raise_and_log_error(
                ConfigError('Already have debian for {name}'.format(
                    name=repository.name)))
    return False
def promote_spin(self, repository):
    """Promote an existing build to become the spin CLI stable version.

    Copies the candidate spin binaries to the stable-version GCS paths
    (unless nothing changed since the last tag) and updates the
    per-release and global "latest" files.

    Raises:
      ConfigError: the gate or spinnaker version strings are malformed.
    """
    git_dir = repository.git_dir
    git = self.__scm.git

    match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', self.__gate_version)
    # BUGFIX: validate the match BEFORE dereferencing its groups.
    # Previously match.group(1)/(2) were called first, so a malformed
    # gate version raised AttributeError instead of the intended
    # ConfigError below.
    if match is None:
        raise_and_log_error(
            ConfigError(
                'gate version {version} is not X.Y.Z-<buildnum>'.format(
                    version=self.__gate_version)))
    gate_major = match.group(1)
    gate_min = match.group(2)

    semver_parts = self.__spinnaker_version.split('.')
    if len(semver_parts) != 3:
        raise_and_log_error(
            ConfigError('Expected spinnaker version in the form X.Y.Z-N'))

    # Note: spin CLI is coupled to the Gate major and minor version.
    # Gate is a routing server, so features and breaking changes in Gate
    # must be reflected in spin since it is a client. We pin only the major
    # and minor versions so fixes (thus patch version) are decoupled between
    # the two.
    patch = '0'  # Patch is reset on a new Gate major or minor.
    # BUGFIX: escape the dots so the pattern only matches literal
    # version-<maj>.<min>.<patch> tags (an unescaped '.' matches any char).
    tag_matcher = re.compile(r'version-{maj}\.{min}\.(\d+)'.format(
        maj=gate_major, min=gate_min))
    tags = git.fetch_tags(git_dir)
    tag_matches = [
        tag_matcher.match(t) for t in tags if tag_matcher.match(t)
    ]
    if tag_matches:
        patch_versions = [int(m.group(1)) for m in tag_matches]
        max_patch = max(patch_versions)
        last_tag = 'version-{maj}.{min}.{max_patch}'.format(
            maj=gate_major, min=gate_min, max_patch=max_patch)
        # No changes if HEAD is exactly the commit of the newest tag.
        self.__no_changes = git.query_local_repository_commit_id(
            git_dir) == git.query_commit_at_tag(git_dir, last_tag)
        patch = str(max_patch + 1)

    self.__stable_version = '{major}.{minor}.{patch}'.format(
        major=match.group(1), minor=match.group(2), patch=patch)

    candidate = self.options.spin_version
    if self.__no_changes:
        logging.info('No changes in spin since last tag, skipping publish.')
    else:
        # Copy every dist/arch candidate binary to its stable path.
        for d in DIST_ARCH_LIST:
            source = 'spin/{}/{}/{}/{}'.format(candidate, d.dist, d.arch,
                                               d.filename)
            dest = 'spin/{}/{}/{}/{}'.format(self.__stable_version, d.dist,
                                             d.arch, d.filename)
            self.__gcs_uploader.copy_file(source, dest)

    self.__update_release_latest_file(gate_major, gate_min)
    self.__update_global_latest_file()
def __init__(self, factory, options, **kwargs):
    """Construct the debian build command.

    Requires BINTRAY_KEY and BINTRAY_USER in the environment and the
    bintray-related command-line options to be set.
    """
    # Debian builds never push back to the upstream repository.
    options.github_disable_upstream_push = True
    super(BuildDebianCommand, self).__init__(factory, options, **kwargs)

    # Bound the number of concurrent local builds.
    self.__semaphore = Semaphore(options.max_local_builds)

    # Credentials come from the environment rather than the command line.
    for env_name in ['BINTRAY_KEY', 'BINTRAY_USER']:
        if not os.environ.get(env_name):
            raise_and_log_error(
                ConfigError('Expected {0} set.'.format(env_name)))

    check_options_set(
        options,
        ['bintray_org', 'bintray_jar_repository',
         'bintray_debian_repository', 'bintray_publish_wait_secs'])
def consider_debian_on_bintray(self, repository, build_version):
    """Check whether desired version already exists on bintray.

    Args:
      repository: [RepositorySpec-like] The repository being built.
      build_version: [string] The version to look for.

    Returns:
      True if the artifact already exists and the build should be skipped,
      False if the build should proceed (including after deleting an
      existing version when --delete_existing is set).

    Raises:
      ConfigError: the version exists and neither --skip_existing nor
          --delete_existing allow us to continue.
    """
    options = self.__options
    exists = []
    missing = []

    # technically we publish to both maven and debian repos.
    # we can be in a state where we are in one but not the other.
    # let's not worry about this for now.
    for bintray_repo in [options.bintray_debian_repository]:  # ,
        # options.bintray_jar_repository]:
        # Debian package names carry a 'spinnaker-' prefix; monitoring is
        # special-cased to its daemon package.
        package_name = repository.name
        if bintray_repo == options.bintray_debian_repository:
            if package_name == "spinnaker-monitoring":
                package_name = "spinnaker-monitoring-daemon"
            elif not package_name.startswith("spinnaker"):
                package_name = "spinnaker-" + package_name

        if self.bintray_repo_has_version(bintray_repo, package_name,
                                         repository, build_version):
            exists.append(bintray_repo)
        else:
            missing.append(bintray_repo)

    if exists:
        if options.skip_existing:
            # A partially-published artifact cannot simply be skipped.
            if missing:
                raise_and_log_error(
                    ConfigError(
                        "Have {name} version for {exists} but not {missing}"
                        .format(
                            name=repository.name,
                            exists=exists[0],
                            missing=missing[0],
                        )))
            logging.info("Already have %s -- skipping build",
                         repository.name)
            labels = {"repository": repository.name, "artifact": "debian"}
            self.__metrics.inc_counter("ReuseArtifact", labels)
            return True

        if options.delete_existing:
            for repo in exists:
                self.bintray_repo_delete_version(
                    repo, package_name, repository,
                    build_version=build_version)
        else:
            raise_and_log_error(
                ConfigError("Already have debian for {name}".format(
                    name=repository.name)))
    return False
def __check_gcb_image(self, repository, version):
    """Determine if gcb image already exists."""
    options = self.options
    image_name = self.scm.repository_name_to_service_name(repository.name)

    # Ask the registry for tags matching this version; empty JSON list
    # means the image was never built.
    command = [
        'gcloud', '--account', options.gcb_service_account,
        'container', 'images', 'list-tags',
        options.docker_registry + '/' + image_name,
        '--filter="%s"' % version,
        '--format=json'
    ]
    got = check_subprocess(' '.join(command))
    image_exists = got.strip() != '[]'
    if not image_exists:
        return False

    labels = {
        'repository': repository.name,
        'artifact': 'gcr-container'
    }
    if self.options.skip_existing:
        logging.info('Already have %s -- skipping build', image_name)
        self.metrics.inc_counter('ReuseArtifact', labels)
        return True

    if self.options.delete_existing:
        self.__delete_gcb_image(repository, image_name, version)
    else:
        raise_and_log_error(
            ConfigError('Already have {name} version {version}'.format(
                name=image_name, version=version)))
    return False
def __determine_internal_version(self, repository):
    """Compute the next spin version pinned to Gate's major.minor.

    Returns:
      'X.Y.Z' where X.Y are Gate's major.minor and Z is one past the
      highest existing version tag for that series (or 0 if none).

    Raises:
      ConfigError: the gate version string is malformed.
    """
    # Note: spin CLI is coupled to the Gate major and minor version.
    # Gate is a routing server, so features and breaking changes in Gate
    # must be reflected in spin since it is a client.
    git_dir = repository.git_dir
    git = self.__scm.git

    match = re.match(r'(\d+)\.(\d+)\.(\d+)-\d+', self.__gate_version)
    if match is None:
        raise_and_log_error(
            ConfigError(
                'gate version {version} is not X.Y.Z-<buildnum>'.format(
                    version=self.__gate_version)))
    gate_major = match.group(1)
    gate_min = match.group(2)

    # BUGFIX: escape the dots so the pattern only matches literal
    # version-<maj>.<min>.<patch> tags (an unescaped '.' matches any char).
    tag_matcher = re.compile(r'version-{maj}\.{min}\.(\d+)'.format(
        maj=gate_major, min=gate_min))
    tags = git.fetch_tags(git_dir)
    # Match each tag once (the original evaluated the regex twice per tag).
    tag_matches = [m for m in (tag_matcher.match(t) for t in tags) if m]
    if tag_matches:
        max_patch = max(int(m.group(1)) for m in tag_matches)
        patch = str(max_patch + 1)
    else:
        patch = '0'
    return '{major}.{minor}.{patch}'.format(major=gate_major,
                                            minor=gate_min,
                                            patch=patch)
def __init__(self, factory, options, **kwargs):
    """Construct the halyard publish command.

    Works on a copy of the options so that BOM/branch settings can be
    forced without disturbing the caller's options object.

    Raises:
      ConfigError: --halyard_version is not of the form X.Y.Z-N.
    """
    options_copy = copy.copy(options)
    # Publishing halyard is decoupled from any BOM; pin the branch.
    options_copy.bom_path = None
    options_copy.bom_version = None
    options_copy.git_branch = 'master'
    # Overrides later if --git_allow_publish_master_branch is false
    super(PublishHalyardCommand, self).__init__(
        factory, options_copy, **kwargs)

    self.__scm = BranchSourceCodeManager(options_copy, self.get_input_dir())
    self.__hal = HalRunner(options_copy)
    self.__gradle = GradleRunner(options_copy, self.__scm, self.metrics)
    self.__halyard_repo_md_path = os.path.join('docs', 'commands.md')

    # Strip the -<buildnum> suffix to derive release branch/tag names.
    dash = self.options.halyard_version.find('-')
    # BUGFIX: str.find returns -1 when '-' is absent, which previously
    # sliced off the LAST character of the version instead of failing;
    # validate the format explicitly (mirrors the sibling constructor).
    if dash < 0:
        raise_and_log_error(
            ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
    semver_str = self.options.halyard_version[0:dash]
    semver_parts = semver_str.split('.')
    if len(semver_parts) != 3:
        raise_and_log_error(
            ConfigError('Expected --halyard_version in the form X.Y.Z-N'))
    # Release branches track major.minor only (e.g. release-1.2.x).
    self.__release_branch = 'release-{maj}.{min}.x'.format(
        maj=semver_parts[0], min=semver_parts[1])
    self.__release_tag = 'version-' + semver_str
    self.__release_version = semver_str
def __init__(self, factory, options, *pos_args, **kwargs):
    """Construct the BOM build command.

    The base BOM may come from a local file (--refresh_from_bom_path),
    from halyard (--refresh_from_bom_version), or be absent.

    Raises:
      ConfigError: both refresh options were supplied at once.
    """
    super(BuildBomCommand, self).__init__(
        factory, options, *pos_args, **kwargs)

    if options.refresh_from_bom_path and options.refresh_from_bom_version:
        raise_and_log_error(
            ConfigError('Cannot specify both --refresh_from_bom_path="{0}"'
                        ' and --refresh_from_bom_version="{1}"'.format(
                            options.refresh_from_bom_path,
                            options.refresh_from_bom_version)))

    if options.refresh_from_bom_path:
        logging.debug('Using base bom from path "%s"',
                      options.refresh_from_bom_path)
        check_path_exists(options.refresh_from_bom_path,
                          "refresh_from_bom_path")
        with open(options.refresh_from_bom_path, 'r') as stream:
            # BUGFIX: yaml.load without an explicit Loader is deprecated
            # and unsafe; the BOM is plain data so safe_load suffices.
            base_bom = yaml.safe_load(stream.read())
    elif options.refresh_from_bom_version:
        logging.debug('Using base bom version "%s"',
                      options.refresh_from_bom_version)
        base_bom = HalRunner(options).retrieve_bom_version(
            options.refresh_from_bom_version)
    else:
        base_bom = None

    if base_bom:
        logging.info('Creating new bom based on version "%s"',
                     base_bom.get('version', 'UNKNOWN'))
    self.__builder = BomBuilder(self.options, self.scm, base_bom=base_bom)
def determine_halyard_commit(self):
    """Determine the commit_id that we want to publish.

    Returns:
      The commit id string recorded for options.halyard_version.

    Raises:
      ConfigError: the halyard version is not in the version/commit map.
    """
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
        # BUGFIX: the template was '{base}/(unknown)' and silently dropped
        # the 'filename' keyword; join base and filename as intended.
        versions_url = '{base}/{filename}'.format(
            base=options.halyard_bucket_base_url,
            filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    # The "url" may be a local file path (tests/dev) or a GCS url.
    if os.path.exists(versions_url):
        logging.debug('Loading halyard version info from file %s',
                      versions_url)
        with open(versions_url, 'r') as stream:
            version_data = stream.read()
    else:
        logging.debug('Loading halyard version info from bucket %s',
                      versions_url)
        gsutil_output = check_subprocess(
            'gsutil cat {url}'.format(url=versions_url),
            stderr=subprocess.PIPE)
        # The latest version of gsutil prints a bunch of python warnings to
        # stdout (see b/152449160). This file is a series of lines that
        # look like...
        #   0.41.0-180209172926: 05f1e832ab438e5a980d1102e84cdb348a0ab055
        # ...so we'll just throw out any lines that don't start with digits.
        # BUGFIX: use line[:1] so blank lines don't raise IndexError.
        valid_lines = [line for line in gsutil_output.splitlines()
                       if line[:1].isdigit()]
        version_data = "\n".join(valid_lines)

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
        raise_and_log_error(
            ConfigError('Unknown halyard version "{version}" in "{url}"'
                        .format(version=options.halyard_version,
                                url=versions_url)))
    return commit
def clone_repository_to_path(
        self, repository, commit=None, branch=None, default_branch=None):
    """Clone the remote repository at the given commit or branch.

    If requesting a branch and it is not found, then settle for
    the default branch, if one was explicitly specified.

    Args:
      repository: [RepositorySpec-like] Provides origin and git_dir.
      commit: [string] Optional commit to check out after cloning.
      branch: [string] Optional branch to clone (mutually exclusive
          with commit).
      default_branch: [string] Optional fallback branch.

    Raises:
      ConfigError: both commit and branch were specified.
    """
    # pylint: disable=too-many-arguments
    # Idiom fix: compare against None with 'is', not '!='.
    if (commit is not None) and (branch is not None):
        raise_and_log_error(
            ConfigError('At most one of commit or branch can be specified.'))

    origin = repository.origin
    # A 3-part normalized url means a hosted repo (not a local path).
    parts = self.normalize_repo_url(repository.origin)
    if len(parts) == 3:
        pull_url = (self.make_ssh_url(*parts)
                    if self.__options.github_pull_ssh
                    else self.make_https_url(*parts))
    else:
        pull_url = origin

    git_dir = repository.git_dir
    logging.debug('Begin cloning %s', pull_url)
    parent_dir = os.path.dirname(git_dir)
    ensure_dir_exists(parent_dir)

    clone_command = 'clone ' + pull_url
    if branch:
        branches = [branch]
        if default_branch:
            branches.append(default_branch)
        self.__check_clone_branch(pull_url, parent_dir, clone_command,
                                  branches)
    else:
        self.check_run(parent_dir, clone_command)
    logging.info('Cloned %s into %s', pull_url, parent_dir)

    if commit:
        self.check_run(git_dir, 'checkout -q ' + commit, echo=True)

    upstream = repository.upstream_or_none()
    if upstream and not self.is_same_repo(upstream, origin):
        logging.debug('Adding upstream %s with disabled push', upstream)
        self.check_run(git_dir, 'remote add upstream ' + upstream)

    which = ('upstream'
             if upstream and not self.is_same_repo(upstream, origin)
             else 'origin')
    if self.__options.github_disable_upstream_push:
        self.check_run(
            git_dir,
            'remote set-url --push {which} disabled'.format(which=which))
    if which != 'origin' or not self.__options.github_disable_upstream_push:
        # Reuse 'parts' computed above rather than re-normalizing the url.
        if len(parts) == 3:
            # Origin is not a local path
            logging.debug('Fixing origin push url')
            push_url = (self.make_ssh_url(*parts)
                        if self.__options.github_push_ssh
                        else self.make_https_url(*parts))
            self.check_run(git_dir,
                           'remote set-url --push origin ' + push_url)

    logging.debug('Finished cloning %s', pull_url)
def _do_repository(self, repository):
    """Create the release branch in this repository and push it to origin.

    Honors --skip_existing / --delete_existing when the branch is already
    present on the remote; otherwise an existing branch is an error.
    """
    git_dir = repository.git_dir
    branch = self.options.spinnaker_version

    logging.debug('Checking for branch="%s" in "%s"', branch, git_dir)
    listing = self.__git.check_run(git_dir, 'branch -r').split('\n')
    remote_branches = [entry.strip() for entry in listing]

    if 'origin/' + branch in remote_branches:
        if self.options.skip_existing:
            logging.info('Branch "%s" already exists in "%s" -- skip',
                         branch, repository.origin)
            return
        if self.options.delete_existing:
            # Remove the remote branch so we can recreate it below.
            logging.warning('Branch "%s" already exists in "%s" -- delete',
                            branch, repository.origin)
            self.__git.delete_branch_on_origin(git_dir, branch)
        else:
            raise_and_log_error(
                ConfigError(
                    'Branch "{branch}" already exists in "{repo}"'.format(
                        branch=branch, repo=repository.name),
                    cause='branch_exists'))

    logging.info('Creating and pushing branch "%s" to "%s"',
                 branch, repository.origin)
    self.__git.check_run(git_dir, 'checkout -b ' + branch)
    self.__git.push_branch_to_origin(git_dir, branch)
def __init_bintray_versions_helper(self, base_path):
    """Load collected artifact version maps from base_path.

    Expects exactly one version file per artifact kind (jar, debian,
    gce image, gcr image) under <base_path>/collect_artifact_versions.

    Raises:
      ConfigError: an artifact kind has zero or multiple version files.
    """
    artifact_data_dir = os.path.join(base_path, 'collect_artifact_versions')
    debian_paths = []
    jar_paths = []
    gcr_paths = []
    image_paths = []
    # Bucket each file by its artifact-kind suffix.
    for filename in os.listdir(artifact_data_dir):
        path = os.path.join(artifact_data_dir, filename)
        if filename.endswith('__gcb_versions.yml'):
            gcr_paths.append(path)
        elif filename.endswith('__jar_versions.yml'):
            jar_paths.append(path)
        elif filename.endswith('__debian_versions.yml'):
            debian_paths.append(path)
        elif filename.endswith('__gce_image_versions.yml'):
            image_paths.append(path)

    for name, found in [('jar', jar_paths), ('debian', debian_paths),
                        ('gce image', image_paths),
                        ('gcr image', gcr_paths)]:
        if len(found) != 1:
            raise_and_log_error(
                ConfigError(
                    'Expected 1 %s version files in "%s": %s' % (
                        name, artifact_data_dir, found)))

    logging.debug('Loading container image versions from "%s"',
                  gcr_paths[0])
    # BUGFIX: yaml.load without an explicit Loader is deprecated and
    # unsafe; these files are plain data so safe_load suffices.
    with open(gcr_paths[0], 'r') as stream:
        self.__container_versions = yaml.safe_load(stream.read())
    with open(jar_paths[0], 'r') as stream:
        self.__jar_versions = yaml.safe_load(stream.read())
    with open(debian_paths[0], 'r') as stream:
        self.__debian_versions = yaml.safe_load(stream.read())
    with open(image_paths[0], 'r') as stream:
        self.__gce_image_versions = yaml.safe_load(stream.read())
def git_dir(self):
    """The path to the local repository the origin was cloned to."""
    path = self.__git_dir
    if not path:
        raise_and_log_error(
            ConfigError("{0} does not specify a git_dir".format(self)))
    return path
def origin(self):
    """The origin URL."""
    url = self.__origin
    if not url:
        raise_and_log_error(
            ConfigError("{0} does not specify an origin".format(self)))
    return url
def upstream(self):
    """The upstream URL."""
    url = self.__upstream
    if not url:
        raise_and_log_error(
            ConfigError("{0} does not specify an upstream".format(self)))
    return url
def __check_clone_branch(self, remote_url, base_dir, clone_command,
                         branches):
    """Attempt to clone each candidate branch in order until one succeeds.

    Args:
      remote_url: [string] The url being cloned (for log messages).
      base_dir: [string] Directory to run the clone from.
      clone_command: [string] The base git clone command (without -b).
      branches: [list of string] Candidate branches in preference order.

    Raises:
      ExecutionError: the clone failed for a reason other than a missing
          branch.
      ConfigError: none of the candidate branches exist on the remote.
    """
    remaining_branches = list(branches)
    while True:
        branch = remaining_branches.pop(0)
        cmd = '{clone} -b {branch}'.format(clone=clone_command,
                                           branch=branch)
        retcode, stdout = self.run_git(base_dir, cmd)
        # A zero return code means the clone succeeded; we are done.
        if not retcode:
            return

        # Distinguish "branch missing" (retry with the next candidate)
        # from any other git failure (hard error).
        not_found = stdout.find('Remote branch {branch} not found'
                                .format(branch=branch)) >= 0
        if not not_found:
            full_command = 'git -C "{dir}" {cmd}'.format(dir=base_dir,
                                                         cmd=cmd)
            raise_and_log_error(ExecutionError(full_command, program='git'),
                                full_command + ' failed with:\n' + stdout)

        if remaining_branches:
            logging.warning(
                'Branch %s does not exist in %s. Retry with %s',
                branch, remote_url, remaining_branches[0])
            continue

        # No candidates left: log the indented git output and give up.
        lines = stdout.split('\n')
        stdout = '\n '.join(lines)
        logging.error('git -C "%s" %s failed with output:\n %s',
                      base_dir, cmd, stdout)
        raise_and_log_error(ConfigError('Branches {0} do not exist in {1}.'
                                        .format(branches, remote_url)))
def add_extra_arguments(self, test_name, args, commandline):
    """Add extra arguments to the commandline.

    Args:
      test_name: [string] Name of test specifying the options.
      args: [dict] Specification of additional arguments to pass.
         Each key is the name of the argument, the value is the value
         to pass. If the value is preceded with a '$' then it refers to
         the value of an option. If the value is None then just add
         the key without an arg.
      commandline: [list] The list of command line arguments to append to.
    """
    option_dict = vars(self.options)
    aliases_dict = self.test_suite.get('aliases', {})
    for key, value in args.items():
        # Normalize numeric/boolean values to strings for the commandline.
        if isinstance(value, (int, bool)):
            value = str(value)
        if key == 'alias':
            # 'alias' expands recursively into the referenced arg groups.
            for alias_name in value:
                if not alias_name in aliases_dict:
                    raise_and_log_error(
                        ConfigError(
                            'Unknown alias "{name}" referenced in args for "{test}"'
                            .format(name=alias_name, test=test_name)))
                self.add_extra_arguments(
                    test_name, aliases_dict[alias_name], commandline)
            continue
        elif value is None:
            # Flag argument: emitted below as '--key' with no value.
            pass
        elif value.startswith('$'):
            # '$name' dereferences an option, extra test binding, or
            # environment variable, in that order of precedence.
            option_name = value[1:]
            if option_name in option_dict:
                value = option_dict[option_name] or '""'
            elif option_name in self.__extra_test_bindings:
                value = self.__extra_test_bindings[option_name] or '""'
            elif option_name in os.environ:
                value = os.environ[option_name]
            else:
                raise_and_log_error(
                    ConfigError(
                        'Unknown option "{name}" referenced in args for "{test}"'
                        .format(name=option_name, test=test_name)))
        if value is None:
            commandline.append('--' + key)
        else:
            commandline.extend(['--' + key, value])
def clone_repository_to_path( self, repository, commit=None, branch=None, default_branch=None ): """Clone the remote repository at the given commit or branch. If requesting a branch and it is not found, then settle for the default branch, if one was explicitly specified. """ # pylint: disable=too-many-arguments if (commit is not None) and (branch is not None): raise_and_log_error( ConfigError("At most one of commit or branch can be specified.") ) pull_url = self.determine_pull_url(repository.origin) git_dir = repository.git_dir logging.debug("Begin cloning %s", pull_url) parent_dir = os.path.dirname(git_dir) ensure_dir_exists(parent_dir) clone_command = "clone " + pull_url if branch: branches = [branch] if default_branch: branches.append(default_branch) self.__check_clone_branch(pull_url, parent_dir, clone_command, branches) else: self.check_run(parent_dir, clone_command) logging.info("Cloned %s into %s", pull_url, parent_dir) if commit: self.checkout(repository, commit) upstream = repository.upstream_or_none() origin = repository.origin if upstream and not self.is_same_repo(upstream, origin): logging.debug("Adding upstream %s with disabled push", upstream) self.check_run(git_dir, "remote add upstream " + upstream) which = ( "upstream" if upstream and not self.is_same_repo(upstream, origin) else "origin" ) if self.__options.github_disable_upstream_push: self.check_run( git_dir, "remote set-url --push {which} disabled".format(which=which) ) if which != "origin" or not self.__options.github_disable_upstream_push: parts = self.normalize_repo_url(repository.origin) if len(parts) == 3: # Origin is not a local path logging.debug("Fixing origin push url") push_url = self.determine_push_url(repository.origin) self.check_run(git_dir, "remote set-url --push origin " + push_url) logging.debug("Finished cloning %s", pull_url)
def patchable(self):
    """Return True if the changes in this repository is only a patch release.

    Both versions must be X.Y.Z. A patch release keeps major.minor fixed
    and increments the patch number by exactly one; any other same-series
    sequence is unexpected.
    """
    prev_parts = self.prev_version.split('.')
    curr_parts = self.version.split('.')
    if len(prev_parts) != 3:
        raise_and_log_error(
            ConfigError(
                'Previous version %s is not X.Y.Z' % self.prev_version))
    if len(curr_parts) != 3:
        raise_and_log_error(
            ConfigError('Version %s is not X.Y.Z' % self.version))

    # Different major or minor: not a patch release at all.
    if prev_parts[:2] != curr_parts[:2]:
        return False

    # Same series, so the patch number must advance by exactly one.
    if int(prev_parts[2]) != int(curr_parts[2]) - 1:
        raise_and_log_error(
            UnexpectedError(
                'Unexpected version sequence {prev} to {current}'.format(
                    prev=self.prev_version, current=self.version)))
    return True
def get_repository_service_build_version(self, repository):
    """Return the version the BOM records for this repository's service.

    Raises:
      UnexpectedError: no BOM was loaded.
      ConfigError: the BOM has no entry for the service.
    """
    if not self.__bom:
        raise_and_log_error(
            UnexpectedError('Missing bom', cause='NotReachable'))

    service_name = self.repository_name_to_service_name(repository.name)
    entry = self.__bom.get('services', {}).get(service_name, {})
    if not entry:
        raise_and_log_error(
            ConfigError('BOM missing service %s' % service_name))
    return entry['version']
def check_bom_service(bom, service_name):
    """Return the BOM entry for service_name, failing if it is absent.

    Args:
      bom: [dict] A BOM with a 'services' mapping.
      service_name: [string] The service to look up.

    Raises:
      ConfigError: the BOM has no entry for the service.
    """
    services = bom.get('services', {})
    entry = services.get(service_name)
    if entry is not None:
        return entry
    raise_and_log_error(
        ConfigError('BOM does not contain service "%s"' % service_name,
                    cause='BadBom'),
        'BOM missing "%s": %s' % (service_name, services.keys()))
def build(self):
    """Assemble and return the BOM dictionary.

    Merges the base BOM's services with the freshly determined service
    versions, resolves the dependency section, and records the artifact
    sources (git prefix, debian repo, docker registry, image project).

    Raises:
      ConfigError: no BOM dependencies could be found anywhere.
    """
    options = self.__options

    if self.__bom_dependencies_path:
        logging.debug('Loading bom dependencies from %s',
                      self.__bom_dependencies_path)
        with open(self.__bom_dependencies_path, 'r') as stream:
            # BUGFIX: yaml.load without an explicit Loader is deprecated
            # and unsafe; this file is plain data so safe_load suffices.
            dependencies = yaml.safe_load(stream.read())
            logging.debug('Loaded %s', dependencies)
    else:
        dependencies = None
    # Fall back to the base BOM's dependency section.
    if not dependencies:
        dependencies = self.__base_bom.get('dependencies')
    if not dependencies:
        raise_and_log_error(ConfigError('No BOM dependencies found'))

    base_sources = self.__base_bom.get('artifactSources', {})
    default_source_prefix = (base_sources.get('gitPrefix', None)
                             or self.determine_most_common_prefix())
    # Services whose origin differs from the default prefix record their
    # own gitPrefix override.
    for name, version_info in self.__services.items():
        repository = self.__repositories[name]
        origin = repository.origin
        source_prefix = self.to_url_prefix(origin)
        if source_prefix != default_source_prefix:
            version_info['gitPrefix'] = source_prefix

    branch = options.git_branch or 'master'
    artifact_sources = {
        'gitPrefix': default_source_prefix,
    }
    debian_repository = (
        None if options.bintray_debian_repository is None
        else 'https://dl.bintray.com/{org}/{repo}'.format(
            org=options.bintray_org,
            repo=options.bintray_debian_repository))
    # Only record sources that are actually configured.
    artifact_sources.update({
        name: source
        for name, source in [('debianRepository', debian_repository),
                             ('dockerRegistry', options.docker_registry),
                             ('googleImageProject',
                              options.publish_gce_image_project)]
        if source
    })

    # New service versions override the base BOM's entries.
    services = dict(self.__base_bom.get('services', {}))
    services.update(self.__services)

    return {
        'artifactSources': artifact_sources,
        'dependencies': dependencies,
        'services': services,
        'version': '%s-%s' % (branch, options.build_number),
        'timestamp': '{:%Y-%m-%d %H:%M:%S}'.format(now())
    }
def _find_matching_version(self, major_minor_version):
    """Return the first active version whose major.minor matches.

    Raises:
      ConfigError: no active Spinnaker version matches.
    """
    versions = self._get_versions()
    # Lazily scan for the first version in the requested series.
    matching = (v for v in versions
                if get_major_minor_version(v.get('version'))
                == major_minor_version)
    try:
        return next(matching)
    except StopIteration:
        raise_and_log_error(
            ConfigError(
                'There are no active Spinnaker versions for version {branch}.'
                .format(branch=major_minor_version),
                cause='IncorrectVersion'))
def __init__(self, factory, options, **kwargs):
    """Construct the spin build command from the BOM's gate entry.

    Raises:
      ConfigError: the BOM has no 'gate' service entry.
    """
    super(BuildSpinCommand, self).__init__(
        factory, options, source_repository_names=['spin'], **kwargs)
    self.__gcs_uploader = SpinGcsUploader(options)
    self.__build_version = None  # recorded after build

    # spin is versioned against gate (its API server), so the gate
    # version must be present in the BOM.
    bom_contents = BomSourceCodeManager.load_bom(options)
    services = bom_contents.get('services', {})
    gate_entry = services.get('gate', {})
    if not gate_entry:
        raise_and_log_error(
            ConfigError('No gate service entry found in bom {}'.format(
                bom_contents)))
    self.__gate_version = gate_entry['version']
def __init__(self, factory, options, **kwargs):
    """Construct the GCE component image build command.

    Raises:
      ConfigError: the BOM does not declare a googleImageProject.
    """
    check_options_set(options,
                      ['build_gce_service_account', 'build_gce_project'])
    # Image builds never push back to the upstream repository.
    options.github_disable_upstream_push = True
    super(BuildGceComponentImages, self).__init__(
        factory, options, **kwargs)

    # The target image project comes from the BOM's artifact sources.
    artifact_sources = self.source_code_manager.bom['artifactSources']
    self.__image_project = artifact_sources['googleImageProject']
    if not self.__image_project:
        raise_and_log_error(
            ConfigError('BOM has no artifactSources.googleImageProject'))
def __init__(self, factory, options, **kwargs):
    """Construct the changelog publish command.

    Verifies up front that the changelog gist URL is reachable.

    Raises:
      ConfigError: the changelog gist could not be fetched.
    """
    super(PublishChangelogCommand, self).__init__(
        factory, make_options_with_fallback(options),
        source_repository_names=[SPINNAKER_GITHUB_IO_REPOSITORY_NAME],
        **kwargs)
    check_options_set(options, ['spinnaker_version', 'changelog_gist_url'])
    try:
        logging.debug('Verifying changelog gist exists at "%s"',
                      options.changelog_gist_url)
        urlopen(options.changelog_gist_url)
    except HTTPError as error:
        # BUGFIX: HTTPError has no '.message' attribute in Python 3
        # (BaseException.message was removed), so the old code raised
        # AttributeError instead of the intended ConfigError. Formatting
        # the exception itself works on both Python 2 and 3.
        raise_and_log_error(
            ConfigError(u'Changelog gist "{url}": {error}'.format(
                url=options.changelog_gist_url, error=error)))