def url_checker_spec(self):
    """Spec for a url_checker block on a deployment check."""
    # check_url and expect are both strings formatted against the configuration
    formatted_string = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    return create_spec(deployment_check.UrlChecker
        , check_url = required(formatted_string)
        , expect = required(formatted_string)
        , endpoint = required(delayed(stack_specs.var_spec()))
        , timeout_after = defaulted(integer_spec(), 600)
        )
def create_result(self, ip, host_port, container_port, meta, val, dividers):
    """
    Build a Port from the three parsed components.

    The format is the same as the default docker cli client::

        ip:hostPort:containerPort | ip::containerPort | hostPort:containerPort | containerPort
    """
    missing = ('', NotSpecified)

    # Shift values into the right slots depending on how many were supplied
    if host_port in missing and container_port in missing:
        # Only one value: it is the container port
        ip, host_port, container_port = NotSpecified, NotSpecified, ip
    elif container_port in missing:
        # Two values: hostPort:containerPort
        ip, host_port, container_port = NotSpecified, ip, host_port
    elif host_port in missing:
        # ip::containerPort form
        host_port = NotSpecified

    # Collapse any remaining empty strings into NotSpecified
    if host_port == '':
        host_port = NotSpecified
    if container_port == '':
        container_port = NotSpecified

    if host_port is not NotSpecified:
        host_port = sb.integer_spec().normalise(meta.indexed_at('host_port'), host_port)
    container_port = sb.required(container_port_spec()).normalise(meta.indexed_at('container_port'), container_port)

    return Port(ip, host_port, container_port)
def normalise(self, meta, val):
    """
    Normalise a lambda function definition into a Lambda object.

    A ``use`` key merges in defaults from a named template under
    ``templates`` in the configuration; BadTemplate is raised when the named
    template doesn't exist.
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        # Template values act as defaults; val's own keys take precedence
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The function is named after its key in the configuration
    function_name = meta.key_names()['_key_name_0']

    val = sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(formatted_string)
        , location = sb.required(formatted_string)
        , description = formatted_string
        , sample_event = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , desired_output_for_test = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        ).normalise(meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    # (a throwaway dictobj subclass is built per value with exactly its keys)
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(val[key], MergedOptions):
            v = val[key].as_dict()
            class Arbritrary(dictobj):
                fields = list(v.keys())
            val[key] = Arbritrary(**v)

    return val
def url_checker_spec(self):
    """Spec for the url_checker section of confirm_deployment."""
    return create_spec(
        deployment_check.UrlChecker,
        check_url=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        endpoint=required(delayed(stack_specs.var_spec())),
        expect=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        # Stop polling the url after 10 minutes
        timeout_after=defaulted(integer_spec(), 600),
    )
def normalise(self, meta, val):
    """
    Normalise an ADD command.

    Two forms are supported:

    * ``content``/``context`` -- add a single file at ``dest`` whose content
      comes from an inline string or a harpoon context (but not both).
    * ``get`` -- add one or more files by name, each placed under ``prefix``.
    """
    from harpoon.option_spec.harpoon_specs import HarpoonSpec

    if "content" in val or "context" in val:
        spec = sb.set_options(mtime=sb.optional_spec(sb.integer_spec()), dest=sb.required(sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)), content=sb.string_spec(), context=sb.optional_spec(HarpoonSpec().context_spec))
        result = spec.normalise(meta, val)

        if result["content"] != "" and result["context"] is not NotSpecified:
            raise BadOption("Please don't specify both context and content")

        mtime = result["mtime"]
        if mtime is NotSpecified:
            # No explicit mtime given: ask the configured mtime function,
            # passing a minimal object that requests git-derived timestamps
            ctxt = type("Context", (object, ), {"use_git": True})()
            mtime = meta.everything["mtime"](ctxt)

        # Context name embeds a content hash, the (sanitised) destination and
        # the mtime so docker's build cache is busted when any of them change
        context_name = "{0}-{1}-mtime({2})".format(hashlib.md5(result['content'].encode('utf-8')).hexdigest(), result["dest"].replace("/", "-").replace(" ", "--"), mtime)
        extra_context = (result["content"], context_name)
        if result["context"] is not NotSpecified:
            context_name = "{0}.tar".format(context_name)
            extra_context = ({"context": result["context"]}, context_name)

        return Command(("ADD", "{0} {1}".format(context_name, result["dest"])), extra_context)
    else:
        spec = sb.set_options(
              get=sb.required(sb.listof(sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)))
            , prefix = sb.defaulted(sb.string_spec(), "")
            )
        result = spec.normalise(meta, val)

        # One ADD command per requested file
        final = []
        for val in result["get"]:
            final.append(Command(("ADD", "{0} {1}/{2}".format(val, result["prefix"], val))))
        return final
def wait_condition_spec(self):
    """Spec for a wait_condition block"""
    from harpoon.option_spec import image_objs

    # One shared spec for all formatted-string valued options
    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)

    return create_spec(
        image_objs.WaitCondition,
        harpoon=formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter),
        timeout=defaulted(integer_spec(), 300),
        wait_between_attempts=defaulted(float_spec(), 5),
        greps=optional_spec(dictof(fmt, fmt)),
        command=optional_spec(listof(fmt)),
        port_open=optional_spec(listof(integer_spec())),
        file_value=optional_spec(dictof(fmt, fmt)),
        curl_result=optional_spec(dictof(fmt, fmt)),
        file_exists=optional_spec(listof(fmt)),
    )
def normalise_filled(self, meta, val):
    """Normalise to an integer and ensure it is a multiple of self.divider."""
    number = sb.integer_spec().normalise(meta, val)
    remainder = number % self.divider
    if remainder != 0:
        raise BadSpecValue("Value should be divisible by {0}".format(self.divider), meta=meta)
    return number
class container_port_spec(many_item_formatted_spec):
    # Parses "port" or "port/transport" (e.g. "8080/tcp") into a ContainerPort
    value_name = "Container port"
    specs = [sb.integer_spec()]           # required: the port number
    optional_specs = [sb.string_spec()]   # optional: the transport after '/'
    formatter = MergedOptionStringFormatter
    # NOTE: "seperators" spelling matches the attribute expected by the base class
    seperators = ['/']

    def create_result(self, port, transport, meta, val, dividiers):
        """Combine the parsed port and transport into a ContainerPort."""
        return ContainerPort(port, transport)
def wait_condition_spec(self):
    """Spec for a wait_condition block"""
    from harpoon.option_spec import image_objs
    formatted_string = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    return create_spec(image_objs.WaitCondition
        , harpoon = formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter)
        , timeout = defaulted(integer_spec(), 300)
        , wait_between_attempts = defaulted(float_spec(), 5)
        , greps = optional_spec(dictof(formatted_string, formatted_string))
        , command = optional_spec(listof(formatted_string))
        , port_open = optional_spec(listof(integer_spec()))
        , file_value = optional_spec(dictof(formatted_string, formatted_string))
        , curl_result = optional_spec(dictof(formatted_string, formatted_string))
        , file_exists = optional_spec(listof(formatted_string))
        )
def environment_spec(self):
    """Spec for each environment"""
    return create_spec(
        stack_objs.Environment,
        # account ids are numeric but may be given as strings.
        # BUGFIX: use a raw string for the regex -- "\d" in a plain string is
        # an invalid escape sequence (DeprecationWarning, and a SyntaxError in
        # future Python versions)
        account_id=required(
            or_spec(valid_string_spec(validators.regexed(r"\d+")),
                    integer_spec())),
        region=defaulted(string_spec(), "ap-southeast-2"),
        vars=dictionary_spec(),
        tags=self.tags_spec)
class transition_spec(statement_spec):
    # Spec for a lifecycle "transition" block (moving objects to another
    # S3 storage class after a number of days or at a fixed date).
    # args is a lambda so statement_spec can build the option map lazily;
    # the ("storage", "class") tuple key is the base class's mechanism for a
    # multi-part option name -- presumably joined to "storageclass"; see the
    # required list below (TODO confirm against statement_spec).
    args = lambda s, self_type, self_name: {
        "days": sb.optional_spec(sb.integer_spec()),
        "date": capitalized_only_spec(),
        ("storage", "class"): sb.string_choice_spec(["GLACIER", "STANDARD_IA"])
    }
    required = ["storageclass"]
    # days and date are mutually exclusive...
    conflicting = [('days', 'date')]
    # ...but one of them (in either spelling) must be given
    validators = [validators.has_either(["days", "Days", "date", "Date"])]
    final_kls = lambda s, *args, **kwargs: LifecycleTransitionConfig(
        *args, **kwargs)
def ultradns_site_spec(self, this):
    """Spec for a single UltraDNS site; its name defaults to the config key."""
    # record_type/zone/domain are all formatted against the configuration
    fmt = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    return sb.create_spec(
        UltraDNSSite,
        name=sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        ttl=sb.optional_spec(sb.integer_spec()),
        provider=sb.any_spec(),
        record_type=sb.required(fmt),
        zone=sb.required(fmt),
        domain=sb.required(fmt),
        environments=sb.required(self.dns_environment_spec(this)),
    )
def ultradns_site_spec(self, this):
    """Spec for one UltraDNS site block."""
    formatted_str = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    return sb.create_spec(UltraDNSSite
        # the site is named after its key in the configuration
        , name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , ttl = sb.optional_spec(sb.integer_spec())
        , provider = sb.any_spec()
        , record_type = sb.required(formatted_str)
        , zone = sb.required(formatted_str)
        , domain = sb.required(formatted_str)
        , environments = sb.required(self.dns_environment_spec(this))
        )
def normalise(self, meta, val):
    """
    Normalise a lambda function definition into a Lambda object.

    Supports a ``use`` key that merges defaults from a named template under
    ``templates``; raises BadTemplate if that template doesn't exist.
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        # val's own keys win over the template's values
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The function takes its name from its key in the configuration
    function_name = meta.key_names()['_key_name_0']

    val = sb.create_spec(Lambda,
        name=sb.overridden(function_name),
        role=sb.required(
            only_one_spec(
                resource_spec("lambda", function_name, only=["iam"]))),
        code=sb.required(function_code_spec()),
        handler=function_handler_spec(),
        timeout=sb.integer_spec(),
        runtime=sb.required(formatted_string),
        location=sb.required(formatted_string),
        description=formatted_string,
        sample_event=sb.defaulted(
            sb.or_spec(formatted_dictionary(), sb.string_spec()), ""),
        desired_output_for_test=sb.defaulted(
            sb.or_spec(formatted_dictionary(), sb.string_spec()), ""),
        memory_size=sb.defaulted(divisible_by_spec(64), 128)).normalise(
            meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    # (builds a throwaway dictobj subclass per value with exactly its keys)
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(val[key], MergedOptions):
            v = val[key].as_dict()
            class Arbritrary(dictobj):
                fields = list(v.keys())
            val[key] = Arbritrary(**v)

    return val
def normalise_filled(self, meta, val):
    """
    Normalise a command into an ``(mtime, parts)`` tuple.

    A bare string becomes ``(0, (val, ))``. A list/tuple may already lead
    with an mtime (an int or a digit string); otherwise an mtime of 0 is
    prepended before the tuple_spec validates the pair.
    """
    if isinstance(val, six.string_types):
        return (0, (val, ))

    # BUGFIX: the original condition was
    #     isinstance(val, list) or isinstance(val, tuple) and len(val) > 0
    # which, by operator precedence, let an EMPTY list through to val[0]
    # and crashed with IndexError. Guard both types against being empty so
    # an empty sequence falls through to the spec and fails with a proper
    # BadSpecValue instead.
    if isinstance(val, (list, tuple)) and len(val) > 0:
        is_int = type(val[0]) is int
        is_digit = getattr(val[0], "isdigit", lambda: False)()
        if not is_int and not is_digit:
            # No leading mtime supplied; default it to 0
            val = (0, val)

    spec = sb.tuple_spec(sb.integer_spec(), sb.tupleof(sb.string_spec()))
    return spec.normalise(meta, val)
class lifecycle_statement_spec(statement_spec):
    # Spec for one S3 bucket lifecycle rule.
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    # args is a lambda so statement_spec can build the option map lazily; the
    # (("sep", "_"), ("parts", (...))) keys are the base class's mechanism for
    # multi-word option names joined with underscores.
    args = lambda s, self_type, self_name: {
        "id": s.formatted_string,
        "enabled": sb.boolean(),
        "prefix": s.formatted_string,
        "transition": transition_spec("transition", "transition"),
        # expiration may be a bare number of days or a full expiration block
        "expiration": sb.or_spec(sb.integer_spec(), expiration_spec("expiration", "expiration")),
        (("sep", "_"), ("parts", ("abort", "incomplete", "multipart", "upload"))): made_up_dict(sb.integer_spec(), ("DaysAfterInitiation", )),
        (("sep", "_"), ("parts", ("noncurrent", "version", "transition"))): capitalized_only_spec(),
        (("sep", "_"), ("parts", ("noncurrent", "version", "expiration"))): capitalized_only_spec()
    }
    final_kls = lambda s, *args, **kwargs: LifeCycleConfig(*args, **kwargs)
def normalise(self, meta, val):
    """
    Normalise an ADD command (dictionary form) into a list of commands.

    Exactly one of ``context``, ``content``, ``get`` or ``formatted`` may be
    given (enforced by either_keys); the matching spec turns val into an
    object whose ``commands`` generator yields the final docker commands.
    """
    from harpoon.option_spec.harpoon_specs import HarpoonSpec
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    # Enforce mutual exclusivity of the four forms
    val = sb.apply_validators(meta, val, [validators.either_keys(["context"], ["content"], ["get"], ["formatted"])])

    if "get" in val:
        val = sb.create_spec(CommandAddExtra
            , get = sb.required(sb.listof(formatted_string))
            , prefix = sb.optional_spec(sb.string_spec())
            ).normalise(meta, val)

    if "context" in val:
        val = sb.create_spec(CommandContextAdd
            , dest = sb.required(formatted_string)
            , mtime = sb.optional_spec(sb.integer_spec())
            , context = sb.required(HarpoonSpec().context_spec)
            ).normalise(meta, val)

    if "formatted" in val:
        # content is explicitly overridden away; the formatted string is the source
        val = sb.create_spec(CommandContentAdd
            , dest = sb.required(formatted_string)
            , mtime = sb.optional_spec(sb.integer_spec())
            , content = sb.overridden(sb.NotSpecified)
            , formatted = sb.container_spec(CommandContentAddString, formatted_string)
            ).normalise(meta, val)

    if "content" in val:
        # content may be a plain string, or anything else handled by the
        # complex_ADD_from_image_spec fallback
        val = sb.create_spec(CommandContentAdd
            , dest = sb.required(formatted_string)
            , mtime = sb.optional_spec(sb.integer_spec())
            , content = sb.match_spec(
                  (six.string_types, sb.container_spec(CommandContentAddString, sb.string_spec()))
                , fallback = complex_ADD_from_image_spec()
                )
            ).normalise(meta, val)

    return list(val.commands(meta))
def confirm_deployment_spec(self):
    """Spec for the confirm_deployment block of a stack."""
    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)

    # SNS confirmation is its own nested spec, with deprecation notices for
    # options that moved elsewhere
    sns_spec = create_spec(deployment_check.SNSConfirmation
        , validators.deprecated_key("auto_scaling_group_id", "Use ``confirm_deployment.auto_scaling_group_name``")
        , validators.deprecated_key("env", "Use ``stack.<stack>.env`` instead``")
        , timeout = defaulted(integer_spec(), 300)
        , version_message = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , deployment_queue = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        )

    return create_spec(deployment_check.ConfirmDeployment
        , deploys_s3_path = optional_spec(listof(stack_specs.s3_address()))
        , zero_instances_is_ok = defaulted(boolean(), False)
        , auto_scaling_group_name = optional_spec(fmt)
        , url_checker = optional_spec(self.url_checker_spec)
        , sns_confirmation = optional_spec(sns_spec)
        )
def confirm_deployment_spec(self):
    """Spec for the confirm_deployment options on a stack."""
    def fmt():
        # fresh formatted-string spec per option
        return formatted(string_spec(), formatter=MergedOptionStringFormatter)

    sns_confirmation = create_spec(
        deployment_check.SNSConfirmation,
        validators.deprecated_key("auto_scaling_group_id", "Use ``confirm_deployment.auto_scaling_group_name``"),
        validators.deprecated_key("env", "Use ``stack.<stack>.env`` instead``"),
        timeout=defaulted(integer_spec(), 300),
        version_message=required(fmt()),
        deployment_queue=required(fmt()),
    )

    return create_spec(
        deployment_check.ConfirmDeployment,
        deploys_s3_path=optional_spec(listof(stack_specs.s3_address())),
        zero_instances_is_ok=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(fmt()),
        url_checker=optional_spec(self.url_checker_spec),
        sns_confirmation=optional_spec(sns_confirmation),
    )
class expiration_spec(statement_spec):
    # Spec for a lifecycle "expiration" block.
    # The (("sep", "_"), ("parts", (...))) key is the base class's mechanism
    # for the multi-word option "expired_object_delete_marker".
    args = lambda s, self_type, self_name: {
        "days": sb.optional_spec(sb.integer_spec()),
        "date": capitalized_only_spec(),
        (("sep", "_"), ("parts", ("expired", "object", "delete", "marker"))):
        sb.optional_spec(sb.boolean())
    }
    # The three options are mutually exclusive...
    conflicting = [('days', 'date', 'expired_object_delete_marker')]
    # ...but at least one of them (in either spelling) must be given
    validators = [
        validators.has_either([
            "days", "Days", "date", "Date", "expired_object_delete_marker",
            "ExpiredObjectDeleteMarker"
        ])
    ]
    final_kls = lambda s, *args, **kwargs: LifecycleExpirationConfig(
        *args, **kwargs)
def normalise(self, meta, val):
    """
    Normalise a lambda function definition into a Lambda object.

    A ``use`` key merges in defaults from a named template under
    ``templates``; BadTemplate is raised when that template doesn't exist.
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        # val's own keys win over the template's values
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The function takes its name from its key in the configuration
    function_name = meta.key_names()['_key_name_0']

    return sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(formatted_string)
        , location = sb.required(formatted_string)
        , description = formatted_string
        , sample_event = sb.defaulted(sb.or_spec(sb.dictionary_spec(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        ).normalise(meta, val)
formatter=MergedOptionStringFormatter), provider_type=sb.required(sb.string_spec()), username=sb.required(formatted_string), password=sb.required(formatted_string)) formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter) artifact_command_spec = lambda: sb.create_spec( ArtifactCommand, copy=sb.listof(artifact_path_spec()), modify=sb.dictof(sb.string_spec(), sb.set_options(append=sb.listof(formatted_string))), command=sb.listof(formatted_string), timeout=sb.defaulted(sb.integer_spec(), 600), temp_dir=sb.defaulted(formatted_string, None), add_into_tar=sb.listof(artifact_path_spec())) params_json_spec = lambda: sb.listof( sb.set_options(ParameterKey=sb.required(sb.any_spec()), ParameterValue=sb.required(sb.any_spec()))) params_yaml_spec = lambda: sb.dictof( sb.string_spec(), sb.formatted(sb.string_or_int_as_string_spec(), formatter=MergedOptionStringFormatter)) stack_json_spec = lambda: sb.set_options(Resources=sb.required( sb.dictof( sb.string_spec(),
@memoized_property def ultradns_provider_spec(self): return sb.create_spec(UltraDNSProvider , name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter) , provider_type = sb.required(sb.string_spec()) , username = sb.required(formatted_string) , password = sb.required(formatted_string) ) formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter) artifact_command_spec = lambda : sb.create_spec(ArtifactCommand , copy = sb.listof(artifact_path_spec()) , modify = sb.dictof(sb.string_spec(), sb.set_options(append=sb.listof(formatted_string))) , command = sb.listof(formatted_string) , timeout = sb.defaulted(sb.integer_spec(), 600) , temp_dir = sb.defaulted(formatted_string, None) , add_into_tar = sb.listof(artifact_path_spec()) ) params_json_spec = lambda: sb.listof(sb.set_options( ParameterKey = sb.required(sb.any_spec()) , ParameterValue = sb.required(sb.any_spec()) )) params_yaml_spec = lambda: sb.dictionary_spec() stack_json_spec = lambda: sb.set_options( Resources = sb.required(sb.dictof(sb.string_spec(), sb.set_options(Type=sb.required(sb.string_spec()), Properties=sb.optional_spec(sb.dictionary_spec())))) , Parameters = sb.optional_spec(sb.dictof(sb.string_spec(), sb.dictionary_spec())) , Outputs = sb.optional_spec(sb.dictof(sb.string_spec(), sb.dictionary_spec()))
class Filter(dictobj.Spec):
    """
    The options for a filter.

    Usage looks like:

    .. code-block:: python

        filtr = Filter.FieldSpec().empty_normalise(force_refresh=True, firmware_version=1.22)
        # or
        filtr = Filter.from_json_str('{"force_refresh": true, "firmware_version": 1.22}')
        # or
        filtr = Filter.from_options({"force_refresh": True, "firmware_version": 1.22})
        # or
        filtr = Filter.from_kwargs(force_refresh=True, firmware_version=1.22)
        # or
        filtr = Filter.from_key_value_str("force_refresh=true firmware_version=1.22")
        # or
        filtr = Filter.from_url_str("force_refresh=true&firmware_version=1.22")

    .. automethod:: photons_device_finder.Filter.from_options
    .. automethod:: photons_device_finder.Filter.from_kwargs
    .. automethod:: photons_device_finder.Filter.empty
    .. automethod:: photons_device_finder.Filter.from_json_str
    .. automethod:: photons_device_finder.Filter.from_key_value_str
    .. automethod:: photons_device_finder.Filter.from_url_str
    .. autoattribute:: photons_device_finder.Filter.matches_all
    .. automethod:: photons_device_finder.Filter.matches
    .. automethod:: photons_device_finder.Filter.has
    """

    # When True, cached device information is bypassed (see matches, which
    # always refuses to match on this field)
    force_refresh = dictobj.Field(boolean, default=False)

    # Each field below is optional; when set it holds the acceptable values
    # for that device attribute (a list for most fields, ranges for hsbk)
    serial = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)
    label = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)
    power = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)

    group_id = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)
    group_name = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)
    location_id = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)
    location_name = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)

    # hsbk fields are ranges (see str_ranges and the matches method)
    hue = dictobj.Field(str_ranges, wrapper=sb.optional_spec)
    saturation = dictobj.Field(str_ranges, wrapper=sb.optional_spec)
    brightness = dictobj.Field(str_ranges, wrapper=sb.optional_spec)
    kelvin = dictobj.Field(str_ranges, wrapper=sb.optional_spec)

    firmware_version = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)

    product_id = dictobj.Field(sb.listof(sb.integer_spec()), wrapper=sb.optional_spec)
    product_identifier = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)

    cap = dictobj.Field(sb.listof(sb.string_spec()), wrapper=sb.optional_spec)

    @classmethod
    def from_json_str(kls, s):
        """
        Interpret s as a json string and use it to create a Filter using from_options
        """
        try:
            options = json.loads(s)
        except (TypeError, ValueError) as error:
            raise InvalidJson(error=error)
        else:
            if type(options) is not dict:
                raise InvalidJson("Expected a dictionary", got=type(options))
            return kls.from_options(options)

    @classmethod
    def from_key_value_str(kls, s):
        """
        Create a Filter based on the ``key=value key2=value2`` string provided.

        Each key=value pair is separated by a space and arrays are formed by
        separating values by a comma.

        Note that values may not have spaces in them because of how we split
        the key=value pairs. If you need values to have spaces use
        from_json_str or from_options.
        """
        options = {}
        for part in s.split(" "):
            m = regexes["key_value"].match(part)
            if m:
                groups = m.groupdict()
                # hsbk ranges and force_refresh take a single value; every
                # other field is a comma separated list
                if groups["key"] not in ("hue", "saturation", "brightness", "kelvin", "force_refresh"):
                    options[groups["key"]] = groups["value"].split(',')
                else:
                    options[groups["key"]] = groups["value"]
        return kls.from_options(options)

    @classmethod
    def from_url_str(kls, s):
        """
        Create a Filter based on ``key=value&otherkey=value2`` string provided

        Where the string is url encoded.
        """
        return kls.from_options(parse_qs(s))

    @classmethod
    def from_kwargs(kls, **kwargs):
        """Create a Filter based on the provided kwarg arguments"""
        return kls.from_options(kwargs)

    @classmethod
    def empty(kls, force_refresh=False):
        """Create an empty filter"""
        return kls.from_options({"force_refresh": force_refresh})

    @classmethod
    def from_options(kls, options):
        """Create a Filter based on the provided dictionary"""
        if isinstance(options, dict):
            # Unknown keys are silently ignored by the spec; warn so that
            # typos in filter names are visible
            for option in options:
                if option not in kls.fields:
                    log.warning(
                        hp.lc("Unknown option provided for filter",
                              wanted=option))
        return kls.FieldSpec().normalise(Meta.empty(), options)

    def has(self, field):
        """Say whether the filter has an opinion on this field"""
        return field in self.fields and self[field] != sb.NotSpecified

    def matches(self, field_name, val):
        """
        Says whether this filter matches against provided filed_name/val pair

        * Always say False for ``force_refresh``
        * Say False if the value on the filter for field_name is NotSpecified
        * Say True if a hsbk value and we are within the range specified in val
        * Say True if value on the filter is a list, and val exists in that list
        * Say True if value on the filter is not a list and matches val
        """
        if field_name == "force_refresh":
            return False

        if field_name in self.fields:
            f = self[field_name]
            if f is not sb.NotSpecified:
                if field_name in ("hue", "saturation", "brightness", "kelvin"):
                    # f is a list of (low, high) pairs; match if val falls in any range
                    return any(val >= pair[0] and val <= pair[1] for pair in f)

                if field_name in self.label_fields and type(val) is str:
                    # Label-like fields support glob patterns via fnmatch
                    if type(f) is list:
                        return any(fnmatch.fnmatch(val, pat) for pat in f)
                    else:
                        return fnmatch.fnmatch(val, f)

                if type(f) is list:
                    if type(val) is list:
                        return any(v in val for v in f)
                    else:
                        return val in f
                else:
                    return val == f

        return False

    @property
    def matches_all(self):
        """True if this Filter matches against any device"""
        # Matches everything only when no field besides force_refresh is set
        for field in self.fields:
            if field != "force_refresh":
                if self[field] != sb.NotSpecified:
                    return False
        return True

    @property
    def points(self, for_info=False):
        """Provide InfoPoints enums that match the keys on this filter with values"""
        # NOTE(review): for_info is never used, and a property getter is only
        # ever invoked with self -- confirm whether this parameter is vestigial
        for e in InfoPoints:
            for key in e.value.keys:
                if self[key] != sb.NotSpecified:
                    yield e

    @property
    def label_fields(self):
        # Fields whose values are matched with glob patterns in matches()
        return ("product_identifier", "label", "location_name", "group_name")
def normalise_filled(self, meta, val):
    """Normalise val to an integer and reject values not divisible by the divider."""
    result = sb.integer_spec().normalise(meta, val)
    if result % self.divider:
        raise BadSpecValue("Value should be divisible by {0}".format(self.divider), meta=meta)
    return result
def image_spec(self):
    """Spec for each image"""
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(
        image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``"),
        validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``"),
        validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``"),
        validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        image_name=optional_spec(string_spec()),
        image_index=defaulted(string_spec(), ""),
        container_name=optional_spec(string_spec()),
        image_name_prefix=defaulted(string_spec(), ""),
        user=defaulted(string_spec(), None),
        # NOTE: time.time() is evaluated once when the spec is built, not per
        # normalise call -- confirm that is intended
        mtime=defaulted(any_spec(), time.time()),
        configuration=any_spec(),
        vars=dictionary_spec(),
        deleteable_image=defaulted(boolean(), False)

        # The spec itself
        , bash=delayed(
            optional_spec(
                formatted(string_spec(),
                          formatter=MergedOptionStringFormatter))),
        command=delayed(
            optional_spec(
                formatted(string_spec(),
                          formatter=MergedOptionStringFormatter))),
        commands=required(container_spec(Commands, listof(command_spec()))),
        squash_after=optional_spec(
            or_spec(boolean(), container_spec(Commands,
                                              listof(command_spec())))),
        squash_before_push=optional_spec(
            or_spec(boolean(), container_spec(Commands,
                                              listof(command_spec())))),
        recursive=optional_spec(
            create_spec(
                image_objs.Recursive,
                action=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                persist=required(
                    listof(
                        formatted(string_spec(),
                                  formatter=MergedOptionStringFormatter))),
                image_name=delayed(
                    many_format(
                        overridden("images.{_key_name_2}.image_name"),
                        formatter=MergedOptionStringFormatter)))),
        links=listof(specs.link_spec(), expect=image_objs.Link),
        context=self.context_spec,
        wait_condition=optional_spec(self.wait_condition_spec),
        lxc_conf=defaulted(filename_spec(), None),
        volumes=create_spec(image_objs.Volumes,
                            mount=listof(specs.mount_spec(), expect=image_objs.Mount),
                            share_with=listof(
                                formatted(
                                    string_spec(),
                                    MergedOptionStringFormatter,
                                    expected_type=image_objs.Image))),
        dependency_options=dictof(
            specs.image_name_spec(),
            create_spec(image_objs.DependencyOptions,
                        attached=defaulted(boolean(), False),
                        wait_condition=optional_spec(
                            self.wait_condition_spec))),
        env=listof(specs.env_spec(), expect=image_objs.Environment),
        ports=listof(specs.port_spec(), expect=image_objs.Port),
        ulimits=defaulted(listof(dictionary_spec()), None),
        log_config=defaulted(listof(dictionary_spec()), None),
        security_opt=defaulted(listof(string_spec()), None),
        read_only_rootfs=defaulted(boolean(), False),
        # Arbitrary passthrough options for the docker client calls
        other_options=create_spec(other_options,
                                  start=dictionary_spec(),
                                  build=dictionary_spec(),
                                  create=dictionary_spec(),
                                  host_config=dictionary_spec()),
        network=create_spec(image_objs.Network,
                            dns=defaulted(listof(string_spec()), None),
                            mode=defaulted(string_spec(), None),
                            hostname=defaulted(string_spec(), None),
                            domainname=defaulted(string_spec(), None),
                            disabled=defaulted(boolean(), False),
                            dns_search=defaulted(listof(string_spec()), None),
                            extra_hosts=listof(string_spec()),
                            network_mode=defaulted(string_spec(), None),
                            publish_all_ports=defaulted(boolean(), False)),
        cpu=create_spec(image_objs.Cpu,
                        cap_add=defaulted(boolean(), None),
                        cpuset=defaulted(listof(string_spec()), None),
                        cap_drop=defaulted(boolean(), None),
                        mem_limit=defaulted(integer_spec(), 0),
                        cpu_shares=defaulted(integer_spec(), None),
                        memswap_limit=defaulted(integer_spec(), 0)),
        devices=defaulted(listof(dictionary_spec()), None),
        privileged=defaulted(boolean(), False),
        restart_policy=defaulted(string_spec(), None))
class Color(dictobj.Spec):
    """An HSBK colour; all four components are required."""
    # NOTE(review): no value ranges are enforced here -- presumably hue is in
    # degrees and saturation/brightness are 0..1 fractions; confirm at callers
    hue = dictobj.Field(sb.integer_spec(), wrapper=sb.required)
    saturation = dictobj.Field(sb.float_spec(), wrapper=sb.required)
    brightness = dictobj.Field(sb.float_spec(), wrapper=sb.required)
    kelvin = dictobj.Field(sb.integer_spec(), wrapper=sb.required)
def stack_spec(self):
    """
    Spec for each stack.

    Normalises one stack's options into a ``stack_objs.Stack``.  Keys that
    have been removed or renamed are rejected up front with a pointer to
    their replacement, and most string options go through
    ``MergedOptionStringFormatter`` so they can reference other options
    (e.g. ``{environment}`` or ``{_key_name_1}``, the stack's key in the
    configuration).
    """
    return create_spec(
        stack_objs.Stack,
        # Retired keys -> point the user at the new location.
        # (fixed: the replacement key is confirm_deployment.url_checker,
        # not the nonexistent url_checker1)
        validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker``"),
        validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``"),
        validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``"),
        validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
        validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``"),
        bespin=any_spec(),
        # Identity: name/stack_name default to the key this stack sits under
        name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        stack_name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        environment=formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter),
        # Environment variable lists for the various phases
        env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        build_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        stack_name_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        tags=self.tags_spec,
        termination_protection=defaulted(boolean(), False),
        # Template / params / policy file locations, defaulted from the stack key
        stack_json=valid_stack_json(default="{config_root}/{_key_name_1}.json"),
        stack_yaml=valid_stack_yaml(default="{config_root}/{_key_name_1}.yaml"),
        params_json=valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json"),
        params_yaml=valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml"),
        stack_policy=valid_policy_json(default="{config_root}/{_key_name_1}-policy.json"),
        role_name=formatted(string_spec(), formatter=MergedOptionStringFormatter),
        # Build ordering relative to other stacks
        build_first=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_after=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_timeout=defaulted(integer_spec(), 1200),
        ignore_deps=defaulted(boolean(), False),
        # vars is delayed so its values resolve on first access
        vars=delayed(dictof(string_spec(), stack_specs.var_spec(), nested=True)),
        skip_update_if_equivalent=listof(stack_specs.skipper_spec()),
        suspend_actions=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        artifact_retention_after_deployment=defaulted(boolean(), False),
        command=optional_spec(string_spec()),
        netscaler=optional_spec(self.netscaler_spec),
        notify_stackdriver=defaulted(boolean(), False),
        stackdriver=optional_spec(
            create_spec(
                stack_objs.Stackdriver,
                api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                deployment_version=defaulted(
                    formatted(string_spec(), formatter=MergedOptionStringFormatter), "<version>"
                ),
            )
        ),
        dns=optional_spec(
            stack_specs.dns_spec(
                create_spec(
                    stack_objs.DNS,
                    vars=dictof(
                        string_spec(),
                        formatted(string_spec(), formatter=MergedOptionStringFormatter),
                        nested=True,
                    ),
                    providers=dictof(string_spec(), stack_specs.dns_provider_spec()),
                    # sites is delayed so site specs resolve lazily
                    sites=delayed(dictof(string_spec(), stack_specs.dns_site_spec())),
                )
            )
        ),
        scaling_options=create_spec(
            ScalingOptions,
            highest_min=defaulted(integer_spec(), 2),
            instance_count_limit=defaulted(integer_spec(), 10),
        ),
        artifacts=container_spec(
            artifact_objs.ArtifactCollection,
            dictof(
                string_spec(),
                create_spec(
                    artifact_objs.Artifact,
                    not_created_here=defaulted(boolean(), False),
                    compression_type=string_choice_spec(["gz", "xz"]),
                    history_length=integer_spec(),
                    cleanup_prefix=optional_spec(string_spec()),
                    upload_to=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                    commands=listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand),
                    paths=listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath),
                    files=listof(
                        create_spec(
                            artifact_objs.ArtifactFile,
                            # A file needs inline content or a task to produce it
                            validators.has_either(["content", "task"]),
                            content=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                            task=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                            path=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                            task_runner=formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter),
                        )
                    ),
                ),
            ),
        ),
        newrelic=optional_spec(
            create_spec(
                stack_objs.NewRelic,
                api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                account_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                application_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
                deployed_version=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            )
        ),
        downtimer_options=optional_spec(
            dictof(
                valid_string_spec(valid_alerting_system()),
                create_spec(
                    stack_objs.DowntimerOptions,
                    hosts=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                ),
            )
        ),
        alerting_systems=optional_spec(dictof(string_spec(), self.alerting_system_spec)),
        ssh=optional_spec(
            create_spec(
                stack_objs.SSH,
                validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
                user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                instance_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                address=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                instance=optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
                auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_key_path=formatted(
                    defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"),
                    formatter=MergedOptionStringFormatter,
                ),
                instance_key_path=formatted(
                    defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"),
                    formatter=MergedOptionStringFormatter,
                ),
                storage_type=formatted(
                    defaulted(string_choice_spec(["url", "rattic"]), "url"),
                    formatter=MergedOptionStringFormatter,
                ),
                storage_host=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            )
        ),
        confirm_deployment=optional_spec(self.confirm_deployment_spec),
    )
def image_spec(self):
    """
    Spec for each image.

    Normalises one image's options into an ``image_objs.Image``.  Removed
    features (persistence, squash) and renamed keys are rejected with an
    explanatory message; ``{...}`` references in string options are
    resolved via ``MergedOptionStringFormatter``.

    The old local ``persistence_shell_spec`` helper class was dead code
    (the persistence feature is rejected by the validator below, so the
    class was never referenced) and has been removed.
    """
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs

    return create_spec(
        image_objs.Image,
        # Removed features and renamed keys
        validators.deprecated_key("persistence", "The persistence feature has been removed"),
        validators.deprecated_key("squash_after", "The squash feature has been removed"),
        validators.deprecated_key("squash_before_push", "The squash feature has been removed"),
        validators.deprecated_key("volumes_from", "Use ``volumes.share_with``"),
        validators.deprecated_key("link", "Use ``links``"),
        # Harpoon options
        harpoon=any_spec(),
        # Naming: name/key_name default to the key the image sits under
        tag=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        image_name=optional_spec(string_spec()),
        image_index=formatted(defaulted(string_spec(), ""), formatter=MergedOptionStringFormatter),
        container_name=optional_spec(string_spec()),
        image_name_prefix=defaulted(string_spec(), ""),
        no_tty_option=defaulted(formatted(boolean(), formatter=MergedOptionStringFormatter), False),
        user=defaulted(string_spec(), None),
        configuration=any_spec(),
        vars=dictionary_spec(),
        assume_role=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        deleteable_image=defaulted(boolean(), False),
        authentication=self.authentications_spec,
        # The spec itself
        shell=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash"),
        # bash/command/cache_from are delayed so they resolve lazily
        bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        commands=required(container_spec(Commands, listof(command_spec()))),
        cache_from=delayed(or_spec(boolean(), listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))),
        cleanup_intermediate_images=defaulted(boolean(), True),
        links=listof(specs.link_spec(), expect=image_objs.Link),
        context=self.context_spec,
        wait_condition=optional_spec(self.wait_condition_spec),
        lxc_conf=defaulted(filename_spec(), None),
        volumes=create_spec(
            image_objs.Volumes,
            mount=listof(specs.mount_spec(), expect=image_objs.Mount),
            share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image)),
        ),
        dependency_options=dictof(
            specs.image_name_spec(),
            create_spec(
                image_objs.DependencyOptions,
                attached=defaulted(boolean(), False),
                wait_condition=optional_spec(self.wait_condition_spec),
            ),
        ),
        env=listof(specs.env_spec(), expect=image_objs.Environment),
        ports=listof(specs.port_spec(), expect=image_objs.Port),
        ulimits=defaulted(listof(dictionary_spec()), None),
        log_config=defaulted(listof(dictionary_spec()), None),
        security_opt=defaulted(listof(string_spec()), None),
        read_only_rootfs=defaulted(boolean(), False),
        # Free-form passthrough options for the docker client calls
        other_options=create_spec(
            other_options,
            start=dictionary_spec(),
            build=dictionary_spec(),
            create=dictionary_spec(),
            host_config=dictionary_spec(),
        ),
        network=create_spec(
            image_objs.Network,
            dns=defaulted(listof(string_spec()), None),
            mode=defaulted(string_spec(), None),
            hostname=defaulted(string_spec(), None),
            domainname=defaulted(string_spec(), None),
            disabled=defaulted(boolean(), False),
            dns_search=defaulted(listof(string_spec()), None),
            extra_hosts=listof(string_spec()),
            network_mode=defaulted(string_spec(), None),
            publish_all_ports=defaulted(boolean(), False),
        ),
        cpu=create_spec(
            image_objs.Cpu,
            cap_add=defaulted(listof(string_spec()), None),
            cpuset_cpus=defaulted(string_spec(), None),
            cpuset_mems=defaulted(string_spec(), None),
            cap_drop=defaulted(listof(string_spec()), None),
            mem_limit=defaulted(integer_spec(), 0),
            cpu_shares=defaulted(integer_spec(), None),
            memswap_limit=defaulted(integer_spec(), 0),
        ),
        devices=defaulted(listof(dictionary_spec()), None),
        privileged=defaulted(boolean(), False),
        restart_policy=defaulted(string_spec(), None),
    )
def image_spec(self):
    """Spec for each image"""
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs

    return create_spec(image_objs.Image
        # Context-related keys moved under ``context``; recursive became
        # persistence; volumes_from/link were renamed
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``")
        , validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``")
        , validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``")
        , validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")
        , validators.deprecated_key("recursive", "Use ``persistence``")
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")
        , validators.deprecated_key("link", "Use ``links``")

        # Options harpoon itself provides
        , harpoon = any_spec()

        # Naming defaults to the key the image is defined under
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name = optional_spec(string_spec())
        , image_index = defaulted(string_spec(), "")
        , container_name = optional_spec(string_spec())
        , image_name_prefix = defaulted(string_spec(), "")
        , user = defaulted(string_spec(), None)
        # NOTE(review): the default mtime is captured once per call to
        # image_spec rather than per normalise -- confirm this is intended
        , mtime = defaulted(any_spec(), time.time())
        , configuration = any_spec()
        , vars = dictionary_spec()
        , deleteable_image = defaulted(boolean(), False)

        # The spec itself; bash/command are delayed so they resolve lazily
        , bash = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands = required(container_spec(Commands, listof(command_spec())))
        , squash_after = optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , squash_before_push = optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , persistence = optional_spec(create_spec(image_objs.Persistence
            , validators.deprecated_key("persist", "Use ``folders``")
            , action = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , folders = required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , cmd = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
            # Resolved lazily via many_format against this image's image_name
            , image_name = delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter))
            ))
        , links = listof(specs.link_spec(), expect=image_objs.Link)
        , context = self.context_spec
        , wait_condition = optional_spec(self.wait_condition_spec)
        , lxc_conf = defaulted(filename_spec(), None)
        , volumes = create_spec(image_objs.Volumes
            , mount = listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with = listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )
        , dependency_options = dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached = defaulted(boolean(), False)
                , wait_condition = optional_spec(self.wait_condition_spec)
                )
            )
        , env = listof(specs.env_spec(), expect=image_objs.Environment)
        , ports = listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits = defaulted(listof(dictionary_spec()), None)
        , log_config = defaulted(listof(dictionary_spec()), None)
        , security_opt = defaulted(listof(string_spec()), None)
        , read_only_rootfs = defaulted(boolean(), False)
        # Free-form passthrough options for the docker client calls
        , other_options = create_spec(other_options
            , start = dictionary_spec()
            , build = dictionary_spec()
            , create = dictionary_spec()
            , host_config = dictionary_spec()
            )
        , network = create_spec(image_objs.Network
            , dns = defaulted(listof(string_spec()), None)
            , mode = defaulted(string_spec(), None)
            , hostname = defaulted(string_spec(), None)
            , domainname = defaulted(string_spec(), None)
            , disabled = defaulted(boolean(), False)
            , dns_search = defaulted(listof(string_spec()), None)
            , extra_hosts = listof(string_spec())
            , network_mode = defaulted(string_spec(), None)
            , publish_all_ports = defaulted(boolean(), False)
            )
        , cpu = create_spec(image_objs.Cpu
            # NOTE(review): cap_add/cap_drop are booleans in this variant but
            # lists of strings in the other image_spec -- confirm which shape
            # callers expect
            , cap_add = defaulted(boolean(), None)
            , cpuset = defaulted(listof(string_spec()), None)
            , cap_drop = defaulted(boolean(), None)
            , mem_limit = defaulted(integer_spec(), 0)
            , cpu_shares = defaulted(integer_spec(), None)
            , memswap_limit = defaulted(integer_spec(), 0)
            )
        , devices = defaulted(listof(dictionary_spec()), None)
        , privileged = defaulted(boolean(), False)
        , restart_policy = defaulted(string_spec(), None)
        )
def stack_spec(self):
    """
    Spec for each stack.

    Normalises one stack's options into a ``stack_objs.Stack``.  Renamed
    keys are rejected with a pointer at their replacement, and formatted
    strings may reference other options through
    ``MergedOptionStringFormatter`` (e.g. ``{environment}`` or
    ``{_key_name_1}``, the stack's key in the configuration).
    """
    return create_spec(
        stack_objs.Stack,
        # Renamed keys -> point the user at the new location.
        # (fixed: the replacement key is confirm_deployment.url_checker,
        # not the nonexistent url_checker1)
        validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker``"),
        validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``"),
        validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``"),
        validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
        validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``"),
        bespin=any_spec(),
        # Identity: name/stack_name default to the key this stack sits under
        name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        stack_name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        environment=formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter),
        # Environment variable lists for the various phases
        env=listof(stack_specs.env_spec(), expect=stack_objs.Environment),
        build_env=listof(stack_specs.env_spec(), expect=stack_objs.Environment),
        stack_name_env=listof(stack_specs.env_spec(), expect=stack_objs.Environment),
        tags=dictionary_spec(),
        # Template / params file locations, defaulted from the stack key
        stack_json=valid_stack_json(default="{config_root}/{_key_name_1}.json"),
        params_json=valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json"),
        params_yaml=valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml"),
        # Build ordering relative to other stacks
        build_first=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_after=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_timeout=defaulted(integer_spec(), 1200),
        ignore_deps=defaulted(boolean(), False),
        vars=dictof(string_spec(), stack_specs.var_spec(), nested=True),
        skip_update_if_equivalent=listof(stack_specs.skipper_spec()),
        suspend_actions=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        artifact_retention_after_deployment=defaulted(boolean(), False),
        command=optional_spec(string_spec()),
        netscaler=optional_spec(self.netscaler_spec),
        dns=optional_spec(
            stack_specs.dns_spec(
                create_spec(
                    stack_objs.DNS,
                    vars=dictof(
                        string_spec(),
                        formatted(string_spec(), formatter=MergedOptionStringFormatter),
                        nested=True,
                    ),
                    providers=dictof(string_spec(), stack_specs.dns_provider_spec()),
                    # sites is delayed so site specs resolve lazily
                    sites=delayed(dictof(string_spec(), stack_specs.dns_site_spec())),
                )
            )
        ),
        scaling_options=create_spec(
            ScalingOptions,
            highest_min=defaulted(integer_spec(), 2),
            instance_count_limit=defaulted(integer_spec(), 10),
        ),
        artifacts=container_spec(
            artifact_objs.ArtifactCollection,
            dictof(
                string_spec(),
                create_spec(
                    artifact_objs.Artifact,
                    not_created_here=defaulted(boolean(), False),
                    compression_type=string_choice_spec(["gz", "xz"]),
                    history_length=integer_spec(),
                    cleanup_prefix=optional_spec(string_spec()),
                    upload_to=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                    commands=listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand),
                    paths=listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath),
                    files=listof(
                        create_spec(
                            artifact_objs.ArtifactFile,
                            # A file needs inline content or a task to produce it
                            validators.has_either(["content", "task"]),
                            content=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                            task=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                            path=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                            task_runner=formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter),
                        )
                    ),
                ),
            ),
        ),
        newrelic=optional_spec(
            create_spec(
                stack_objs.NewRelic,
                api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                account_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                application_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                env=listof(stack_specs.env_spec(), expect=stack_objs.Environment),
                deployed_version=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            )
        ),
        downtimer_options=optional_spec(
            dictof(
                valid_string_spec(valid_alerting_system()),
                create_spec(
                    stack_objs.DowntimerOptions,
                    hosts=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                ),
            )
        ),
        alerting_systems=optional_spec(dictof(string_spec(), self.alerting_system_spec)),
        ssh=optional_spec(
            create_spec(
                stack_objs.SSH,
                validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
                user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                instance_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                address=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                instance=optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
                auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_key_path=formatted(
                    defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"),
                    formatter=MergedOptionStringFormatter,
                ),
                instance_key_path=formatted(
                    defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"),
                    formatter=MergedOptionStringFormatter,
                ),
                storage_type=formatted(
                    defaulted(string_choice_spec(["url", "rattic"]), "url"),
                    formatter=MergedOptionStringFormatter,
                ),
                storage_host=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            )
        ),
        confirm_deployment=optional_spec(self.confirm_deployment_spec),
    )