def normalise(self, meta, val):
    """Normalise a Lambda definition, first merging in any template it says to ``use``."""
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    function_name = meta.key_names()['_key_name_0']

    normalised = sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(formatted_string)
        , location = sb.required(formatted_string)
        , description = formatted_string
        , sample_event = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , desired_output_for_test = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        ).normalise(meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    for attribute in ('sample_event', 'desired_output_for_test'):
        value = normalised[attribute]
        if isinstance(value, MergedOptions):
            as_dict = value.as_dict()

            class Arbitrary(dictobj):
                fields = list(as_dict.keys())

            normalised[attribute] = Arbitrary(**as_dict)

    return normalised
def normalise(self, meta, val):
    """Normalise a LambdaPostMethod, resolving the function/location pair.

    ``function`` may be either a plain string (in which case ``location`` must
    be given) or a defined lambda function object that carries its own location.
    """
    result = sb.create_spec(LambdaPostMethod
        , function = formatted_string()
        , location = formatted_string()
        , account = sb.optional_spec(formatted_string())
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , mapping = sb.defaulted(mapping_spec(), Mapping("application/json", "$input.json('$')"))
        ).normalise(meta, val)

    function = result.function
    location = None
    if not isinstance(function, six.string_types):
        # A defined lambda function knows its own location
        location = function.location
        function = function.name
        # BUG FIX: this conflict check previously ran before ``location`` was
        # derived from the function object, so it could never fire
        if result.location is not NotSpecified:
            raise BadSpecValue("Please don't specify a defined lambda function and location at the same time", meta=meta)
    elif result.location is NotSpecified:
        raise BadSpecValue("Location is a required key!", meta=meta)
    else:
        # BUG FIX: a user-specified location was previously clobbered with None
        location = result.location

    result.function = function
    result.location = location
    return result
def normalise(self, meta, val):
    """Normalise a Bucket definition, merging templates and combining permission statements."""
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    bucket_name = meta.key_names()['_key_name_0']

    def permissions_for(key, spec):
        # Normalise one of the permission-list keys, tolerating its absence
        return sb.listof(spec).normalise(meta.at(key), val[key] if key in val else NotSpecified)

    original_permission = permissions_for("permission", resource_policy_dict())
    deny_permission = permissions_for("deny_permission", resource_policy_dict(effect='Deny'))
    allow_permission = permissions_for("allow_permission", resource_policy_dict(effect='Allow'))

    # require_mfa_to_delete is an alias for this permission
    if val.get("require_mfa_to_delete") is True:
        delete_policy = {
              "action": "s3:DeleteBucket"
            , "resource": {"s3": "__self__"}
            , "Condition": {"Bool": {"aws:MultiFactorAuthPresent": True}}
            }
        normalised_delete_policy = resource_policy_dict(effect='Allow').normalise(meta.at("require_mfa_to_delete"), delete_policy)
        allow_permission.append(normalised_delete_policy)

    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket,
        acl=sb.defaulted(sb.match_spec((six.string_types, canned_acl_spec()), (dict, acl_statement_spec('acl', 'acl'))), None),
        name=sb.overridden(bucket_name),
        location=sb.defaulted(formatted_string, None),
        permission=sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name))),
        tags=sb.dictof(sb.string_spec(), formatted_string),
        website=sb.defaulted(website_statement_spec("website", "website"), None),
        logging=sb.defaulted(logging_statement_spec("logging", "logging"), None),
        lifecycle=sb.defaulted(sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None),
    ).normalise(meta, val)
def netscaler_spec(self):
    """Spec for netscaler options."""
    fmt = lambda spec: formatted(spec, formatter=MergedOptionStringFormatter)

    class to_boolean(Spec):
        # Wraps a spec and coerces the formatted 'True'/'False' strings back into booleans
        def setup(self, spec):
            self.spec = spec

        def normalise_either(self, meta, val):
            val = self.spec.normalise(meta, val)
            if isinstance(val, bool):
                return val
            if val == 'False':
                return False
            elif val == 'True':
                return True
            raise BadConfiguration("Expected a boolean", got=val, meta=meta)

    return create_spec(netscaler_specs.NetScaler,
        host=required(fmt(string_spec())),
        dry_run=to_boolean(fmt(overridden("{bespin.dry_run}"))),
        username=required(fmt(string_spec())),
        configuration_username=optional_spec(fmt(string_spec())),
        password=delayed(required(fmt(string_spec()))),
        configuration_password=optional_spec(fmt(string_spec())),
        verify_ssl=defaulted(boolean(), True),
        nitro_api_version=defaulted(fmt(string_spec()), "v1"),
        configuration=optional_spec(netscaler_specs.configuration_spec()),
        syncable_environments=optional_spec(listof(valid_environment_spec())),
    )
def aws_syncr_spec(self):
    """Spec for aws_syncr options"""
    formatted_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    return create_spec(AwsSyncr,
        extra=defaulted(formatted_string, ""),
        stage=defaulted(formatted_string, ""),
        debug=defaulted(boolean(), False),
        dry_run=defaulted(boolean(), False),
        location=defaulted(formatted_string, "ap-southeast-2"),
        artifact=formatted_string,
        environment=formatted_string,
        config_folder=directory_spec(),
    )
def context_spec(self):
    """Spec for specifying context options"""
    from harpoon.option_spec import image_objs
    context = create_spec(image_objs.Context,
        include=listof(string_spec()),
        exclude=listof(string_spec()),
        enabled=defaulted(boolean(), True),
        parent_dir=directory_spec(formatted(defaulted(string_spec(), "{config_root}"), formatter=MergedOptionStringFormatter)),
        use_gitignore=defaulted(boolean(), False),
        use_git_timestamps=defaulted(or_spec(boolean(), listof(string_spec())), False),
    )
    # A bare boolean is shorthand for {"enabled": <bool>}
    return dict_from_bool_spec(lambda meta, val: {"enabled": val}, context)
def normalise(self, meta, val):
    """Normalise an EncryptionKey, merging templates and building the key policy document."""
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    key_name = meta.key_names()['_key_name_0']

    key = sb.create_spec(EncryptionKey,
        name=sb.overridden(key_name),
        location=sb.required(formatted_string),
        description=formatted_string,
        grant=sb.listof(grant_statement_spec('key', key_name)),
        admin_users=sb.listof(sb.any_spec()),
        permission=sb.listof(sb.dictionary_spec()),
        no_root_access=sb.defaulted(sb.boolean(), False),
    ).normalise(meta, val)

    statements = key.permission
    if not key.no_root_access:
        statements.append({"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""})
    for admin_user in key.admin_users or []:
        statements.append({"principal": admin_user, "action": "kms:*", "resource": {"kms": "__self__"}, "Sid": ""})

    key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('key', key_name))).normalise(meta.at("admin_users"), statements)
    return key
def normalise(self, meta, val):
    """Normalise an ADD command: either inline content/context, or a list of urls to fetch."""
    from harpoon.option_spec.harpoon_specs import HarpoonSpec

    if "content" not in val and "context" not in val:
        # Plain ADD of one or more fetched sources under an optional prefix
        spec = sb.set_options(
            get=sb.required(sb.listof(sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter))),
            prefix=sb.defaulted(sb.string_spec(), ""),
        )
        result = spec.normalise(meta, val)
        return [Command(("ADD", "{0} {1}/{2}".format(got, result["prefix"], got))) for got in result["get"]]

    spec = sb.set_options(
        mtime=sb.optional_spec(sb.integer_spec()),
        dest=sb.required(sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)),
        content=sb.string_spec(),
        context=sb.optional_spec(HarpoonSpec().context_spec),
    )
    result = spec.normalise(meta, val)
    if result["content"] != "" and result["context"] is not NotSpecified:
        raise BadOption("Please don't specify both context and content")

    mtime = result["mtime"]
    if mtime is NotSpecified:
        # Fall back to the repository mtime helper with git timestamps enabled
        ctxt = type("Context", (object, ), {"use_git": True})()
        mtime = meta.everything["mtime"](ctxt)

    digest = hashlib.md5(result['content'].encode('utf-8')).hexdigest()
    context_name = "{0}-{1}-mtime({2})".format(digest, result["dest"].replace("/", "-").replace(" ", "--"), mtime)
    extra_context = (result["content"], context_name)
    if result["context"] is not NotSpecified:
        context_name = "{0}.tar".format(context_name)
        extra_context = ({"context": result["context"]}, context_name)

    return Command(("ADD", "{0} {1}".format(context_name, result["dest"])), extra_context)
def wait_condition_spec(self):
    """Spec for a wait_condition block"""
    from harpoon.option_spec import image_objs
    fmt_string = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    fmt_mapping = lambda: optional_spec(dictof(fmt_string, fmt_string))
    return create_spec(image_objs.WaitCondition,
        harpoon=formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter),
        timeout=defaulted(integer_spec(), 300),
        wait_between_attempts=defaulted(float_spec(), 5),
        greps=fmt_mapping(),
        command=optional_spec(listof(fmt_string)),
        port_open=optional_spec(listof(integer_spec())),
        file_value=fmt_mapping(),
        curl_result=fmt_mapping(),
        file_exists=optional_spec(listof(fmt_string)),
    )
def normalise(self, meta, val):
    """Normalise a Role definition, merging templates and combining permission/trust statements."""
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    role_name = meta.key_names()['_key_name_0']

    def normalise_key(key, spec):
        # Normalise one of the list-valued keys, tolerating its absence
        return sb.listof(spec).normalise(meta.at(key), val[key] if key in val else NotSpecified)

    original_permission = normalise_key("permission", permission_dict())
    deny_permission = normalise_key("deny_permission", permission_dict(effect='Deny'))
    allow_permission = normalise_key("allow_permission", permission_dict(effect='Allow'))
    allow_to_assume_me = normalise_key("allow_to_assume_me", trust_dict("principal"))
    disallow_to_assume_me = normalise_key("disallow_to_assume_me", trust_dict("notprincipal"))

    val = val.wrapped()
    val['trust'] = allow_to_assume_me + disallow_to_assume_me
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Role,
        name=sb.overridden(role_name),
        description=formatted_string,
        trust=sb.container_spec(Document, sb.listof(trust_statement_spec('role', role_name))),
        permission=sb.container_spec(Document, sb.listof(permission_statement_spec('role', role_name))),
        make_instance_profile=sb.defaulted(sb.boolean(), False),
    ).normalise(meta, val)
def alerting_system_spec(self):
    """Spec for a single alerting system."""
    fmt = lambda spec: formatted(spec, formatter=MergedOptionStringFormatter)
    return create_spec(stack_objs.AlertingSystem,
        name=fmt(overridden("{_key_name_1}")),
        type=string_choice_spec(["nagios"]),
        endpoint=required(fmt(string_spec())),
        verify_ssl=defaulted(boolean(), True),
    )
def normalise(self, meta, val):
    """Normalise a canned acl name into a callable producing the grants for that acl."""
    canned_acls = [
        "private", "public-read", "public-read-write", "aws-exec-read",
        "authenticated-read", "log-delivery-write",
    ]
    acl = sb.defaulted(
        sb.formatted(sb.string_choice_spec(canned_acls), formatter=MergedOptionStringFormatter),
        "private",
    ).normalise(meta, val)

    def ret(owner):
        """http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl"""
        # Every canned acl grants the owner full control; the rest varies per acl
        new_grants = [Acls.FullControl(owner)]
        if acl == "public-read":
            new_grants.append(Acls.Read(Acls.AllUsersGroup))
        elif acl == "public-read-write":
            new_grants.extend([Acls.Read(Acls.AllUsersGroup), Acls.Write(Acls.AllUsersGroup)])
        elif acl == "aws-exec-read":
            new_grants.append(Acls.Read(Acls.EC2Group))
        elif acl == "authenticated-read":
            new_grants.append(Acls.Read(Acls.AuthenticatedUsersGroup))
        elif acl == "log-delivery-write":
            new_grants.extend([Acls.Write(Acls.LogDeliveryGroup), Acls.ReadACP(Acls.LogDeliveryGroup)])
        return {"ACL": acl, "AccessControlPolicy": {"Grants": new_grants}}

    return ret
def url_checker_spec(self):
    """Spec for checking a deployed url returns an expected value."""
    fmt_string = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    return create_spec(deployment_check.UrlChecker,
        check_url=required(fmt_string),
        endpoint=required(delayed(stack_specs.var_spec())),
        expect=required(fmt_string),
        timeout_after=defaulted(integer_spec(), 600),
    )
def environment_spec(self):
    """Spec for each environment"""
    # FIX: use a raw string for the regex — "\d" is an invalid escape
    # sequence and raises a warning on modern Python
    return create_spec(Environment
        , account_id = required(or_spec(string_spec(), valid_string_spec(validators.regexed(r"\d+"))))
        , region = defaulted(string_spec(), "ap-southeast-2")
        , vars = dictionary_spec()
        )
def confirm_deployment_spec(self):
    """Spec for options confirming a deployment worked."""
    fmt = lambda: formatted(string_spec(), formatter=MergedOptionStringFormatter)
    sns = create_spec(deployment_check.SNSConfirmation,
        validators.deprecated_key("auto_scaling_group_id", "Use ``confirm_deployment.auto_scaling_group_name``"),
        validators.deprecated_key("env", "Use ``stack.<stack>.env`` instead``"),
        timeout=defaulted(integer_spec(), 300),
        version_message=required(fmt()),
        deployment_queue=required(fmt()),
    )
    return create_spec(deployment_check.ConfirmDeployment,
        deploys_s3_path=optional_spec(listof(stack_specs.s3_address())),
        zero_instances_is_ok=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(fmt()),
        url_checker=optional_spec(self.url_checker_spec),
        sns_confirmation=optional_spec(sns),
    )
def context_spec(self):
    """Spec for specifying context options"""
    from harpoon.option_spec import image_objs
    context = create_spec(image_objs.Context,
        validators.deprecated_key("use_git_timestamps", "Since docker 1.8, timestamps no longer invalidate the docker layer cache"),
        include=listof(string_spec()),
        exclude=listof(string_spec()),
        enabled=defaulted(boolean(), True),
        find_options=string_spec(),
        parent_dir=directory_spec(formatted(defaulted(string_spec(), "{config_root}"), formatter=MergedOptionStringFormatter)),
        use_gitignore=defaulted(boolean(), False),
        ignore_find_errors=defaulted(boolean(), False),
    )
    # A bare boolean is shorthand for {"enabled": <bool>}
    return dict_from_bool_spec(lambda meta, val: {"enabled": val}, context)
def make_spec(self, meta, formatter):
    """
    Build the spec for this Field.

    Callable specs are resolved first, then the spec is wrapped in order:
    none_spec when nullable, defaulted when a default exists, formatted when
    formatting is enabled, and finally any custom wrapper.

    Raises BadSpec when formatting is requested but no formatter is given.
    """
    spec = self.spec() if callable(self.spec) else self.spec

    af = self.after_format
    if af is not NotSpecified and callable(af):
        af = af()

    if self.nullable:
        spec = defaulted(or_spec(none_spec(), spec), None)
        if af is not NotSpecified:
            af = or_spec(none_spec(), af)

    if self.default is not NotSpecified:
        spec = defaulted(spec, self.default)

    if self.formatted:
        if formatter is None:
            raise BadSpec("Need a formatter to be defined", meta=meta)
        spec = formatted(spec, formatter=formatter, after_format=af)

    if self.wrapper is not NotSpecified:
        spec = self.wrapper(spec)

    return spec
def normalise(self, meta, val):
    """Normalise a MockMethod, then format sample_event/desired_output_for_test strings."""
    result = sb.create_spec(MockMethod
        , http_method = sb.overridden(self.method)
        , resource_name = sb.overridden(self.resource_name)
        , request_mapping = sb.defaulted(mapping_spec(), Mapping("application/json", '{"statusCode": 200}'))
        , mapping = mapping_spec()
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , sample_event = sb.or_spec(sb.dictionary_spec(), sb.string_spec())
        , desired_output_for_test = sb.or_spec(sb.dictionary_spec(), sb.string_spec())
        ).normalise(meta, val)

    # BUG FIX: this post-processing was dead code — the function returned
    # before reaching it and referenced an undefined ``result``
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    return result
def alerting_system_spec(self):
    """Spec for a single alerting system."""
    return create_spec(stack_objs.AlertingSystem
        , name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , type = string_choice_spec(["nagios"])
        , endpoint = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , verify_ssl = defaulted(boolean(), True)
        )
def environment_spec(self):
    """Spec for each environment"""
    # FIX: use a raw string for the regex — "\d" is an invalid escape
    # sequence and raises a warning on modern Python
    return create_spec(
        stack_objs.Environment,
        account_id=required(or_spec(valid_string_spec(validators.regexed(r"\d+")), integer_spec())),
        region=defaulted(string_spec(), "ap-southeast-2"),
        vars=dictionary_spec(),
        tags=self.tags_spec)
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular stack"""
    task = create_spec(task_objs.Task,
        action=defaulted(string_choice_spec(available_actions, "No such task"), default_action),
        options=dictionary_spec(),
        overrides=dictionary_spec(),
        description=string_spec(),
    )
    return dictof(self.task_name_spec, task)
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular stack"""
    task = create_spec(task_objs.Task,
        action=defaulted(string_choice_spec(available_actions, "No such task"), default_action),
        options=dictionary_spec(),
        overrides=dictionary_spec(),
        description=string_spec(),
    )
    return dictof(self.task_name_spec, task)
def url_checker_spec(self):
    """Spec for checking a deployed url returns an expected value."""
    return create_spec(deployment_check.UrlChecker
        , check_url = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , endpoint = required(delayed(stack_specs.var_spec()))
        , expect = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , timeout_after = defaulted(integer_spec(), 600)
        )
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular image"""
    task = create_spec(task_objs.Task,
        validators.deprecated_key("spec", "Use ``action`` and ``options`` instead (note that ``action`` defaults to run)"),
        action=defaulted(string_choice_spec(available_actions, "No such task"), default_action),
        options=dictionary_spec(),
        overrides=dictionary_spec(),
        description=string_spec(),
    )
    return dictof(self.task_name_spec, task)
def make_spec(self):
    """Return the raw args mapping and a set_options spec accepting both casings."""
    args = {}
    for name, spec in self.args(self.self_type, self.self_name).items():
        name, capitalized = capitalize(name)
        args[(name, capitalized)] = spec

    kwargs = {}
    for (name, capitalized), spec in list(args.items()):
        # The lowercase form defaults to NotSpecified; the capitalized form accepts anything
        kwargs[name] = sb.defaulted(spec, NotSpecified)
        kwargs[capitalized] = sb.any_spec()

    return args, sb.set_options(**kwargs)
def make_spec(self):
    """Return the raw args mapping and a set_options spec accepting both casings."""
    args = {}
    for name, spec in self.args(self.self_type, self.self_name).items():
        name, capitalized = capitalize(name)
        args[(name, capitalized)] = spec

    kwargs = {}
    for (name, capitalized), spec in list(args.items()):
        # The lowercase form defaults to NotSpecified; the capitalized form accepts anything
        kwargs[name] = sb.defaulted(spec, NotSpecified)
        kwargs[capitalized] = sb.any_spec()

    return args, sb.set_options(**kwargs)
def normalise_filled(self, meta, val):
    """Normalise function code as exactly one of s3, inline or directory."""
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)
    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)
    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(S3Code
            , key = formatted_string
            , bucket = formatted_string
            , version = sb.defaulted(sb.string_spec(), NotSpecified)
            ).normalise(meta, val['s3'])

    if "inline" in val:
        # The runtime lives on the parent configuration of this code block
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = formatted_string.normalise(meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode
            , code = sb.string_spec()
            , runtime = sb.overridden(runtime)
            ).normalise(meta, {"code": val['inline']})

    directory = val['directory']
    if isinstance(val['directory'], six.string_types):
        directory = {"directory": val['directory']}
    if 'directory' in directory:
        directory['directory'] = formatted_string.normalise(meta.at("directory").at("directory"), directory['directory'])
    return sb.create_spec(DirectoryCode
        , directory = sb.directory_spec()
        , exclude = sb.listof(sb.string_spec())
        ).normalise(meta, directory)
def bespin_spec(self):
    """Spec for bespin options"""
    fmt_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    fmt_boolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    return create_spec(Bespin,
        validators.deprecated_key("region", "Please use ``environments.<env>.region``"),
        config=file_spec(),
        configuration=any_spec(),
        assume_role=optional_spec(string_spec()),
        dry_run=defaulted(boolean(), False),
        flat=defaulted(boolean(), False),
        environment=optional_spec(string_spec()),
        no_assume_role=defaulted(fmt_boolean, False),
        chosen_task=defaulted(fmt_string, "list_tasks"),
        chosen_stack=defaulted(fmt_string, ""),
        chosen_artifact=defaulted(fmt_string, ""),
        extra_imports=listof(imports.import_spec()),
    )
def normalise(self, meta, val):
    """Normalise a canned acl name into a callable producing the grants for that acl."""
    canned_acls = [
        "private", "public-read", "public-read-write", "aws-exec-read",
        "authenticated-read", "log-delivery-write",
    ]
    acl = sb.defaulted(
        sb.formatted(sb.string_choice_spec(canned_acls), formatter=MergedOptionStringFormatter),
        "private").normalise(meta, val)

    def ret(owner):
        """http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl"""
        # Every canned acl grants the owner full control; the rest varies per acl
        new_grants = [Acls.FullControl(owner)]
        if acl == "public-read":
            new_grants.append(Acls.Read(Acls.AllUsersGroup))
        elif acl == "public-read-write":
            new_grants.extend([Acls.Read(Acls.AllUsersGroup), Acls.Write(Acls.AllUsersGroup)])
        elif acl == "aws-exec-read":
            new_grants.append(Acls.Read(Acls.EC2Group))
        elif acl == "authenticated-read":
            new_grants.append(Acls.Read(Acls.AuthenticatedUsersGroup))
        elif acl == "log-delivery-write":
            new_grants.extend([Acls.Write(Acls.LogDeliveryGroup), Acls.ReadACP(Acls.LogDeliveryGroup)])
        return {"ACL": acl, "AccessControlPolicy": {"Grants": new_grants}}

    return ret
def normalise(self, meta, val):
    """Normalise a Role definition, merging templates and combining permission/trust statements."""
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    role_name = meta.key_names()['_key_name_0']

    original_permission = sb.listof(permission_dict()).normalise(meta.at("permission"), val["permission"] if "permission" in val else NotSpecified)
    deny_permission = sb.listof(permission_dict(effect='Deny')).normalise(meta.at("deny_permission"), val["deny_permission"] if "deny_permission" in val else NotSpecified)
    allow_permission = sb.listof(permission_dict(effect='Allow')).normalise(meta.at("allow_permission"), val["allow_permission"] if "allow_permission" in val else NotSpecified)
    allow_to_assume_me = sb.listof(trust_dict("principal")).normalise(meta.at("allow_to_assume_me"), val.get("allow_to_assume_me", NotSpecified))
    disallow_to_assume_me = sb.listof(trust_dict("notprincipal")).normalise(meta.at("disallow_to_assume_me"), val.get("disallow_to_assume_me", NotSpecified))

    val = val.wrapped()
    val['trust'] = allow_to_assume_me + disallow_to_assume_me
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Role
        , name = sb.overridden(role_name)
        , description = formatted_string
        , trust = sb.container_spec(Document, sb.listof(trust_statement_spec('role', role_name)))
        , permission = sb.container_spec(Document, sb.listof(permission_statement_spec('role', role_name)))
        , make_instance_profile = sb.defaulted(sb.boolean(), False)
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise a LambdaMethod, formatting event strings and resolving function/location.

    ``function`` may be either a plain string (in which case ``location`` must
    be given) or a defined lambda function object that carries its own location.
    """
    result = sb.create_spec(LambdaMethod
        , http_method = sb.overridden(self.method)
        , resource_name = sb.overridden(self.resource_name)
        , function = formatted_string()
        , location = formatted_string()
        , account = sb.optional_spec(formatted_string())
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , request_mapping = sb.defaulted(mapping_spec(), Mapping("application/json", ""))
        , mapping = sb.defaulted(mapping_spec(), Mapping("application/json", "$input.json('$')"))
        , sample_event = sb.or_spec(formatted_dictionary(), sb.string_spec())
        , desired_output_for_test = sb.or_spec(formatted_dictionary(), sb.string_spec())
        ).normalise(meta, val)

    # Strings that look like format expressions get run through the formatter
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    function = result.function
    location = None
    if not isinstance(function, six.string_types):
        # A defined lambda function knows its own location
        location = function.location
        function = function.name
        # BUG FIX: this conflict check previously ran before ``location`` was
        # derived from the function object, so it could never fire
        if result.location is not NotSpecified:
            raise BadSpecValue("Please don't specify a defined lambda function and location at the same time", meta=meta)
    elif result.location is NotSpecified:
        raise BadSpecValue("Location is a required key!", meta=meta)
    else:
        # BUG FIX: a user-specified location was previously clobbered with None
        location = result.location

    result.function = function
    result.location = location
    return result
def normalise_either(self, meta, val):
    """Normalise a value that may be a path to a file of parameters.

    When given a string (or nothing), the value is formatted with the default;
    if the result is an existing path, the file is parsed with ``self.filetype``.
    The parameters are then validated against ``params_spec``.
    """
    if isinstance(val, six.string_types) or val is NotSpecified:
        val = formatted(defaulted(string_spec(), self.dflt), formatter=MergedOptionStringFormatter).normalise(meta, val)
        if os.path.exists(val):
            try:
                with open(val) as fle:
                    val = self.filetype.load(fle)
            except (ValueError, TypeError) as error:
                raise BadFile(error, filename=val, meta=meta)
        # Validate the parameters whether or not they came from a file
        self.params_spec().normalise(meta, val)
    return val
def confirm_deployment_spec(self):
    """Spec for options confirming a deployment worked."""
    return create_spec(deployment_check.ConfirmDeployment
        , deploys_s3_path = optional_spec(listof(stack_specs.s3_address()))
        , zero_instances_is_ok = defaulted(boolean(), False)
        , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , url_checker = optional_spec(self.url_checker_spec)
        , sns_confirmation = optional_spec(create_spec(deployment_check.SNSConfirmation
            , validators.deprecated_key("auto_scaling_group_id", "Use ``confirm_deployment.auto_scaling_group_name``")
            , validators.deprecated_key("env", "Use ``stack.<stack>.env`` instead``")
            , timeout = defaulted(integer_spec(), 300)
            , version_message = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , deployment_queue = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))
        )
def normalise(self, meta, val):
    """Normalise a DomainName, stripping any trailing dots from the zone."""
    name = meta.key_names()["_key_name_0"]
    result = sb.create_spec(DomainName,
        name=sb.overridden(name),
        gateway_location=sb.overridden(self.gateway_location),
        zone=formatted_string(),
        stage=formatted_string(),
        base_path=sb.defaulted(formatted_string(), "(none)"),
        certificate=sb.required(certificate_spec()),
    ).normalise(meta, val)

    if result.zone:
        result.zone = result.zone.rstrip(".")
    return result
def normalise(self, meta, val):
    """Normalise a MockMethod, then format sample_event/desired_output_for_test strings."""
    result = sb.create_spec(
        MockMethod,
        http_method=sb.overridden(self.method),
        resource_name=sb.overridden(self.resource_name),
        request_mapping=sb.defaulted(
            mapping_spec(), Mapping("application/json", '{"statusCode": 200}')),
        mapping=mapping_spec(),
        require_api_key=sb.defaulted(sb.boolean(), False),
        sample_event=sb.or_spec(sb.dictionary_spec(), sb.string_spec()),
        desired_output_for_test=sb.or_spec(sb.dictionary_spec(), sb.string_spec()),
    ).normalise(meta, val)

    # BUG FIX: this loop was dead code — the function returned above it and
    # referenced an undefined ``result``
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(
                    sb.string_spec(),
                    formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    return result
def normalise(self, meta, val):
    """Normalise a Lambda definition, first merging in any template it says to ``use``."""
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    function_name = meta.key_names()['_key_name_0']

    return sb.create_spec(Lambda,
        name=sb.overridden(function_name),
        role=sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"]))),
        code=sb.required(function_code_spec()),
        handler=function_handler_spec(),
        timeout=sb.integer_spec(),
        runtime=sb.required(formatted_string),
        location=sb.required(formatted_string),
        description=formatted_string,
        sample_event=sb.defaulted(sb.or_spec(sb.dictionary_spec(), sb.string_spec()), ""),
        memory_size=sb.defaulted(divisible_by_spec(64), 128),
    ).normalise(meta, val)
def make_spec(self):
    """Return lowercase-keyed arg specs and a set_options spec accepting both casings."""
    args = {}
    for name, spec in self.args(self.self_type, self.self_name).items():
        name, capitalized = capitalize(name)
        args[(name, capitalized)] = spec

    kwargs = {}
    for (name, capitalized), spec in list(args.items()):
        kwargs[name] = sb.defaulted(spec, NotSpecified)
        if capitalized not in kwargs:
            kwargs[capitalized] = sb.any_spec()

    # Only keep args whose lowercase form starts with a lowercase letter
    filtered_args = {(a, c): s for (a, c), s in args.items() if a and a[0].islower()}
    return filtered_args, sb.set_options(**kwargs)
def normalise(self, meta, val):
    """Normalise a DomainName, stripping any trailing dots from the zone."""
    name = meta.key_names()["_key_name_0"]
    result = sb.create_spec(DomainName
        , name = sb.overridden(name)
        , gateway_location = sb.overridden(self.gateway_location)
        , zone = formatted_string()
        , stage = formatted_string()
        , base_path = sb.defaulted(formatted_string(), "(none)")
        , certificate = sb.required(certificate_spec())
        ).normalise(meta, val)

    if result.zone:
        result.zone = result.zone.rstrip(".")
    return result
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular image"""
    return dictof(self.task_name_spec
        , create_spec(task_objs.Task
            , validators.deprecated_key("spec", "Use ``action`` and ``options`` instead (note that ``action`` defaults to run)")
            , action = defaulted(string_choice_spec(available_actions, "No such task"), default_action)
            , options = dictionary_spec()
            , overrides = dictionary_spec()
            , description = string_spec()
            )
        )
def make_spec(self):
    """
    Build the options spec for this object's arguments.

    Returns ``(filtered_args, spec)`` where ``filtered_args`` keeps only the
    arguments whose name begins with a lowercase letter (keyed by the
    ``(name, CapitalizedName)`` pair) and ``spec`` accepts both casings.
    """
    # capitalize returns the (name, CapitalizedName) pair used as the key
    args = {capitalize(name): spec
            for name, spec in self.args(self.self_type, self.self_name).items()}

    kwargs = {}
    for (name, cap_name), spec in list(args.items()):
        # Declared spelling is optional with a NotSpecified default
        kwargs[name] = sb.defaulted(spec, NotSpecified)
        # Capitalised spelling is accepted without validation
        if cap_name not in kwargs:
            kwargs[cap_name] = sb.any_spec()

    filtered_args = dict([((a, c), s) for (a, c), s in args.items() if a and a[0].islower()])
    return filtered_args, sb.set_options(**kwargs)
def normalise(self, meta, val):
    """
    Normalise a value that may name a file on disk.

    If ``val`` is a string (or unspecified, in which case ``self.dflt`` is
    used), it is formatted and treated as a path:

    * path exists and ``self.filetype`` is ``str`` — return the raw contents
    * path exists otherwise — parse it with ``self.filetype.load`` (presumably
      a json/yaml-style module — TODO confirm) and normalise the parsed data
      with ``self.params_spec()``
    * path does not exist — return ``NotSpecified``

    Any other value is normalised directly as a string or via
    ``self.params_spec()`` depending on ``self.filetype``.

    Raises ``BadFile`` when the file exists but cannot be parsed.
    """
    if isinstance(val, six.string_types) or val is NotSpecified:
        # Resolve {placeholders} and fall back to the configured default path
        val = formatted(defaulted(string_spec(), self.dflt), formatter=MergedOptionStringFormatter).normalise(meta, val)
        if os.path.exists(val):
            if self.filetype is str:
                # Plain text: hand back the file contents untouched
                with open(val) as fle:
                    return fle.read()
            try:
                with open(val) as fle:
                    val = self.filetype.load(fle)
            except (ValueError, TypeError) as error:
                raise BadFile(error, filename=val, meta=meta)
            else:
                # Successfully parsed: validate the loaded structure
                return self.params_spec().normalise(meta, val)
        else:
            # A missing file is not an error; the value is simply absent
            return NotSpecified
    # Non-string input: validate it directly
    if self.filetype is str:
        return string_spec().normalise(meta, val)
    return self.params_spec().normalise(meta, val)
def normalise_filled(self, meta, val):
    """
    Normalise the ``code`` section of a lambda definition.

    Exactly one of ``s3``, ``inline`` or ``directory`` must be supplied;
    the matching branch produces an ``S3Code``, ``InlineCode`` or
    ``DirectoryCode`` object. Raises ``BadSpecValue`` when zero or more
    than one source is given.
    """
    # Only the three known code sources are valid keys
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)
    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)
    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(S3Code
            , key = formatted_string
            , bucket = formatted_string
            , version = sb.defaulted(sb.string_spec(), NotSpecified)
            ).normalise(meta, val['s3'])
    elif "inline" in val:
        # Look up the sibling "runtime" option on the parent of this "code"
        # section (pop removes the "code" path segment itself).
        # NOTE(review): relies on the private meta._path structure — confirm
        # against the input_algorithms version in use.
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode
            , code = sb.string_spec()
            , runtime = sb.overridden(runtime)
            ).normalise(meta, {"code": val['inline']})
    else:
        directory = val['directory']
        # A bare string is shorthand for {"directory": <string>}
        if isinstance(val['directory'], six.string_types):
            directory = {"directory": val['directory']}
        if 'directory' in directory:
            # Resolve {placeholders} in the directory path before validation
            formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
            directory['directory'] = formatted_string.normalise(meta.at("directory").at("directory"), directory['directory'])
        return sb.create_spec(DirectoryCode
            , directory = sb.directory_spec()
            , exclude = sb.listof(sb.string_spec())
            ).normalise(meta, directory)
base_path=sb.defaulted(formatted_string(), "(none)"), certificate=sb.required(certificate_spec())).normalise(meta, val) while result.zone and result.zone.endswith("."): result.zone = result.zone[:-1] return result formatted_dictionary_or_string = lambda: sb.match_spec( (six.string_types, formatted_string()), fallback=sb.dictof(sb.string_spec(), formatted_string())) mapping_spec = lambda: sb.create_spec( Mapping, content_type=sb.defaulted(formatted_string(), "application/json"), template=sb.defaulted(formatted_dictionary_or_string(), "$input.json('$')" )) class aws_resource_spec(Spec): def setup(self, method, resource_name): self.method = method self.resource_name = resource_name def normalise(self, meta, val): result = sb.create_spec( LambdaMethod, http_method=sb.overridden(self.method), resource_name=sb.overridden(self.resource_name), function=formatted_string(),
def harpoon_spec(self):
    """Spec for harpoon options"""
    fmt_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    fmt_boolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)

    # Most of the boolean switches share the same shape and default to False
    switches = {name: defaulted(fmt_boolean, False) for name in (
          "flat", "no_cleanup", "silent_build", "keep_replaced", "ignore_missing"
        , "no_intervention", "intervene_afterwards", "do_push", "only_pushable"
        )}
    switches["interactive"] = defaulted(fmt_boolean, True)

    return create_spec(Harpoon
        , config=file_spec()
        , extra=defaulted(fmt_string, "")
        , debug=defaulted(boolean(), False)
        , chosen_task=defaulted(fmt_string, "list_tasks")
        , chosen_image=defaulted(fmt_string, "")
        , docker_context=any_spec()
        , docker_context_maker=any_spec()
        , stdout=defaulted(any_spec(), sys.stdout)
        , tty_stdout=defaulted(any_spec(), lambda: sys.stdout)
        , tty_stderr=defaulted(any_spec(), lambda: sys.stderr)
        , **switches
        )
def image_spec(self):
    """
    Spec for each image.

    Normalises one image's configuration into an ``image_objs.Image``,
    rejecting deprecated keys and filling in defaults.
    """
    # Imported here rather than at module level — presumably to avoid a
    # circular import between option specs and image objects (TODO confirm)
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``")
        , validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``")
        , validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``")
        , validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")
        , validators.deprecated_key("recursive", "Use ``persistence``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name=optional_spec(string_spec())
        , image_index=defaulted(string_spec(), "")
        , container_name=optional_spec(string_spec())
        , image_name_prefix=defaulted(string_spec(), "")
        , user=defaulted(string_spec(), None)
        # NOTE(review): time.time() is evaluated once, when the spec is
        # built, not per-normalise — confirm that is intended
        , mtime=defaulted(any_spec(), time.time())
        , configuration=any_spec()
        , vars=dictionary_spec()
        , deleteable_image=defaulted(boolean(), False)

        # The spec itself
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands=required(container_spec(Commands, listof(command_spec())))
        , squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , persistence=optional_spec(create_spec(image_objs.Persistence
            , validators.deprecated_key("persist", "Use ``folders``")
            , action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , folders=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , cmd=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , shell=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
            # Resolved lazily, once the final image_name is known
            , image_name=delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter))
            ))
        , links=listof(specs.link_spec(), expect=image_objs.Link)
        , context=self.context_spec
        , wait_condition=optional_spec(self.wait_condition_spec)
        , lxc_conf=defaulted(filename_spec(), None)
        , volumes=create_spec(image_objs.Volumes
            , mount=listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )
        , dependency_options=dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached=defaulted(boolean(), False)
                , wait_condition=optional_spec(self.wait_condition_spec)
                )
            )
        , env=listof(specs.env_spec(), expect=image_objs.Environment)
        , ports=listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits=defaulted(listof(dictionary_spec()), None)
        , log_config=defaulted(listof(dictionary_spec()), None)
        , security_opt=defaulted(listof(string_spec()), None)
        , read_only_rootfs=defaulted(boolean(), False)
        , other_options=create_spec(other_options
            , start=dictionary_spec()
            , build=dictionary_spec()
            , create=dictionary_spec()
            , host_config=dictionary_spec()
            )
        , network=create_spec(image_objs.Network
            , dns=defaulted(listof(string_spec()), None)
            , mode=defaulted(string_spec(), None)
            , hostname=defaulted(string_spec(), None)
            , domainname=defaulted(string_spec(), None)
            , disabled=defaulted(boolean(), False)
            , dns_search=defaulted(listof(string_spec()), None)
            , extra_hosts=listof(string_spec())
            , network_mode=defaulted(string_spec(), None)
            , publish_all_ports=defaulted(boolean(), False)
            )
        , cpu=create_spec(image_objs.Cpu
            , cap_add=defaulted(boolean(), None)
            , cpuset=defaulted(listof(string_spec()), None)
            , cap_drop=defaulted(boolean(), None)
            , mem_limit=defaulted(integer_spec(), 0)
            , cpu_shares=defaulted(integer_spec(), None)
            , memswap_limit=defaulted(integer_spec(), 0)
            )
        , devices=defaulted(listof(dictionary_spec()), None)
        , privileged=defaulted(boolean(), False)
        , restart_policy=defaulted(string_spec(), None)
        )
class TransportTarget(dictobj.Spec):
    """
    This is responsible for bringing together the TransportBridge and the TransportItems

    It implements the ability to create and destroy args_for_run (the bridge),
    as well as creating a `script` that may be run with `script.run_with`.

    We also have higher order functions for finding and forgetting devices.

    When creating your own target do something like:

    .. code-block:: python

        class SocketTarget(TransportTarget):
            item_kls = lambda s: SocketItem
            bridge_kls = lambda s: SocketBridge
            description = dictobj.Field(sb.string_spec, default="Understands how to talk to a device over a TCP socket")

    ``protocol_register`` and ``final_future`` are retrieved automatically from
    ``Meta`` if we create the transport by doing
    ``TransportTarget.normalise(meta, **kwargs)``

    Note that the path on the meta cannot be root. So make your meta like:

    .. code-block:: python

        from input_algorithms.meta import Meta
        from option_merge import MergedOptions
        configuration = MergedOptions.using({"protocol_register": ..., "final_future": asyncio.Future()})

        # By saying `at("options")` on the meta we are putting it not at root
        # So when we resolve final_future we don't get recursive option errors
        meta = Meta(configuration, []).at("options")

    Generally you'll be passed in a transport via the ``tasks`` mechanism and
    you won't have to instantiate it yourself.
    """
    # Resolved from the configuration via the formatter (see class docstring)
    protocol_register = dictobj.Field(sb.overridden("{protocol_register}"), formatted=True)
    final_future = dictobj.Field(sb.overridden("{final_future}"), formatted=True)
    default_broadcast = dictobj.Field(sb.defaulted(sb.string_spec(), "255.255.255.255"))

    # Subclasses override these to supply their own item/bridge classes
    item_kls = lambda s: TransportItem
    bridge_kls = lambda s: TransportBridge
    description = dictobj.Field(sb.string_spec, default="Base transport functionality")

    @classmethod
    def create(kls, configuration, options=None):
        """Create an instance of this transport from ``configuration``."""
        options = options if options is not None else configuration
        # Not at root — see the class docstring for why
        meta = Meta(configuration, []).at("options")
        return kls.FieldSpec(formatter=MergedOptionStringFormatter).normalise(meta, options)

    def script(self, raw):
        """Return us a ScriptRunnerIterator for the given `raw` against this `target`"""
        items = list(self.simplify(raw))
        if len(items) > 1:
            # Multiple items run as a Pipeline
            items = Pipeline(*items)
        else:
            items = items[0]
        return ScriptRunnerIterator(items, target=self)

    def session(self):
        """Return an async context manager that yields an args_for_run and closes it on exit."""
        info = {}

        class Session:
            async def __aenter__(s):
                afr = info["afr"] = await self.args_for_run()
                return afr

            async def __aexit__(s, exc_type, exc, tb):
                # Only close if __aenter__ actually produced one
                if "afr" in info:
                    await self.close_args_for_run(info["afr"])

        return Session()

    async def args_for_run(self):
        """Create an instance of args_for_run. This is designed to be shared amongst many `script`"""
        afr = self.bridge_kls()(self.final_future, self
            , protocol_register=self.protocol_register
            , default_broadcast=self.default_broadcast
            )
        await afr.start()
        return afr

    async def close_args_for_run(self, args_for_run):
        """Close an args_for_run"""
        args_for_run.finish()

    async def get_list(self, args_for_run, broadcast=sb.NotSpecified, **kwargs):
        """Return us the targets that we can find from this bridge"""
        addr = broadcast if broadcast is not sb.NotSpecified else self.default_broadcast
        found = await args_for_run.find_devices(addr, **kwargs)
        # First 6 bytes of each found target, hex encoded and sorted
        return sorted([binascii.hexlify(target[:6]).decode() for target in found])

    def device_forgetter(self, args_for_run):
        """Return a function that may be used to forget a device on this args_for_run"""
        return args_for_run.forget

    def find(self, args_for_run):
        """Return a function that may be used to find a device on this args_for_run"""
        return args_for_run.find

    def simplify(self, script_part, chain=None):
        """
        Used by ``self.script`` to convert ``raw`` into TransportItems

        For each leaf child that is found, we gather messages into groups of
        messages with a ``pack`` method and yield ``self.item_kls()(group)``
        with messages that don't have a ``pack`` method yielded as is.

        For example, let's say we have ``[p1, p2, m1, p3]`` where ``m1`` does
        not have a ``pack`` method and the others do, we'll yield:

        * ``self.item_kls()([p1, p2])``
        * ``m1``
        * ``self.item_kls()([p3])``
        """
        chain = [] if chain is None else chain
        if type(script_part) is not list:
            script_part = [script_part]

        final = []
        errors = []
        for p in script_part:
            if getattr(p, "has_children", False):
                # Recurse into containers, tracking the chain of names
                final.append(p.simplified(self.simplify, chain + [p.name]))
                continue
            else:
                if not hasattr(p, "pack"):
                    errors.append(p)
                else:
                    final.append(p)

        if errors:
            raise InvalidScript("Script part has no pack method!", parts=errors, chain=chain)

        # Group consecutive pack-able messages; flush the group whenever a
        # non-pack-able item (produced by recursion above) is encountered
        buf = []
        for p in final:
            if hasattr(p, "pack"):
                buf.append(p)
            else:
                if buf:
                    yield self.item_kls()(buf)
                    buf = []
                yield p

        if buf:
            yield self.item_kls()(buf)
def stack_spec(self):
    """
    Spec for each stack.

    Normalises one stack's configuration into a ``stack_objs.Stack``,
    rejecting deprecated keys and filling in defaults.
    """
    return create_spec(stack_objs.Stack
        # Keys that moved under other sections
        , validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker1``")
        , validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``")
        , validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``")
        , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
        , validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``")

        , bespin=any_spec()
        # Names default to the configuration key the stack lives under
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , stack_name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , environment=formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter)

        , env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable)
        , build_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable)
        , stack_name_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable)

        , tags=self.tags_spec
        , termination_protection=defaulted(boolean(), False)

        # Template/parameter/policy files default to conventional paths
        , stack_json=valid_stack_json(default="{config_root}/{_key_name_1}.json")
        , stack_yaml=valid_stack_yaml(default="{config_root}/{_key_name_1}.yaml")
        , params_json=valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json")
        , params_yaml=valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml")
        , stack_policy=valid_policy_json(default="{config_root}/{_key_name_1}-policy.json")

        , role_name=formatted(string_spec(), formatter=MergedOptionStringFormatter)
        , build_first=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_after=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_timeout=defaulted(integer_spec(), 1200)
        , ignore_deps=defaulted(boolean(), False)

        , vars=delayed(dictof(string_spec(), stack_specs.var_spec(), nested=True))
        , skip_update_if_equivalent=listof(stack_specs.skipper_spec())
        , suspend_actions=defaulted(boolean(), False)
        , auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , artifact_retention_after_deployment=defaulted(boolean(), False)
        , command=optional_spec(string_spec())
        , netscaler=optional_spec(self.netscaler_spec)

        , notify_stackdriver=defaulted(boolean(), False)
        , stackdriver=optional_spec(create_spec(stack_objs.Stackdriver
            , api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , deployment_version=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "<version>")
            ))

        , dns=optional_spec(stack_specs.dns_spec(create_spec(stack_objs.DNS
            , vars=dictof(string_spec(), formatted(string_spec(), formatter=MergedOptionStringFormatter), nested=True)
            , providers=dictof(string_spec(), stack_specs.dns_provider_spec())
            , sites=delayed(dictof(string_spec(), stack_specs.dns_site_spec()))
            )))

        , scaling_options=create_spec(ScalingOptions
            , highest_min=defaulted(integer_spec(), 2)
            , instance_count_limit=defaulted(integer_spec(), 10)
            )

        , artifacts=container_spec(artifact_objs.ArtifactCollection
            , dictof(string_spec(), create_spec(artifact_objs.Artifact
                , not_created_here=defaulted(boolean(), False)
                , compression_type=string_choice_spec(["gz", "xz"])
                , history_length=integer_spec()
                , cleanup_prefix=optional_spec(string_spec())
                , upload_to=formatted(string_spec(), formatter=MergedOptionStringFormatter)
                , commands=listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand)
                , paths=listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath)
                , files=listof(create_spec(artifact_objs.ArtifactFile
                    # Each file comes from literal content or from a task
                    , validators.has_either(["content", "task"])
                    , content=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                    , task=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                    , path=formatted(string_spec(), formatter=MergedOptionStringFormatter)
                    , task_runner=formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter)
                    ))
                ))
            )

        , newrelic=optional_spec(create_spec(stack_objs.NewRelic
            , api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , account_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , application_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable)
            , deployed_version=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        , downtimer_options=optional_spec(dictof(valid_string_spec(valid_alerting_system())
            , create_spec(stack_objs.DowntimerOptions
                , hosts=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                )
            ))

        , alerting_systems=optional_spec(dictof(string_spec(), self.alerting_system_spec))

        , ssh=optional_spec(create_spec(stack_objs.SSH
            , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
            , user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , address=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance=optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            # Key paths default to conventional locations under config_root
            , bastion_key_path=formatted(defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , instance_key_path=formatted(defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , storage_type=formatted(defaulted(string_choice_spec(["url", "rattic"]), "url"), formatter=MergedOptionStringFormatter)
            , storage_host=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        , confirm_deployment=optional_spec(self.confirm_deployment_spec)
        )
def image_spec(self):
    """
    Spec for each image.

    Normalises one image's configuration into an ``image_objs.Image``.
    This variant still supports the ``recursive`` option rather than
    ``persistence``.
    """
    # Imported here rather than at module level — presumably to avoid a
    # circular import between option specs and image objects (TODO confirm)
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``")
        , validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``")
        , validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``")
        , validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name=optional_spec(string_spec())
        , image_index=defaulted(string_spec(), "")
        , container_name=optional_spec(string_spec())
        , image_name_prefix=defaulted(string_spec(), "")
        , user=defaulted(string_spec(), None)
        # NOTE(review): time.time() is evaluated once, when the spec is
        # built, not per-normalise — confirm that is intended
        , mtime=defaulted(any_spec(), time.time())
        , configuration=any_spec()
        , vars=dictionary_spec()
        , deleteable_image=defaulted(boolean(), False)

        # The spec itself
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands=required(container_spec(Commands, listof(command_spec())))
        , squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , recursive=optional_spec(create_spec(image_objs.Recursive
            , action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , persist=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            # Resolved lazily, once the final image_name is known
            , image_name=delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter))
            ))
        , links=listof(specs.link_spec(), expect=image_objs.Link)
        , context=self.context_spec
        , wait_condition=optional_spec(self.wait_condition_spec)
        , lxc_conf=defaulted(filename_spec(), None)
        , volumes=create_spec(image_objs.Volumes
            , mount=listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )
        , dependency_options=dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached=defaulted(boolean(), False)
                , wait_condition=optional_spec(self.wait_condition_spec)
                )
            )
        , env=listof(specs.env_spec(), expect=image_objs.Environment)
        , ports=listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits=defaulted(listof(dictionary_spec()), None)
        , log_config=defaulted(listof(dictionary_spec()), None)
        , security_opt=defaulted(listof(string_spec()), None)
        , read_only_rootfs=defaulted(boolean(), False)
        , other_options=create_spec(other_options
            , start=dictionary_spec()
            , build=dictionary_spec()
            , create=dictionary_spec()
            , host_config=dictionary_spec()
            )
        , network=create_spec(image_objs.Network
            , dns=defaulted(listof(string_spec()), None)
            , mode=defaulted(string_spec(), None)
            , hostname=defaulted(string_spec(), None)
            , domainname=defaulted(string_spec(), None)
            , disabled=defaulted(boolean(), False)
            , dns_search=defaulted(listof(string_spec()), None)
            , extra_hosts=listof(string_spec())
            , network_mode=defaulted(string_spec(), None)
            , publish_all_ports=defaulted(boolean(), False)
            )
        , cpu=create_spec(image_objs.Cpu
            , cap_add=defaulted(boolean(), None)
            , cpuset=defaulted(listof(string_spec()), None)
            , cap_drop=defaulted(boolean(), None)
            , mem_limit=defaulted(integer_spec(), 0)
            , cpu_shares=defaulted(integer_spec(), None)
            , memswap_limit=defaulted(integer_spec(), 0)
            )
        , devices=defaulted(listof(dictionary_spec()), None)
        , privileged=defaulted(boolean(), False)
        , restart_policy=defaulted(string_spec(), None)
        )
def normalise(self, meta, val):
    """
    Normalise an S3 bucket definition into a ``Bucket``.

    Merges in any ``use``d template, gathers ``permission`` /
    ``deny_permission`` / ``allow_permission`` into a single combined
    ``permission`` document, and expands the ``require_mfa_to_delete``
    shorthand into the equivalent DeleteBucket policy statement.
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        # Template options sit underneath the explicitly supplied values
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The bucket takes its name from the configuration key it lives under
    bucket_name = meta.key_names()['_key_name_0']

    # The three permission flavours are normalised separately, then combined
    original_permission = sb.listof(resource_policy_dict()).normalise(
        meta.at("permission"),
        NotSpecified if "permission" not in val else val["permission"])
    deny_permission = sb.listof(resource_policy_dict(effect='Deny')).normalise(
        meta.at("deny_permission"),
        NotSpecified if "deny_permission" not in val else val["deny_permission"])
    allow_permission = sb.listof(resource_policy_dict(effect='Allow')).normalise(
        meta.at("allow_permission"),
        NotSpecified if "allow_permission" not in val else val["allow_permission"])

    # require_mfa_to_delete is an alias for this permission
    if val.get("require_mfa_to_delete") is True:
        delete_policy = {
            "action": "s3:DeleteBucket",
            "resource": {
                "s3": "__self__"
            },
            "Condition": {
                "Bool": {
                    "aws:MultiFactorAuthPresent": True
                }
            }
        }
        normalised_delete_policy = resource_policy_dict(effect='Allow').normalise(
            meta.at("require_mfa_to_delete"), delete_policy)
        allow_permission.append(normalised_delete_policy)

    # Work on a copy so the combined permission list doesn't leak back into
    # the shared configuration
    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket
        # acl accepts either a canned-ACL name or a full acl statement dict
        , acl=sb.defaulted(sb.match_spec(
              (six.string_types, canned_acl_spec())
            , (dict, acl_statement_spec('acl', 'acl'))
            ), None)
        , name=sb.overridden(bucket_name)
        , location=sb.defaulted(formatted_string, None)
        , permission=sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags=sb.dictof(sb.string_spec(), formatted_string)
        , website=sb.defaulted(website_statement_spec("website", "website"), None)
        , logging=sb.defaulted(logging_statement_spec("logging", "logging"), None)
        , lifecycle=sb.defaulted(sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None)
        ).normalise(meta, val)
def harpoon_spec(self):
    """Spec for the overall harpoon options"""
    string_opt = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    bool_opt = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    return create_spec(Harpoon
        , config=file_spec()
        , extra=defaulted(string_opt, "")
        , debug=defaulted(boolean(), False)

        # Which task/image to operate on
        , chosen_task=defaulted(string_opt, "list_tasks")
        , chosen_image=defaulted(string_opt, "")

        # Behaviour switches
        , flat=defaulted(bool_opt, False)
        , no_cleanup=defaulted(bool_opt, False)
        , interactive=defaulted(bool_opt, True)
        , silent_build=defaulted(bool_opt, False)
        , keep_replaced=defaulted(bool_opt, False)
        , ignore_missing=defaulted(bool_opt, False)
        , no_intervention=defaulted(bool_opt, False)
        , intervene_afterwards=defaulted(bool_opt, False)
        , do_push=defaulted(bool_opt, False)
        , only_pushable=defaulted(bool_opt, False)

        # Docker plumbing and output streams
        , docker_context=any_spec()
        , docker_context_maker=any_spec()
        , stdout=defaulted(any_spec(), sys.stdout)
        , tty_stdout=defaulted(any_spec(), lambda: sys.stdout)
        , tty_stderr=defaulted(any_spec(), lambda: sys.stderr)
        )
result = sb.create_spec(DomainName , name = sb.overridden(name) , gateway_location = sb.overridden(self.gateway_location) , zone = formatted_string() , stage = formatted_string() , base_path = sb.defaulted(formatted_string(), "(none)") , certificate = sb.required(certificate_spec()) ).normalise(meta, val) while result.zone and result.zone.endswith("."): result.zone = result.zone[:-1] return result mapping_spec = lambda: sb.create_spec(Mapping , content_type = sb.defaulted(formatted_string(), "application/json") , template = sb.defaulted(sb.string_spec(), "$input.json('$')") ) class post_lambda_spec(Spec): def normalise(self, meta, val): result = sb.create_spec(LambdaPostMethod , function = formatted_string() , location = formatted_string() , account = sb.optional_spec(formatted_string()) , require_api_key = sb.defaulted(sb.boolean(), False) , mapping = sb.defaulted(mapping_spec(), Mapping("application/json", "$input.json('$')")) ).normalise(meta, val) function = result.function location = None
formatter=MergedOptionStringFormatter), provider_type=sb.required(sb.string_spec()), username=sb.required(formatted_string), password=sb.required(formatted_string)) formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter) artifact_command_spec = lambda: sb.create_spec( ArtifactCommand, copy=sb.listof(artifact_path_spec()), modify=sb.dictof(sb.string_spec(), sb.set_options(append=sb.listof(formatted_string))), command=sb.listof(formatted_string), timeout=sb.defaulted(sb.integer_spec(), 600), temp_dir=sb.defaulted(formatted_string, None), add_into_tar=sb.listof(artifact_path_spec())) params_json_spec = lambda: sb.listof( sb.set_options(ParameterKey=sb.required(sb.any_spec()), ParameterValue=sb.required(sb.any_spec()))) params_yaml_spec = lambda: sb.dictof( sb.string_spec(), sb.formatted(sb.string_or_int_as_string_spec(), formatter=MergedOptionStringFormatter)) stack_json_spec = lambda: sb.set_options(Resources=sb.required( sb.dictof( sb.string_spec(),
def location(self, meta):
    """Resolve this resource's location, falling back to the default location for ``meta``."""
    spec = sb.defaulted(sb.string_spec(), self.default_location(meta))
    supplied = self.resource.get("location", NotSpecified)
    return spec.normalise(meta.at("location"), supplied)