def normalise(self, meta, val):
    """Normalise a gateway definition into a Gateway object.

    A ``use: <template>`` key merges defaults from
    ``meta.everything['templates']`` underneath ``val`` before the spec
    is applied, so explicitly supplied keys win.

    :raises BadTemplate: when the named template does not exist
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    # The configuration key this gateway lives under becomes its name
    gateway_name = meta.key_names()['_key_name_0']

    # Resolve location up front because custom_domain_name_spec needs it
    gateway_location = formatted_string().normalise(meta.at('location'), val.get('location', ''))

    return sb.create_spec(
        Gateway,
        name=sb.overridden(gateway_name),
        location=sb.required(formatted_string()),
        stages=sb.listof(formatted_string()),
        api_keys=sb.listof(api_key_spec()),
        domain_names=sb.dictof(sb.string_spec(), custom_domain_name_spec(gateway_location)),
        resources=sb.dictof(sb.string_spec(), gateway_resource_spec())).normalise(meta, val)
def normalise_filled(self, meta, val):
    """Normalise one netscaler configuration item.

    The item's type and name come from the configuration key names
    (``{_key_name_1}`` and ``{_key_name_0}`` respectively); the result
    is an instance of the class chosen for that type.
    """
    typ = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter).normalise(meta, val)
    name = formatted(overridden("{_key_name_0}"), formatter=MergedOptionStringFormatter).normalise(meta, val)

    # NOTE(review): ``special`` is empty, so every type currently maps to
    # GenericNetscalerConfig — presumably a hook for type-specific classes;
    # confirm before removing.
    special = {}
    kls = special.get(typ, GenericNetscalerConfig)

    formatted_string = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    # String values get formatted; anything else passes through unchanged
    formatted_options = dictof(string_spec(), match_spec((six.string_types, formatted_string), fallback=any_spec()))

    options = dict(
          typ=overridden(typ)
        , name=overridden(name)
        , bindings=dictof(string_spec(), netscaler_binding_spec())
        , tags=listof(string_spec())
        , options=formatted_options
        , overrides=formatted_options
        , binding_options=formatted_options
        , environments=optional_spec(listof(valid_environment_spec()))
        )

    # Only sslcertkey items accept a "link" option
    if typ == "sslcertkey":
        options["link"] = listof(string_spec())

    as_dict = set_options(**options).normalise(meta, val)
    # Keep only the declared keys when instantiating the config class
    return kls(**dict((name, as_dict[name]) for name in options))
def setup_addon_register(self, photons_app, __main__):
    """Create, populate and resolve the addon register"""
    self.addon_getter = AddonGetter()
    self.addon_getter.add_namespace("lifx.photons", Result.FieldSpec(), Addon.FieldSpec())

    register = Register(self.addon_getter, self)

    if "addons" in photons_app:
        addons = photons_app["addons"]
        looks_like_dict = type(addons) in (MergedOptions, dict) or getattr(addons, "is_dict", False)
        if looks_like_dict:
            spec = sb.dictof(sb.string_spec(), sb.listof(sb.string_spec()))
            meta = Meta(photons_app, []).at("addons")
            normalised = spec.normalise(meta, addons)
            for namespace, names in normalised.items():
                register.add_pairs(*[(namespace, name) for name in names])
    elif photons_app.get("default_activate_all_modules"):
        register.add_pairs(("lifx.photons", "__all__"))

    if __main__ is not None:
        register.add_pairs(("lifx.photons", "__main__"))

    # Import everything we registered, then resolve it
    register.recursive_import_known()
    register.recursive_resolve_imported()

    return register
def tags_spec(self):
    """Spec for AWS resource tags.

    Per the AWS tag restrictions
    (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html#tag-restrictions):
    keys are at most 127 characters; values are at most 255 characters
    and must not start with ``aws:`` after formatting.
    """
    key_spec = valid_string_spec(validators.regexed("^.{0,127}$"))
    value_spec = formatted(
        string_spec(),
        after_format=valid_string_spec(validators.regexed("^(?!aws:).{0,255}$")),
        formatter=MergedOptionStringFormatter)
    return dictof(key_spec, value_spec)
def normalise(self, meta, val):
    """Normalise a bucket definition into a Bucket object.

    Handles ``use`` templates, merges the three permission styles
    (``permission``, ``deny_permission``, ``allow_permission``) into a
    single ``permission`` list, and expands the
    ``require_mfa_to_delete`` shortcut into an explicit DeleteBucket
    statement.

    :raises BadTemplate: when a ``use`` template does not exist
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    # The configuration key is the bucket name
    bucket_name = meta.key_names()['_key_name_0']

    # deny_permission/allow_permission force that Effect on each statement
    original_permission = sb.listof(resource_policy_dict()).normalise(meta.at("permission"), NotSpecified if "permission" not in val else val["permission"])
    deny_permission = sb.listof(resource_policy_dict(effect='Deny')).normalise(meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"])
    allow_permission = sb.listof(resource_policy_dict(effect='Allow')).normalise(meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"])

    # require_mfa_to_delete is an alias for this permission
    if val.get("require_mfa_to_delete") is True:
        delete_policy = {"action": "s3:DeleteBucket", "resource": { "s3": "__self__" }, "Condition": { "Bool": { "aws:MultiFactorAuthPresent": True } } }
        normalised_delete_policy = resource_policy_dict(effect='Allow').normalise(meta.at("require_mfa_to_delete"), delete_policy)
        allow_permission.append(normalised_delete_policy)

    # Wrap so we can replace permission without mutating the caller's options
    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket
        , acl = sb.defaulted(sb.match_spec((six.string_types, canned_acl_spec()), (dict, acl_statement_spec('acl', 'acl'))), None)
        , name = sb.overridden(bucket_name)
        , location = sb.defaulted(formatted_string, None)
        , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags = sb.dictof(sb.string_spec(), formatted_string)
        , website = sb.defaulted(website_statement_spec("website", "website"), None)
        , logging = sb.defaulted(logging_statement_spec("logging", "logging"), None)
        , lifecycle = sb.defaulted(sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None)
        ).normalise(meta, val)
class Result(dictobj.Spec):
    """The object an addon resolver returns."""

    # Mapping of spec key -> an object with a ``normalise`` method
    specs = dictobj.Field(sb.dictof(spec_key_spec(), sb.has("normalise")))

    # Deliberately invalid: catches the common typo of "extra" for "extras"
    extra = dictobj.Field(no_such_key_spec("Use extras instead (notice the s!)"))

    # List of (namespace, (name, ...)) pairs of additional addons to load
    extras = dictobj.Field(sb.listof(sb.tuple_spec(sb.string_spec(), sb.tupleof(sb.string_spec()))))
def wait_condition_spec(self):
    """Spec for a wait_condition block on an image"""
    from harpoon.option_spec import image_objs

    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)

    return create_spec(
        image_objs.WaitCondition,
        harpoon=formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter),
        timeout=defaulted(integer_spec(), 300),
        wait_between_attempts=defaulted(float_spec(), 5),
        greps=optional_spec(dictof(fmt, fmt)),
        command=optional_spec(listof(fmt)),
        port_open=optional_spec(listof(integer_spec())),
        file_value=optional_spec(dictof(fmt, fmt)),
        curl_result=optional_spec(dictof(fmt, fmt)),
        file_exists=optional_spec(listof(fmt)),
        )
def normalise(self, meta, val):
    """Yield KMS ARNs for each account this policy applies to.

    ``val`` may be a key id/alias string, a ``{"key_id": ...}`` or
    ``{"alias": ...}`` dict, or a list of those.  ``__self__`` refers to
    the key the policy is defined on and is only valid when
    ``self.self_type`` is ``"key"``.

    :raises BadPolicy: when ``__self__`` is used on a non-key policy
    """
    accounts = list(self.accounts(meta))
    if not accounts:
        accounts = [self.default_account_id(meta)]

    for account_id in accounts:
        string_or_dict = sb.or_spec(sb.string_spec(), sb.dictof(sb.string_choice_spec(["key_id", "alias"]), sb.string_spec()))
        for key_id in sb.listof(string_or_dict).normalise(meta, val):
            alias = None
            if key_id == "__self__" or (isinstance(key_id, dict) and (key_id.get("alias") == "__self__" or key_id.get("key_id") == "__self__")):
                if self.self_type != "key":
                    raise BadPolicy("No __self__ key for this policy", meta=meta)
                else:
                    alias = self.self_name
                    location = self.default_location(meta)
            else:
                location = self.location(meta)

            # Work out whether we were given an alias or a raw key id
            if not alias:
                if isinstance(key_id, six.string_types):
                    alias = key_id
                else:
                    alias = key_id.get("alias")
                    key_id = key_id.get("key_id")

            # Aliases and raw key ids produce different ARN forms
            if alias:
                yield "arn:aws:kms:{0}:{1}:alias/{2}".format(location, account_id, alias)
            else:
                yield "arn:aws:kms:{0}:{1}:key/{2}".format(location, account_id, key_id)
def targets_spec(self):
    """
    Get us a dictionary of target name to Target object

    .. autoclass:: photons_app.option_spec.photons_app_spec.Target
    """
    target_spec = Target.FieldSpec(formatter=MergedOptionStringFormatter)
    return sb.dictof(self.target_name_spec, target_spec)
def authentications_spec(self):
    """Spec for a group of authentication options"""
    read_write = set_options(
        reading=optional_spec(authentication_spec()),
        writing=optional_spec(authentication_spec()),
        )
    return container_spec(authentication_objs.Authentication, dictof(string_spec(), read_write))
def wait_condition_spec(self):
    """Spec for a wait_condition block"""
    from harpoon.option_spec import image_objs

    fmt_string = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    options = dict(
          harpoon = formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter)
        , timeout = defaulted(integer_spec(), 300)
        , wait_between_attempts = defaulted(float_spec(), 5)
        , greps = optional_spec(dictof(fmt_string, fmt_string))
        , command = optional_spec(listof(fmt_string))
        , port_open = optional_spec(listof(integer_spec()))
        , file_value = optional_spec(dictof(fmt_string, fmt_string))
        , curl_result = optional_spec(dictof(fmt_string, fmt_string))
        , file_exists = optional_spec(listof(fmt_string))
        )
    return create_spec(image_objs.WaitCondition, **options)
def authentications_spec(self):
    """Spec for a group of authentication options"""
    per_name = set_options(
        reading=optional_spec(authentication_spec()),
        writing=optional_spec(authentication_spec()))
    grouped = dictof(string_spec(), per_name)
    return optional_spec(container_spec(authentication_objs.Authentication, grouped))
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular stack"""
    action_spec = defaulted(string_choice_spec(available_actions, "No such task"), default_action)
    task_spec = create_spec(
        task_objs.Task,
        action=action_spec,
        options=dictionary_spec(),
        overrides=dictionary_spec(),
        description=string_spec())
    return dictof(self.task_name_spec, task_spec)
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular image"""
    deprecation = validators.deprecated_key(
        "spec",
        "Use ``action`` and ``options`` instead (note that ``action`` defaults to run)")
    task_spec = create_spec(
        task_objs.Task,
        deprecation,
        action=defaulted(string_choice_spec(available_actions, "No such task"), default_action),
        options=dictionary_spec(),
        overrides=dictionary_spec(),
        description=string_spec())
    return dictof(self.task_name_spec, task_spec)
def normalise(self, meta, val):
    """Normalise a gateway definition into a Gateway object.

    A ``use: <template>`` key merges defaults from
    ``meta.everything['templates']`` underneath ``val`` first.

    :raises BadTemplate: when the named template does not exist
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    # The configuration key is the gateway name
    gateway_name = meta.key_names()['_key_name_0']
    # Resolve location first; custom_domain_name_spec needs it
    gateway_location = formatted_string().normalise(meta.at('location'), val.get('location', ''))

    return sb.create_spec(Gateway
        , name = sb.overridden(gateway_name)
        , location = sb.required(formatted_string())
        , stages = sb.listof(formatted_string())
        , api_keys = sb.listof(api_key_spec())
        , domain_names = sb.dictof(sb.string_spec(), custom_domain_name_spec(gateway_location))
        , resources = sb.dictof(sb.string_spec(), gateway_resource_spec())
        ).normalise(meta, val)
def normalise_filled(self, meta, val):
    """Normalise a code block into S3Code, InlineCode or DirectoryCode.

    Exactly one of ``s3``, ``inline`` or ``directory`` must be given.

    :raises BadSpecValue: when none or more than one is specified
    """
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]),
                    sb.any_spec()).normalise(meta, val)

    if not val:
        raise BadSpecValue(
            "Please specify s3, inline or directory for your code",
            meta=meta)

    if len(val) > 1:
        raise BadSpecValue(
            "Please only specify one of s3, inline or directory for your code",
            got=list(val.keys()),
            meta=meta)

    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(
            S3Code,
            key=formatted_string,
            bucket=formatted_string,
            version=sb.defaulted(sb.string_spec(), NotSpecified)).normalise(meta, val['s3'])
    elif "inline" in val:
        # Look up "runtime" on the parent of this code block (default python)
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = sb.formatted(
            sb.string_spec(),
            formatter=MergedOptionStringFormatter).normalise(
                meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode,
                              code=sb.string_spec(),
                              runtime=sb.overridden(runtime)).normalise(
                                  meta, {"code": val['inline']})
    else:
        directory = val['directory']
        # A bare string is shorthand for {"directory": <string>}
        if isinstance(val['directory'], six.string_types):
            directory = {"directory": val['directory']}
        if 'directory' in directory:
            formatted_string = sb.formatted(
                sb.string_spec(), formatter=MergedOptionStringFormatter)
            directory['directory'] = formatted_string.normalise(
                meta.at("directory").at("directory"), directory['directory'])
        return sb.create_spec(DirectoryCode,
                              directory=sb.directory_spec(),
                              exclude=sb.listof(
                                  sb.string_spec())).normalise(
                                      meta, directory)
def setup(self, **kwargs):
    """Validate the keyword arguments and store each one on this instance"""
    account_spec = sb.set_options(
        account_id=sb.required(sb.string_spec()),
        role_to_assume=sb.required(sb.string_spec()),
        )
    options_spec = sb.set_options(
        accounts=sb.required(sb.dictof(sb.string_spec(), account_spec)),
        ordered_accounts=sb.required(sb.listof(sb.string_spec())),
        cloudability_auth_token=sb.required(sb.any_spec()),
        )
    normalised = options_spec.normalise(Meta({}, []), kwargs)
    for key, value in normalised.items():
        setattr(self, key, value)
def tasks_spec(self, available_actions, default_action="run"):
    """Tasks for a particular image"""
    warn_on_spec = validators.deprecated_key(
        "spec",
        "Use ``action`` and ``options`` instead (note that ``action`` defaults to run)")
    chosen_action = defaulted(
        string_choice_spec(available_actions, "No such task"), default_action)
    task_spec = create_spec(
        task_objs.Task, warn_on_spec,
        action=chosen_action,
        options=dictionary_spec(),
        overrides=dictionary_spec(),
        description=string_spec())
    return dictof(self.task_name_spec, task_spec)
def extra_prepare(self, configuration, args_dict):
    """Called before the configuration.converters are activated"""
    # Merge harpoon options from configuration and command line
    # (command line values win)
    harpoon = MergedOptions.using(
        configuration.get('harpoon', MergedOptions()).as_dict(),
        dict(args_dict.get("harpoon", MergedOptions()).items())).as_dict()

    # Args_dict may itself be a MergedOptions
    while "harpoon" in args_dict:
        del args_dict["harpoon"]

    # Create the addon getter and register the crosshair namespace
    self.addon_getter = AddonGetter()
    self.addon_getter.add_namespace("harpoon.crosshairs", Result.FieldSpec(), Addon.FieldSpec())

    # Initiate the addons from our configuration
    self.register = Register(self.addon_getter, self)
    if ("addons" in harpoon) and (
            type(harpoon["addons"]) in (MergedOptions, dict) or getattr(harpoon["addons"], "is_dict", False)):
        for namespace, adns in sb.dictof(
                sb.string_spec(), sb.listof(sb.string_spec())).normalise(
                    Meta(harpoon, []).at("addons"), harpoon["addons"]).items():
            self.register.add_pairs(*[(namespace, adn) for adn in adns])

    # Import our addons
    self.register.recursive_import_known()

    # Resolve our addons
    self.register.recursive_resolve_imported()

    # Make sure images is started
    if "images" not in self.configuration:
        self.configuration["images"] = {}

    # Add our special stuff to the configuration
    self.configuration.update(
        {
            "$@": harpoon.get("extra", ""),
            "bash": args_dict["bash"] or NotSpecified,
            "harpoon": harpoon,
            "command": args_dict['command'] or NotSpecified,
            "assume_role": args_dict["assume_role"] or NotSpecified
        }, source="<args_dict>")
def extra_configuration_collection(self, configuration):
    """Hook to do any extra configuration collection or converter registration"""
    harpoon_spec = HarpoonSpec()

    # Each image gets its own converters
    for image in configuration.get('images', {}).keys():
        self.make_image_converters(image, configuration, harpoon_spec)

    converters = {
        (0, ("content", )): sb.dictof(sb.string_spec(), sb.string_spec()),
        (0, ("harpoon", )): harpoon_spec.harpoon_spec,
        (0, ("authentication", )): harpoon_spec.authentications_spec,
        }
    self.register_converters(converters, Meta, configuration, sb.NotSpecified)

    # Some other code works better when harpoon no existy
    if configuration["harpoon"] is sb.NotSpecified:
        del configuration["harpoon"]
def normalise(self, meta, val):
    """Normalise a bucket definition into a Bucket object.

    Handles ``use`` templates and merges the three permission styles
    (``permission``, ``deny_permission``, ``allow_permission``) into a
    single ``permission`` list before applying the spec.

    :raises BadTemplate: when a ``use`` template does not exist
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    # The configuration key is the bucket name
    bucket_name = meta.key_names()['_key_name_0']

    # deny_permission/allow_permission force that Effect on each statement
    original_permission = sb.listof(resource_policy_dict()).normalise(
        meta.at("permission"),
        NotSpecified if "permission" not in val else val["permission"])
    deny_permission = sb.listof(
        resource_policy_dict(effect='Deny')).normalise(
            meta.at("deny_permission"),
            NotSpecified if "deny_permission" not in val else val["deny_permission"])
    allow_permission = sb.listof(
        resource_policy_dict(effect='Allow')).normalise(
            meta.at("allow_permission"),
            NotSpecified if "allow_permission" not in val else val["allow_permission"])

    # Wrap so we can replace permission without mutating the caller's options
    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(
        Bucket,
        name=sb.overridden(bucket_name),
        location=sb.required(formatted_string),
        permission=sb.container_spec(
            Document,
            sb.listof(resource_policy_statement_spec(
                'bucket', bucket_name))),
        tags=sb.dictof(sb.string_spec(), formatted_string)).normalise(meta, val)
def extra_configuration_collection(self, configuration):
    """
    Hook to do any extra configuration collection or converter registration
    """
    spec = HarpoonSpec()

    for image_name in configuration.get('images', {}).keys():
        self.make_image_converters(image_name, configuration, spec)

    # All three converters register at the top level (depth 0)
    by_path = {
        ("content", ): sb.dictof(sb.string_spec(), sb.string_spec()),
        ("harpoon", ): spec.harpoon_spec,
        ("authentication", ): spec.authentications_spec,
        }
    converters = dict(((0, path), converter) for path, converter in by_path.items())
    self.register_converters(converters, Meta, configuration, sb.NotSpecified)

    # Some other code works better when harpoon no existy
    if configuration["harpoon"] is sb.NotSpecified:
        del configuration["harpoon"]
def normalise(self, meta, val):
    """Yield KMS ARNs for each account this policy applies to.

    ``val`` may be a key id/alias string, a ``{"key_id": ...}`` or
    ``{"alias": ...}`` dict, or a list of those.  ``__self__`` refers to
    the key the policy is defined on and is only valid when
    ``self.self_type`` is ``"key"``.

    :raises BadPolicy: when ``__self__`` is used on a non-key policy
    """
    accounts = list(self.accounts(meta))
    if not accounts:
        accounts = [self.default_account_id(meta)]

    for account_id in accounts:
        string_or_dict = sb.or_spec(
            sb.string_spec(),
            sb.dictof(sb.string_choice_spec(["key_id", "alias"]),
                      sb.string_spec()))

        for key_id in sb.listof(string_or_dict).normalise(meta, val):
            alias = None
            if key_id == "__self__" or (
                    isinstance(key_id, dict) and
                    (key_id.get("alias") == "__self__" or
                     key_id.get("key_id") == "__self__")):
                if self.self_type != "key":
                    raise BadPolicy("No __self__ key for this policy", meta=meta)
                else:
                    alias = self.self_name
                    location = self.default_location(meta)
            else:
                location = self.location(meta)

            # Work out whether we were given an alias or a raw key id
            if not alias:
                if isinstance(key_id, six.string_types):
                    alias = key_id
                else:
                    alias = key_id.get("alias")
                    key_id = key_id.get("key_id")

            # Aliases and raw key ids produce different ARN forms
            if alias:
                yield "arn:aws:kms:{0}:{1}:alias/{2}".format(
                    location, account_id, alias)
            else:
                yield "arn:aws:kms:{0}:{1}:key/{2}".format(
                    location, account_id, key_id)
def normalise_filled(self, meta, val):
    """Normalise a code block into S3Code, InlineCode or DirectoryCode.

    Exactly one of ``s3``, ``inline`` or ``directory`` must be given.

    :raises BadSpecValue: when none or more than one is specified
    """
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)

    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)

    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(S3Code
            , key = formatted_string
            , bucket = formatted_string
            , version = sb.defaulted(sb.string_spec(), NotSpecified)
            ).normalise(meta, val['s3'])
    elif "inline" in val:
        # Look up "runtime" on the parent of this code block (default python)
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode
            , code = sb.string_spec()
            , runtime = sb.overridden(runtime)
            ).normalise(meta, {"code": val['inline']})
    else:
        directory = val['directory']
        # A bare string is shorthand for {"directory": <string>}
        if isinstance(val['directory'], six.string_types):
            directory = {"directory": val['directory']}
        if 'directory' in directory:
            formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
            directory['directory'] = formatted_string.normalise(meta.at("directory").at("directory"), directory['directory'])
        return sb.create_spec(DirectoryCode
            , directory = sb.directory_spec()
            , exclude = sb.listof(sb.string_spec())
            ).normalise(meta, directory)
def normalise(self, meta, val):
    """Normalise a ``[COMMAND, {options}]`` block into Command objects.

    Only ADD and COPY are accepted, and exactly one of them.
    """
    val = sb.dictof(sb.string_spec(), sb.dictionary_spec()).normalise(meta, val)

    if len(val) != 1:
        raise BadSpecValue("Commands specified as [COMMAND, {options}] may only have one option (either ADD or COPY)", got=val, meta=meta)

    command, options = list(val.items())[0]
    if command not in ("ADD", "COPY"):
        raise BadSpecValue("Commands specified as [COMMAND, {options}] may only have one option (either ADD or COPY)", got=command, meta=meta)

    spec = complex_ADD_spec() if command == "ADD" else complex_COPY_spec()

    result = []
    for normalised in spec.normalise(meta.at(command), options):
        if isinstance(normalised, Command):
            result.append(normalised)
        else:
            result.extend(normalised)
    return result
def harpoon_spec(self):
    """Spec for harpoon options"""
    fmt_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    fmt_boolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    return create_spec(
        Harpoon,
        config=optional_spec(file_spec()),
        tag=optional_spec(string_spec()),
        extra=defaulted(fmt_string, ""),
        debug=defaulted(boolean(), False),
        addons=dictof(string_spec(), listof(string_spec())),
        artifact=optional_spec(fmt_string),
        extra_files=listof(string_spec()),
        chosen_task=defaulted(fmt_string, "list_tasks"),
        chosen_image=defaulted(fmt_string, ""),
        flat=defaulted(fmt_boolean, False),
        no_cleanup=defaulted(fmt_boolean, False),
        interactive=defaulted(fmt_boolean, True),
        silent_build=defaulted(fmt_boolean, False),
        keep_replaced=defaulted(fmt_boolean, False),
        ignore_missing=defaulted(fmt_boolean, False),
        no_intervention=defaulted(fmt_boolean, False),
        intervene_afterwards=defaulted(fmt_boolean, False),
        do_push=defaulted(fmt_boolean, False),
        only_pushable=defaulted(fmt_boolean, False),
        docker_context=any_spec(),
        docker_context_maker=any_spec(),
        stdout=defaulted(any_spec(), sys.stdout),
        tty_stdin=defaulted(any_spec(), None),
        tty_stdout=defaulted(any_spec(), lambda: sys.stdout),
        tty_stderr=defaulted(any_spec(), lambda: sys.stderr),
        )
def harpoon_spec(self):
    """Spec for harpoon options"""
    string_fmt = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    boolean_fmt = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    options = dict(
          config = optional_spec(file_spec())
        , tag = optional_spec(string_spec())
        , extra = defaulted(string_fmt, "")
        , debug = defaulted(boolean(), False)
        , addons = dictof(string_spec(), listof(string_spec()))
        , artifact = optional_spec(string_fmt)
        , extra_files = listof(string_spec())
        , chosen_task = defaulted(string_fmt, "list_tasks")
        , chosen_image = defaulted(string_fmt, "")
        , flat = defaulted(boolean_fmt, False)
        , no_cleanup = defaulted(boolean_fmt, False)
        , interactive = defaulted(boolean_fmt, True)
        , silent_build = defaulted(boolean_fmt, False)
        , keep_replaced = defaulted(boolean_fmt, False)
        , ignore_missing = defaulted(boolean_fmt, False)
        , no_intervention = defaulted(boolean_fmt, False)
        , intervene_afterwards = defaulted(boolean_fmt, False)
        , do_push = defaulted(boolean_fmt, False)
        , only_pushable = defaulted(boolean_fmt, False)
        , docker_context = any_spec()
        , docker_context_maker = any_spec()
        , stdout = defaulted(any_spec(), sys.stdout)
        , tty_stdin = defaulted(any_spec(), None)
        , tty_stdout = defaulted(any_spec(), lambda: sys.stdout)
        , tty_stderr = defaulted(any_spec(), lambda: sys.stderr)
        )
    return create_spec(Harpoon, **options)
def normalise(self, meta, val):
    """Normalise a bucket definition into a Bucket object.

    Handles ``use`` templates and merges the three permission styles
    (``permission``, ``deny_permission``, ``allow_permission``) into a
    single ``permission`` list before applying the spec.

    :raises BadTemplate: when a ``use`` template does not exist
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    # The configuration key is the bucket name
    bucket_name = meta.key_names()['_key_name_0']

    # deny_permission/allow_permission force that Effect on each statement
    original_permission = sb.listof(resource_policy_dict()).normalise(meta.at("permission"), NotSpecified if "permission" not in val else val["permission"])
    deny_permission = sb.listof(resource_policy_dict(effect='Deny')).normalise(meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"])
    allow_permission = sb.listof(resource_policy_dict(effect='Allow')).normalise(meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"])

    # Wrap so we can replace permission without mutating the caller's options
    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket
        , name = sb.overridden(bucket_name)
        , location = sb.required(formatted_string)
        , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags = sb.dictof(sb.string_spec(), formatted_string)
        ).normalise(meta, val)
def setup_addon_register(self, harpoon):
    """Create, populate and resolve the addon register"""
    self.addon_getter = AddonGetter()
    self.addon_getter.add_namespace("harpoon.crosshairs", Result.FieldSpec(), Addon.FieldSpec())

    register = Register(self.addon_getter, self)

    if "addons" in harpoon:
        addons = harpoon["addons"]
        looks_like_dict = type(addons) in (MergedOptions, dict) or getattr(addons, "is_dict", False)
        if looks_like_dict:
            meta = Meta(harpoon, []).at("addons")
            spec = sb.dictof(sb.string_spec(), sb.listof(sb.string_spec()))
            for namespace, names in spec.normalise(meta, addons).items():
                register.add_pairs(*[(namespace, name) for name in names])

    # Import everything we registered, then resolve it
    register.recursive_import_known()
    register.recursive_resolve_imported()

    return register
def __register__():
    """Register the encryption_keys section of configuration"""
    keys_spec = sb.dictof(sb.string_spec(), encryption_keys_spec())
    return {(10, "encryption_keys"): sb.container_spec(EncryptionKeys, keys_spec)}
def __register__():
    """Hook registering the encryption_keys configuration block"""
    spec = sb.container_spec(
        EncryptionKeys,
        sb.dictof(sb.string_spec(), encryption_keys_spec()))
    return {
        (10, "encryption_keys"): spec,
        }
def register_configuration(kls):
    """Register the kms_secrets configuration block"""
    secrets_spec = sb.dictof(sb.string_spec(), kms_secret_spec())
    return {(0, "kms_secrets"): secrets_spec}
def normalise(self, meta, val):
    """Normalise the dashboards, making sure "/" exists as the index"""
    dashboards = dictionary_spec().normalise(meta, val).as_dict()
    if "/" not in dashboards:
        dashboards["/"] = {"is_index": True}
    spec = dictof(string_spec(), dashboard_spec())
    return spec.normalise(meta, dashboards)
def accounts_spec(self):
    """Spec for accounts options"""
    account_id = formatted(
        valid_account_id(),
        MergedOptionStringFormatter,
        expected_type=six.string_types)
    return dictof(string_spec(), account_id)
def environments_spec(self):
    """Spec for each environment options"""
    by_type = match_spec(
        (str, copy_environment_spec()),
        (dict, self.environment_spec))
    return dictof(string_spec(), by_type)
def image_spec(self):
    """Spec for each image.

    Normalises one ``images.<name>`` block into an image_objs.Image,
    defaulting the image name to its configuration key.
    """
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs

    class persistence_shell_spec(Spec):
        """Make the persistence shell default to the shell on the image"""
        # NOTE(review): persistence is deprecated below and this class is not
        # referenced in the spec — presumably left over; confirm before removing.
        def normalise(self, meta, val):
            shell = defaulted(string_spec(), "/bin/bash").normalise(meta, meta.everything[["images", meta.key_names()["_key_name_2"]]].get("shell", NotSpecified))
            shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), shell).normalise(meta, val)
            return shell

    return create_spec(image_objs.Image
        , validators.deprecated_key("persistence", "The persistence feature has been removed")
        , validators.deprecated_key("squash_after", "The squash feature has been removed")
        , validators.deprecated_key("squash_before_push", "The squash feature has been removed")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon = any_spec()

        # default the name to the key of the image
        , tag = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name = optional_spec(string_spec())
        , image_index = formatted(defaulted(string_spec(), ""), formatter=MergedOptionStringFormatter)
        , container_name = optional_spec(string_spec())
        , image_name_prefix = defaulted(string_spec(), "")
        , no_tty_option = defaulted(formatted(boolean(), formatter=MergedOptionStringFormatter), False)
        , user = defaulted(string_spec(), None)
        , configuration = any_spec()
        , vars = dictionary_spec()
        , assume_role = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , deleteable_image = defaulted(boolean(), False)
        , authentication = self.authentications_spec

        # The spec itself
        , shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
        , bash = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands = required(container_spec(Commands, listof(command_spec())))
        , cache_from = delayed(or_spec(boolean(), listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))))
        , cleanup_intermediate_images = defaulted(boolean(), True)
        , links = listof(specs.link_spec(), expect=image_objs.Link)
        , context = self.context_spec
        , wait_condition = optional_spec(self.wait_condition_spec)
        , lxc_conf = defaulted(filename_spec(), None)

        , volumes = create_spec(image_objs.Volumes
            , mount = listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with = listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )

        , dependency_options = dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached = defaulted(boolean(), False)
                , wait_condition = optional_spec(self.wait_condition_spec)
                )
            )

        , env = listof(specs.env_spec(), expect=image_objs.Environment)
        , ports = listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits = defaulted(listof(dictionary_spec()), None)
        , log_config = defaulted(listof(dictionary_spec()), None)
        , security_opt = defaulted(listof(string_spec()), None)
        , read_only_rootfs = defaulted(boolean(), False)

        # Extra options passed straight through to docker
        , other_options = create_spec(other_options
            , start = dictionary_spec()
            , build = dictionary_spec()
            , create = dictionary_spec()
            , host_config = dictionary_spec()
            )

        , network = create_spec(image_objs.Network
            , dns = defaulted(listof(string_spec()), None)
            , mode = defaulted(string_spec(), None)
            , hostname = defaulted(string_spec(), None)
            , domainname = defaulted(string_spec(), None)
            , disabled = defaulted(boolean(), False)
            , dns_search = defaulted(listof(string_spec()), None)
            , extra_hosts = listof(string_spec())
            , network_mode = defaulted(string_spec(), None)
            , publish_all_ports = defaulted(boolean(), False)
            )

        , cpu = create_spec(image_objs.Cpu
            , cap_add = defaulted(listof(string_spec()), None)
            , cpuset_cpus = defaulted(string_spec(), None)
            , cpuset_mems = defaulted(string_spec(), None)
            , cap_drop = defaulted(listof(string_spec()), None)
            , mem_limit = defaulted(integer_spec(), 0)
            , cpu_shares = defaulted(integer_spec(), None)
            , memswap_limit = defaulted(integer_spec(), 0)
            )

        , devices = defaulted(listof(dictionary_spec()), None)
        , privileged = defaulted(boolean(), False)
        , restart_policy = defaulted(string_spec(), None)
        )
def __register__():
    """Register the buckets configuration block"""
    bucket_specs = sb.dictof(sb.string_spec(), buckets_spec())
    return {(80, "buckets"): sb.container_spec(Buckets, bucket_specs)}
def image_spec(self):
    """
    Spec for each image.

    Returns a ``create_spec`` that normalises a single image definition into an
    ``image_objs.Image``. Deprecated keys are rejected up front with a pointer
    to their replacement; the remaining keyword specs define each field of the
    Image object.
    """
    # Imported lazily here rather than at module level (matches file style).
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(
        image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``"),
        validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``"),
        validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``"),
        validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        image_name=optional_spec(string_spec()),
        image_index=defaulted(string_spec(), ""),
        container_name=optional_spec(string_spec()),
        image_name_prefix=defaulted(string_spec(), ""),
        user=defaulted(string_spec(), None),
        # NOTE: time.time() is evaluated once when the spec is built, not per normalise call.
        mtime=defaulted(any_spec(), time.time()),
        configuration=any_spec(),
        vars=dictionary_spec(),
        deleteable_image=defaulted(boolean(), False)

        # The spec itself
        # bash/command are delayed so their format strings resolve lazily.
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        commands=required(container_spec(Commands, listof(command_spec()))),
        squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        # Recursive image options; image_name is delayed because it formats a
        # value ("images.{_key_name_2}.image_name") that may not exist yet.
        recursive=optional_spec(
            create_spec(
                image_objs.Recursive,
                action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                persist=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
                image_name=delayed(
                    many_format(
                        overridden("images.{_key_name_2}.image_name"),
                        formatter=MergedOptionStringFormatter)))),
        links=listof(specs.link_spec(), expect=image_objs.Link),
        context=self.context_spec,
        wait_condition=optional_spec(self.wait_condition_spec),
        lxc_conf=defaulted(filename_spec(), None),
        volumes=create_spec(
            image_objs.Volumes,
            mount=listof(specs.mount_spec(), expect=image_objs.Mount),
            share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))),
        dependency_options=dictof(
            specs.image_name_spec(),
            create_spec(
                image_objs.DependencyOptions,
                attached=defaulted(boolean(), False),
                wait_condition=optional_spec(self.wait_condition_spec))),
        env=listof(specs.env_spec(), expect=image_objs.Environment),
        ports=listof(specs.port_spec(), expect=image_objs.Port),
        ulimits=defaulted(listof(dictionary_spec()), None),
        log_config=defaulted(listof(dictionary_spec()), None),
        security_opt=defaulted(listof(string_spec()), None),
        read_only_rootfs=defaulted(boolean(), False),
        # Raw option dictionaries passed straight through to docker-py calls.
        other_options=create_spec(
            other_options,
            start=dictionary_spec(),
            build=dictionary_spec(),
            create=dictionary_spec(),
            host_config=dictionary_spec()),
        network=create_spec(
            image_objs.Network,
            dns=defaulted(listof(string_spec()), None),
            mode=defaulted(string_spec(), None),
            hostname=defaulted(string_spec(), None),
            domainname=defaulted(string_spec(), None),
            disabled=defaulted(boolean(), False),
            dns_search=defaulted(listof(string_spec()), None),
            extra_hosts=listof(string_spec()),
            network_mode=defaulted(string_spec(), None),
            publish_all_ports=defaulted(boolean(), False)),
        # NOTE(review): cap_add/cap_drop are boolean() here, but an earlier
        # variant of this spec used listof(string_spec()) — confirm which the
        # runtime expects before relying on these fields.
        cpu=create_spec(
            image_objs.Cpu,
            cap_add=defaulted(boolean(), None),
            cpuset=defaulted(listof(string_spec()), None),
            cap_drop=defaulted(boolean(), None),
            mem_limit=defaulted(integer_spec(), 0),
            cpu_shares=defaulted(integer_spec(), None),
            memswap_limit=defaulted(integer_spec(), 0)),
        devices=defaulted(listof(dictionary_spec()), None),
        privileged=defaulted(boolean(), False),
        restart_policy=defaulted(string_spec(), None))
def stack_spec(self):
    """
    Spec for each stack.

    Returns the ``create_spec`` that normalises one stack definition into a
    ``stack_objs.Stack``. Deprecated keys fail fast with a pointer to their
    replacement; ``{_key_name_1}`` in defaults refers to the stack's key in
    the configuration.
    """
    return create_spec(
        stack_objs.Stack,
        # Deprecated keys and their replacements.
        validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker1``"),
        validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``"),
        validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``"),
        validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
        validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``"),
        bespin=any_spec(),
        # Naming: name/stack_name default to the configuration key.
        name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        stack_name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        environment=formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter),
        env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        build_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        stack_name_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        tags=self.tags_spec,
        termination_protection=defaulted(boolean(), False),
        # CloudFormation template / parameter / policy file locations.
        stack_json=valid_stack_json(default="{config_root}/{_key_name_1}.json"),
        stack_yaml=valid_stack_yaml(default="{config_root}/{_key_name_1}.yaml"),
        params_json=valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json"),
        params_yaml=valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml"),
        stack_policy=valid_policy_json(default="{config_root}/{_key_name_1}-policy.json"),
        role_name=formatted(string_spec(), formatter=MergedOptionStringFormatter),
        # Build ordering and timeouts.
        build_first=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_after=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_timeout=defaulted(integer_spec(), 1200),
        ignore_deps=defaulted(boolean(), False),
        # vars is delayed so variables can reference other stacks lazily.
        vars=delayed(dictof(string_spec(), stack_specs.var_spec(), nested=True)),
        skip_update_if_equivalent=listof(stack_specs.skipper_spec()),
        suspend_actions=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        artifact_retention_after_deployment=defaulted(boolean(), False),
        command=optional_spec(string_spec()),
        netscaler=optional_spec(self.netscaler_spec),
        notify_stackdriver=defaulted(boolean(), False),
        stackdriver=optional_spec(
            create_spec(
                stack_objs.Stackdriver,
                api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                deployment_version=defaulted(
                    formatted(string_spec(), formatter=MergedOptionStringFormatter),
                    "<version>"))),
        dns=optional_spec(
            stack_specs.dns_spec(
                create_spec(
                    stack_objs.DNS,
                    vars=dictof(string_spec(), formatted(string_spec(), formatter=MergedOptionStringFormatter), nested=True),
                    providers=dictof(string_spec(), stack_specs.dns_provider_spec()),
                    sites=delayed(dictof(string_spec(), stack_specs.dns_site_spec()))))),
        scaling_options=create_spec(
            ScalingOptions,
            highest_min=defaulted(integer_spec(), 2),
            instance_count_limit=defaulted(integer_spec(), 10)),
        # Artifacts: named collection; each artifact must give each file either
        # literal ``content`` or a ``task`` that produces it.
        artifacts=container_spec(
            artifact_objs.ArtifactCollection,
            dictof(
                string_spec(),
                create_spec(
                    artifact_objs.Artifact,
                    not_created_here=defaulted(boolean(), False),
                    compression_type=string_choice_spec(["gz", "xz"]),
                    history_length=integer_spec(),
                    cleanup_prefix=optional_spec(string_spec()),
                    upload_to=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                    commands=listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand),
                    paths=listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath),
                    files=listof(
                        create_spec(
                            artifact_objs.ArtifactFile,
                            validators.has_either(["content", "task"]),
                            content=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                            task=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                            path=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                            task_runner=formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter)))))),
        newrelic=optional_spec(
            create_spec(
                stack_objs.NewRelic,
                api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                account_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                application_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
                deployed_version=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)))),
        # Downtime options keyed by alerting system; keys validated against
        # the known alerting systems.
        downtimer_options=optional_spec(
            dictof(
                valid_string_spec(valid_alerting_system()),
                create_spec(
                    stack_objs.DowntimerOptions,
                    hosts=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))))),
        alerting_systems=optional_spec(dictof(string_spec(), self.alerting_system_spec)),
        ssh=optional_spec(
            create_spec(
                stack_objs.SSH,
                validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
                user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                instance_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                address=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                instance=optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
                auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                bastion_key_path=formatted(
                    defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"),
                    formatter=MergedOptionStringFormatter),
                instance_key_path=formatted(
                    defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"),
                    formatter=MergedOptionStringFormatter),
                storage_type=formatted(
                    defaulted(string_choice_spec(["url", "rattic"]), "url"),
                    formatter=MergedOptionStringFormatter),
                storage_host=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))),
        confirm_deployment=optional_spec(self.confirm_deployment_spec))
def __register__():
    """Hook point: register the ``buckets`` configuration section."""
    by_name = sb.dictof(sb.string_spec(), buckets_spec())
    return {(80, "buckets"): sb.container_spec(Buckets, by_name)}
def normalise_filled(s, meta, val):
    # Normalise a dict of {name: [formatted strings]} after exposing the
    # sibling "environments" section to the string formatter.
    # NOTE(review): ``s`` is the spec instance (conventionally ``self``);
    # ``this`` is a free name presumably captured from an enclosing scope —
    # confirm it resolves where this def lives.
    # Wrap meta.everything so the extra key doesn't leak into the original options.
    meta.everything = meta.everything.wrapped()
    meta.everything["__site_environments__"] = this[
        "environments"].as_dict()
    spec = sb.dictof(sb.string_spec(), sb.listof(formatted_string))
    return spec.normalise(meta, val.as_dict())
def plans_spec(self):
    """Spec for plans: each plan name maps to a list of strings."""
    plan_name = string_spec()
    plan_entries = listof(string_spec())
    return dictof(plan_name, plan_entries)
return sb.create_spec(UltraDNSProvider, name=sb.formatted( sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter), provider_type=sb.required(sb.string_spec()), username=sb.required(formatted_string), password=sb.required(formatted_string)) formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter) artifact_command_spec = lambda: sb.create_spec( ArtifactCommand, copy=sb.listof(artifact_path_spec()), modify=sb.dictof(sb.string_spec(), sb.set_options(append=sb.listof(formatted_string))), command=sb.listof(formatted_string), timeout=sb.defaulted(sb.integer_spec(), 600), temp_dir=sb.defaulted(formatted_string, None), add_into_tar=sb.listof(artifact_path_spec())) params_json_spec = lambda: sb.listof( sb.set_options(ParameterKey=sb.required(sb.any_spec()), ParameterValue=sb.required(sb.any_spec()))) params_yaml_spec = lambda: sb.dictof( sb.string_spec(), sb.formatted(sb.string_or_int_as_string_spec(), formatter=MergedOptionStringFormatter)) stack_json_spec = lambda: sb.set_options(Resources=sb.required(
def image_spec(self):
    """
    Spec for each image.

    Returns a ``create_spec`` that normalises a single image definition into an
    ``image_objs.Image``. This variant replaces the old ``recursive`` option
    with ``persistence`` (``recursive`` is rejected as deprecated).
    """
    # Imported lazily here rather than at module level (matches file style).
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(
        image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``"),
        validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``"),
        validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``"),
        validators.deprecated_key("parent_dir", "Use ``context.parent_dir``"),
        validators.deprecated_key("recursive", "Use ``persistence``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        image_name=optional_spec(string_spec()),
        image_index=defaulted(string_spec(), ""),
        container_name=optional_spec(string_spec()),
        image_name_prefix=defaulted(string_spec(), ""),
        user=defaulted(string_spec(), None),
        # NOTE: time.time() is evaluated once when the spec is built, not per normalise call.
        mtime=defaulted(any_spec(), time.time()),
        configuration=any_spec(),
        vars=dictionary_spec(),
        deleteable_image=defaulted(boolean(), False)

        # The spec itself
        # bash/command are delayed so their format strings resolve lazily.
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        commands=required(container_spec(Commands, listof(command_spec()))),
        squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        # Persistence options; image_name is delayed because it formats a
        # value ("images.{_key_name_2}.image_name") that may not exist yet.
        persistence=optional_spec(
            create_spec(
                image_objs.Persistence,
                validators.deprecated_key("persist", "Use ``folders``"),
                action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                folders=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
                cmd=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                shell=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash"),
                image_name=delayed(
                    many_format(
                        overridden("images.{_key_name_2}.image_name"),
                        formatter=MergedOptionStringFormatter
                    )
                ),
            )
        ),
        links=listof(specs.link_spec(), expect=image_objs.Link),
        context=self.context_spec,
        wait_condition=optional_spec(self.wait_condition_spec),
        lxc_conf=defaulted(filename_spec(), None),
        volumes=create_spec(
            image_objs.Volumes,
            mount=listof(specs.mount_spec(), expect=image_objs.Mount),
            share_with=listof(
                formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image)
            ),
        ),
        dependency_options=dictof(
            specs.image_name_spec(),
            create_spec(
                image_objs.DependencyOptions,
                attached=defaulted(boolean(), False),
                wait_condition=optional_spec(self.wait_condition_spec),
            ),
        ),
        env=listof(specs.env_spec(), expect=image_objs.Environment),
        ports=listof(specs.port_spec(), expect=image_objs.Port),
        ulimits=defaulted(listof(dictionary_spec()), None),
        log_config=defaulted(listof(dictionary_spec()), None),
        security_opt=defaulted(listof(string_spec()), None),
        read_only_rootfs=defaulted(boolean(), False),
        # Raw option dictionaries passed straight through to docker-py calls.
        other_options=create_spec(
            other_options,
            start=dictionary_spec(),
            build=dictionary_spec(),
            create=dictionary_spec(),
            host_config=dictionary_spec(),
        ),
        network=create_spec(
            image_objs.Network,
            dns=defaulted(listof(string_spec()), None),
            mode=defaulted(string_spec(), None),
            hostname=defaulted(string_spec(), None),
            domainname=defaulted(string_spec(), None),
            disabled=defaulted(boolean(), False),
            dns_search=defaulted(listof(string_spec()), None),
            extra_hosts=listof(string_spec()),
            network_mode=defaulted(string_spec(), None),
            publish_all_ports=defaulted(boolean(), False),
        ),
        # NOTE(review): cap_add/cap_drop are boolean() here, but an earlier
        # variant of this spec used listof(string_spec()) — confirm which the
        # runtime expects before relying on these fields.
        cpu=create_spec(
            image_objs.Cpu,
            cap_add=defaulted(boolean(), None),
            cpuset=defaulted(listof(string_spec()), None),
            cap_drop=defaulted(boolean(), None),
            mem_limit=defaulted(integer_spec(), 0),
            cpu_shares=defaulted(integer_spec(), None),
            memswap_limit=defaulted(integer_spec(), 0),
        ),
        devices=defaulted(listof(dictionary_spec()), None),
        privileged=defaulted(boolean(), False),
        restart_policy=defaulted(string_spec(), None),
    )
if isinstance(val, Command): result.append(val) else: result.extend(val) return result class has_a_space(validators.Validator): def validate(self, meta, val): if ' ' not in val: raise BadOption( "Expected string to have a space (<ACTION> <COMMAND>)", meta=meta, got=val) return val string_command_spec = lambda: sb.container_spec( Command, sb.valid_string_spec(has_a_space())) # Only support ADD commands for the dictionary representation atm dict_key = sb.valid_string_spec(validators.choice("ADD")) dictionary_command_spec = lambda: convert_dict_command_spec( sb.dictof(dict_key, complex_ADD_spec())) # The main spec # We match against, strings, lists, dictionaries and Command objects with different specs command_spec = lambda: sb.match_spec( (six.string_types, string_command_spec()), (list, array_command_spec()), (dict, dictionary_command_spec()), (Command, sb.any_spec()))
def templates_spec(self):
    """Spec for templates: arbitrary dictionaries keyed by template name."""
    key_spec = string_spec()
    value_spec = dictionary_spec()
    return dictof(key_spec, value_spec)
, name=overridden(name) , bindings=dictof(string_spec() , netscaler_binding_spec()) , tags=listof(string_spec()) , options=formatted_options , overrides=formatted_options , binding_options=formatted_options , environments=optional_spec(listof(valid_environment_spec())) ) if typ == "sslcertkey": options["link"] = listof(string_spec()) as_dict = set_options(**options).normalise(meta, val) return kls(**dict((name, as_dict[name]) for name in options)) configuration_spec = lambda: dictof(string_spec(), dictof(string_spec(), netscaler_config_spec())) class GenericNetscalerConfig(dictobj): fields = ["typ", "name", "bindings", "tags", "options", "binding_options", "environments", "overrides", ("link", NotSpecified)] def dependencies(self, configuration): """Get the bindings dependencies for this configuration item""" for typ, binding_options in self.bindings.items(): if typ in configuration: for item in binding_options.wanted(configuration[typ].values()): if item in configuration[typ]: yield configuration[typ][item].long_name def payload(self, environment, current=None): """Create payload for creating/updating the config""" if hasattr(self.options, "as_dict"):
def __register__():
    """Hook point: register the ``roles`` configuration section."""
    role_by_name = sb.dictof(sb.string_spec(), role_spec())
    # Priority 21 controls where this section sits in the normalisation order.
    return {(21, "roles"): sb.container_spec(Roles, role_by_name)}
def __register__():
    """Hook point: register the ``lambda`` configuration section."""
    spec = sb.container_spec(Lambdas, sb.dictof(sb.string_spec(), lambdas_spec()))
    return {(22, "lambda"): spec}
def __register__():
    """Hook point: register the ``apigateway`` configuration section."""
    gateway_by_name = sb.dictof(sb.string_spec(), gateways_spec())
    # Priority 99 controls where this section sits in the normalisation order.
    return {(99, "apigateway"): sb.container_spec(Gateways, gateway_by_name)}
integer_spec, directory_spec, delayed, Spec, ) from input_algorithms.dictobj import dictobj from textwrap import dedent import six valid_import_name = regexed("[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*:[a-zA-Z_][a-zA-Z_0-9]") formatted_dict_or_string_or_list = lambda: match_spec( (six.string_types, formatted(string_spec(), MergedOptionStringFormatter)), ((list,), lambda: listof(formatted_dict_or_string_or_list())), fallback=lambda: dictof(string_spec(), formatted_dict_or_string_or_list()), ) class dashboards_spec(Spec): def normalise(self, meta, val): val = dictionary_spec().normalise(meta, val).as_dict() if "/" not in val: val["/"] = {"is_index": True} return dictof(string_spec(), dashboard_spec()).normalise(meta, val) class dashboard_spec(Spec): def normalise(self, meta, val): val = dictionary_spec().normalise(meta, val) if val.get("is_index"):
name=sb.overridden(name), gateway_location=sb.overridden(self.gateway_location), zone=formatted_string(), stage=formatted_string(), base_path=sb.defaulted(formatted_string(), "(none)"), certificate=sb.required(certificate_spec())).normalise(meta, val) while result.zone and result.zone.endswith("."): result.zone = result.zone[:-1] return result formatted_dictionary_or_string = lambda: sb.match_spec( (six.string_types, formatted_string()), fallback=sb.dictof(sb.string_spec(), formatted_string())) mapping_spec = lambda: sb.create_spec( Mapping, content_type=sb.defaulted(formatted_string(), "application/json"), template=sb.defaulted(formatted_dictionary_or_string(), "$input.json('$')" )) class aws_resource_spec(Spec): def setup(self, method, resource_name): self.method = method self.resource_name = resource_name def normalise(self, meta, val): result = sb.create_spec(
def __register__():
    """Hook point: register the ``roles`` configuration section at priority 21."""
    spec = sb.container_spec(Roles, sb.dictof(sb.string_spec(), role_spec()))
    return {(21, "roles"): spec}
def __register__():
    """Hook point: register the ``apigateway`` configuration section at priority 99."""
    spec = sb.container_spec(Gateways, sb.dictof(sb.string_spec(), gateways_spec()))
    return {(99, "apigateway"): spec}
def environments_spec(self):
    """Spec for each environment options"""
    # A string value means "copy this other environment"; a dict is a full
    # environment definition.
    value_spec = match_spec(
        (str, copy_environment_spec()),
        (dict, self.environment_spec),
    )
    return dictof(string_spec(), value_spec)
def normalise(self, meta, val):
    """
    Normalise one bucket definition into a ``Bucket``.

    Handles the ``use`` template mechanism, then merges the three permission
    keys (``permission``, ``deny_permission``, ``allow_permission``) into a
    single ``permission`` list before running the field specs.
    """
    # Template expansion: "use" merges the named template under the value,
    # failing loudly if the template is unknown.
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)
    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    # The bucket is named after its key in the configuration.
    bucket_name = meta.key_names()['_key_name_0']
    # Normalise each permission flavour separately; missing keys become
    # NotSpecified so listof yields an empty list.
    original_permission = sb.listof(resource_policy_dict()).normalise(
        meta.at("permission"),
        NotSpecified if "permission" not in val else val["permission"])
    deny_permission = sb.listof(
        resource_policy_dict(effect='Deny')).normalise(
            meta.at("deny_permission"),
            NotSpecified if "deny_permission" not in val else val["deny_permission"])
    allow_permission = sb.listof(
        resource_policy_dict(effect='Allow')).normalise(
            meta.at("allow_permission"),
            NotSpecified if "allow_permission" not in val else val["allow_permission"])
    # require_mfa_to_delete is an alias for this permission
    # (an Allow on s3:DeleteBucket conditional on MFA being present).
    if val.get("require_mfa_to_delete") is True:
        delete_policy = {
            "action": "s3:DeleteBucket",
            "resource": {
                "s3": "__self__"
            },
            "Condition": {
                "Bool": {
                    "aws:MultiFactorAuthPresent": True
                }
            }
        }
        normalised_delete_policy = resource_policy_dict(
            effect='Allow').normalise(meta.at("require_mfa_to_delete"),
                                      delete_policy)
        allow_permission.append(normalised_delete_policy)
    # Wrap before mutating so the combined permission list doesn't leak back
    # into the original configuration.
    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission
    return sb.create_spec(
        Bucket,
        # acl may be a canned ACL name or a full ACL statement dict.
        acl=sb.defaulted(
            sb.match_spec((six.string_types, canned_acl_spec()),
                          (dict, acl_statement_spec('acl', 'acl'))),
            None),
        name=sb.overridden(bucket_name),
        location=sb.defaulted(formatted_string, None),
        permission=sb.container_spec(
            Document,
            sb.listof(resource_policy_statement_spec(
                'bucket', bucket_name))),
        tags=sb.dictof(sb.string_spec(), formatted_string),
        website=sb.defaulted(website_statement_spec("website",
                                                    "website"), None),
        logging=sb.defaulted(logging_statement_spec("logging", "logging"), None),
        lifecycle=sb.defaulted(
            sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")),
            None)).normalise(meta, val)
def stack_spec(self):
    """
    Spec for each stack.

    Returns the ``create_spec`` that normalises one stack definition into a
    ``stack_objs.Stack``. Deprecated keys fail fast with a pointer to their
    replacement; ``{_key_name_1}`` in defaults refers to the stack's key in
    the configuration.
    """
    return create_spec(stack_objs.Stack
        # Deprecated keys and their replacements.
        , validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker1``")
        , validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``")
        , validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``")
        , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
        , validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``")

        , bespin = any_spec()

        # Naming: name/stack_name default to the configuration key.
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , stack_name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , environment = formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter)

        , env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
        , build_env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
        , stack_name_env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)

        , tags = dictionary_spec()

        # CloudFormation template and parameter file locations.
        , stack_json = valid_stack_json(default="{config_root}/{_key_name_1}.json")
        , params_json = valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json")
        , params_yaml = valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml")

        # Build ordering and timeouts.
        , build_first = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_after = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_timeout = defaulted(integer_spec(), 1200)
        , ignore_deps = defaulted(boolean(), False)

        , vars = dictof(string_spec(), stack_specs.var_spec(), nested=True)
        , skip_update_if_equivalent = listof(stack_specs.skipper_spec())
        , suspend_actions = defaulted(boolean(), False)
        , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , artifact_retention_after_deployment = defaulted(boolean(), False)
        , command = optional_spec(string_spec())
        , netscaler = optional_spec(self.netscaler_spec)

        # DNS: sites are delayed so they can reference other config lazily.
        , dns = optional_spec(stack_specs.dns_spec(create_spec(stack_objs.DNS
            , vars = dictof(string_spec(), formatted(string_spec(), formatter=MergedOptionStringFormatter), nested=True)
            , providers = dictof(string_spec(), stack_specs.dns_provider_spec())
            , sites = delayed(dictof(string_spec(), stack_specs.dns_site_spec()))
            )))

        , scaling_options = create_spec(ScalingOptions
            , highest_min = defaulted(integer_spec(), 2)
            , instance_count_limit = defaulted(integer_spec(), 10)
            )

        # Artifacts: each file must provide either literal ``content`` or a
        # ``task`` that produces it.
        , artifacts = container_spec(artifact_objs.ArtifactCollection, dictof(string_spec(), create_spec(artifact_objs.Artifact
            , not_created_here = defaulted(boolean(), False)
            , compression_type = string_choice_spec(["gz", "xz"])
            , history_length = integer_spec()
            , cleanup_prefix = optional_spec(string_spec())
            , upload_to = formatted(string_spec(), formatter=MergedOptionStringFormatter)
            , commands = listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand)
            , paths = listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath)
            , files = listof(create_spec(artifact_objs.ArtifactFile, validators.has_either(["content", "task"])
                , content = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                , task = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                , path = formatted(string_spec(), formatter=MergedOptionStringFormatter)
                , task_runner = formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter)
                ))
            )))

        , newrelic = optional_spec(create_spec(stack_objs.NewRelic
            , api_key = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , account_id = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , application_id = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
            , deployed_version = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        # Downtime options keyed by alerting system; keys validated against
        # the known alerting systems.
        , downtimer_options = optional_spec(dictof(valid_string_spec(valid_alerting_system())
            , create_spec(stack_objs.DowntimerOptions
                , hosts = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                )
            ))

        , alerting_systems = optional_spec(dictof(string_spec(), self.alerting_system_spec))

        , ssh = optional_spec(create_spec(stack_objs.SSH
            , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
            , user = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_user = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_location = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance_key_location = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , address = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance = optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_path = formatted(defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , instance_key_path = formatted(defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , storage_type = formatted(defaulted(string_choice_spec(["url", "rattic"]), "url"), formatter=MergedOptionStringFormatter)
            , storage_host = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        , confirm_deployment = optional_spec(self.confirm_deployment_spec)
        )