def normalise_filled(self, meta, val):
    """Normalise a netscaler configuration item into its configuration class."""
    typ = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter).normalise(meta, val)
    name = formatted(overridden("{_key_name_0}"), formatter=MergedOptionStringFormatter).normalise(meta, val)

    # No specialised classes are registered, so every type currently falls
    # back to GenericNetscalerConfig
    special = {}
    kls = special.get(typ, GenericNetscalerConfig)

    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    fmt_options = dictof(string_spec(), match_spec((six.string_types, fmt), fallback=any_spec()))

    options = {
          "typ": overridden(typ)
        , "name": overridden(name)
        , "bindings": dictof(string_spec(), netscaler_binding_spec())
        , "tags": listof(string_spec())
        , "options": fmt_options
        , "overrides": fmt_options
        , "binding_options": fmt_options
        , "environments": optional_spec(listof(valid_environment_spec()))
        }

    # sslcertkey items additionally support linked certificates
    if typ == "sslcertkey":
        options["link"] = listof(string_spec())

    as_dict = set_options(**options).normalise(meta, val)
    return kls(**{key: as_dict[key] for key in options})
def password_spec(self):
    """Spec for a Password object holding KMS decryption options for crypto_text."""
    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
    bespin = formatted(overridden("{bespin}"), formatter=MergedOptionStringFormatter)
    return create_spec(stack_objs.Password
        , name = key_name
        , bespin = bespin
        , KMSMasterKey = required(fmt)
        , encryption_context = optional_spec(dictionary_spec())
        , grant_tokens = optional_spec(listof(fmt))
        , crypto_text = required(fmt)
        , vars = dictionary_spec()
        )
def normalise(self, meta, val):
    """Normalise into a CommandContentAddDict, resolving the image first."""
    fmt = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    # Resolve the image option into an image object and its configuration
    img, conf = complex_from_image_spec().normalise(meta.at("image"), val["image"])
    val["conf"] = conf
    return sb.create_spec(CommandContentAddDict
        , image = sb.overridden(img)
        , conf = sb.any_spec()
        , path = fmt
        , images = sb.overridden(meta.everything.get("images", []))
        , docker_api = sb.overridden(meta.everything["harpoon"].docker_api)
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise a DomainName; trailing dots are stripped from the zone."""
    name = meta.key_names()["_key_name_0"]
    result = sb.create_spec(DomainName
        , name = sb.overridden(name)
        , gateway_location = sb.overridden(self.gateway_location)
        , zone = formatted_string()
        , stage = formatted_string()
        , base_path = sb.defaulted(formatted_string(), "(none)")
        , certificate = sb.required(certificate_spec())
        ).normalise(meta, val)

    # Zones are often written with a trailing dot; normalise that away
    if result.zone:
        result.zone = result.zone.rstrip(".")
    return result
def normalise(self, meta, val):
    """Normalise a DomainName, removing any trailing dots from the zone."""
    key_name = meta.key_names()["_key_name_0"]
    domain = sb.create_spec(DomainName
        , name = sb.overridden(key_name)
        , gateway_location = sb.overridden(self.gateway_location)
        , zone = formatted_string()
        , stage = formatted_string()
        , base_path = sb.defaulted(formatted_string(), "(none)")
        , certificate = sb.required(certificate_spec())
        ).normalise(meta, val)

    # DNS zones may be written fully qualified ("example.com."); drop the dots
    if domain.zone:
        domain.zone = domain.zone.rstrip(".")
    return domain
def netscaler_spec(self):
    """Spec for a NetScaler object."""
    class to_boolean(Spec):
        """Normalise with the wrapped spec, then coerce 'True'/'False' strings into booleans."""
        def setup(self, spec):
            self.spec = spec

        def normalise_either(self, meta, val):
            val = self.spec.normalise(meta, val)
            if isinstance(val, bool):
                return val
            if val == "True":
                return True
            if val == "False":
                return False
            raise BadConfiguration("Expected a boolean", got=val, meta=meta)

    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    return create_spec(netscaler_specs.NetScaler
        , host = required(fmt)
        , dry_run = to_boolean(formatted(overridden("{bespin.dry_run}"), formatter=MergedOptionStringFormatter))
        , username = required(fmt)
        , configuration_username = optional_spec(fmt)
        # delayed so the password isn't resolved until it's actually needed
        , password = delayed(required(fmt))
        , configuration_password = optional_spec(fmt)
        , verify_ssl = defaulted(boolean(), True)
        , nitro_api_version = defaulted(fmt, "v1")
        , configuration = optional_spec(netscaler_specs.configuration_spec())
        , syncable_environments = optional_spec(listof(valid_environment_spec()))
        )
def alerting_system_spec(self):
    """Spec for an AlertingSystem object (currently only nagios is supported)."""
    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    return create_spec(stack_objs.AlertingSystem
        , name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , type = string_choice_spec(["nagios"])
        , endpoint = required(fmt)
        , verify_ssl = defaulted(boolean(), True)
        )
def normalise(self, meta, val):
    """Normalise a Gateway, first merging in any template referenced by 'use'."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    gateway_name = meta.key_names()["_key_name_0"]
    # Resolve the location eagerly so domain names can be created with it
    gateway_location = formatted_string().normalise(meta.at("location"), val.get("location", ""))

    return sb.create_spec(Gateway
        , name = sb.overridden(gateway_name)
        , location = sb.required(formatted_string())
        , stages = sb.listof(formatted_string())
        , api_keys = sb.listof(api_key_spec())
        , domain_names = sb.dictof(sb.string_spec(), custom_domain_name_spec(gateway_location))
        , resources = sb.dictof(sb.string_spec(), gateway_resource_spec())
        ).normalise(meta, val)
def ultradns_provider_spec(self):
    """Spec for an UltraDNSProvider object."""
    key_name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
    return sb.create_spec(UltraDNSProvider
        , name = key_name
        , provider_type = sb.required(sb.string_spec())
        , username = sb.required(formatted_string)
        , password = sb.required(formatted_string)
        )
def extra_configuration_collection(self, configuration):
    """
    Hook to do any extra configuration collection or converter registration

    Here we register our base configuration converters:

    photons_app
        .. autoattribute:: photons_app.option_spec.photons_app_spec.PhotonsAppSpec.photons_app_spec

    targets
        .. autoattribute:: photons_app.option_spec.photons_app_spec.PhotonsAppSpec.targets_spec

    target_register
        .. autoattribute:: photons_app.option_spec.photons_app_spec.PhotonsAppSpec.target_register_spec

    protocol_register
        The protocol_register object from photons_messages
    """
    spec = PhotonsAppSpec()
    converters = {
        (0, ("targets",)): spec.targets_spec,
        (0, ("photons_app",)): spec.photons_app_spec,
        (0, ("target_register",)): spec.target_register_spec,
        (0, ("protocol_register",)): sb.overridden(protocol_register),
        (0, ("reference_resolver_register",)): spec.reference_resolver_register_spec,
    }
    self.register_converters(converters, Meta, configuration, sb.NotSpecified)
def normalise(self, meta, val):
    """Normalise a DNSRoute; zones get a trailing dot and object targets are reduced to their cname."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_spec(), MergedOptionStringFormatter)
    route_name = meta.key_names()["_key_name_0"]

    val = sb.create_spec(DNSRoute
        , name = sb.overridden(route_name)
        , zone = fmt
        , record_type = sb.string_choice_spec(["CNAME"])
        , record_target = fmt
        ).normalise(meta, val)

    # Zones must be fully qualified
    if not val.zone.endswith("."):
        val.zone = "{0}.".format(val.zone)

    if not isinstance(val.record_target, six.string_types):
        # Non-string targets must expose a cname we can point at
        if not hasattr(val.record_target, "cname"):
            raise BadSpecValue("record_target must point at an object with a cname property", got=type(val.record_target), meta=meta)
        val.record_target = val.record_target.cname

    return val
def normalise(self, meta, val):
    """Normalise an EncryptionKey; root access is included in the policy unless no_root_access is set."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    key_name = meta.key_names()["_key_name_0"]

    key = sb.create_spec(EncryptionKey
        , name = sb.overridden(key_name)
        , location = sb.required(fmt)
        , description = fmt
        , grant = sb.listof(grant_statement_spec('key', key_name))
        , admin_users = sb.listof(sb.any_spec())
        , permission = sb.listof(sb.dictionary_spec())
        , no_root_access = sb.defaulted(sb.boolean(), False)
        ).normalise(meta, val)

    # Build the key policy from the explicit permissions plus generated statements
    statements = key.permission
    if not key.no_root_access:
        statements.append({"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""})
    if key.admin_users:
        for admin_user in key.admin_users:
            statements.append({"principal": admin_user, "action": "kms:*", "resource": {"kms": "__self__"}, "Sid": ""})

    key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('key', key_name))).normalise(meta.at("admin_users"), statements)
    return key
def normalise(self, meta, val):
    """Normalise a Role; templates are merged first, then trust and permission lists are combined."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    role_name = meta.key_names()["_key_name_0"]

    def gather(spec, key):
        # Normalise the (possibly absent) list of dictionaries at this key
        return sb.listof(spec).normalise(meta.at(key), val.get(key, NotSpecified))

    original_permission = gather(permission_dict(), "permission")
    deny_permission = gather(permission_dict(effect='Deny'), "deny_permission")
    allow_permission = gather(permission_dict(effect='Allow'), "allow_permission")
    allow_to_assume_me = gather(trust_dict("principal"), "allow_to_assume_me")
    disallow_to_assume_me = gather(trust_dict("notprincipal"), "disallow_to_assume_me")

    val = val.wrapped()
    val["trust"] = allow_to_assume_me + disallow_to_assume_me
    val["permission"] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Role
        , name = sb.overridden(role_name)
        , description = fmt
        , trust = sb.container_spec(Document, sb.listof(trust_statement_spec('role', role_name)))
        , permission = sb.container_spec(Document, sb.listof(permission_statement_spec('role', role_name)))
        , make_instance_profile = sb.defaulted(sb.boolean(), False)
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise a Bucket; templates are merged first and the permission lists combined into one policy."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    bucket_name = meta.key_names()["_key_name_0"]

    def gather(spec, key):
        # Normalise the (possibly absent) list of dictionaries at this key
        return sb.listof(spec).normalise(meta.at(key), val.get(key, NotSpecified))

    original_permission = gather(resource_policy_dict(), "permission")
    deny_permission = gather(resource_policy_dict(effect='Deny'), "deny_permission")
    allow_permission = gather(resource_policy_dict(effect='Allow'), "allow_permission")

    # require_mfa_to_delete is an alias for this permission
    if val.get("require_mfa_to_delete") is True:
        delete_policy = {
              "action": "s3:DeleteBucket"
            , "resource": {"s3": "__self__"}
            , "Condition": {"Bool": {"aws:MultiFactorAuthPresent": True}}
            }
        normalised_delete_policy = resource_policy_dict(effect='Allow').normalise(meta.at("require_mfa_to_delete"), delete_policy)
        allow_permission.append(normalised_delete_policy)

    val = val.wrapped()
    val["permission"] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket
        , acl = sb.defaulted(sb.match_spec((six.string_types, canned_acl_spec()), (dict, acl_statement_spec('acl', 'acl'))), None)
        , name = sb.overridden(bucket_name)
        , location = sb.defaulted(fmt, None)
        , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags = sb.dictof(sb.string_spec(), fmt)
        , website = sb.defaulted(website_statement_spec("website", "website"), None)
        , logging = sb.defaulted(logging_statement_spec("logging", "logging"), None)
        , lifecycle = sb.defaulted(sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None)
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise an EncryptionKey; the root account always receives full access in the key policy."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    key_name = meta.key_names()["_key_name_0"]

    key = sb.create_spec(EncryptionKey
        , name = sb.overridden(key_name)
        , location = sb.required(fmt)
        , description = fmt
        , grant = sb.listof(grant_statement_spec("key", key_name))
        , admin_users = sb.listof(sb.any_spec())
        ).normalise(meta, val)

    # Root always gets full access; admin users get full access to this key
    statements = [{"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""}]
    if key.admin_users:
        for admin_user in key.admin_users:
            statements.append({"principal": admin_user, "action": "kms:*", "resource": {"kms": "__self__"}, "Sid": ""})

    key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec("key", key_name))).normalise(meta.at("admin_users"), statements)
    return key
def normalise(self, meta, val):
    """Normalise a Lambda function definition, merging in any template referenced by 'use'."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    function_name = meta.key_names()["_key_name_0"]

    val = sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(fmt)
        , location = sb.required(fmt)
        , description = fmt
        , sample_event = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , desired_output_for_test = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        ).normalise(meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(val[key], MergedOptions):
            as_dict = val[key].as_dict()

            class Arbritrary(dictobj):
                fields = list(as_dict.keys())

            val[key] = Arbritrary(**as_dict)

    return val
def normalise(self, meta, val):
    """Normalise a LambdaMethod.

    The deployed location comes either from the ``location`` key or from a
    defined lambda function object (which carries its own location).
    Specifying both is an error; specifying neither is also an error.

    Fix: the original set ``location = None`` and immediately tested
    ``location is not None``, so the "both specified" error could never fire,
    and ``result.location`` was unconditionally overwritten with ``None``
    when the function was given as a plain name — silently discarding a
    user-supplied location. The checks now run once both sources are known.
    """
    result = sb.create_spec(LambdaMethod
        , http_method = sb.overridden(self.method)
        , resource_name = sb.overridden(self.resource_name)
        , function = formatted_string()
        , location = formatted_string()
        , account = sb.optional_spec(formatted_string())
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , request_mapping = sb.defaulted(mapping_spec(), Mapping("application/json", ""))
        , mapping = sb.defaulted(mapping_spec(), Mapping("application/json", "$input.json('$')"))
        , sample_event = sb.or_spec(formatted_dictionary(), sb.string_spec())
        , desired_output_for_test = sb.or_spec(formatted_dictionary(), sb.string_spec())
        ).normalise(meta, val)

    # Resolve "{...}" formatted strings in the event fields
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    function = result.function
    location = None if result.location is NotSpecified else result.location

    if not isinstance(function, six.string_types):
        # function is a defined lambda object that knows its own location
        if location is not None:
            raise BadSpecValue("Please don't specify a defined lambda function and location at the same time", meta=meta)
        location = function.location
        function = function.name

    if location is None:
        raise BadSpecValue("Location is a required key!", meta=meta)

    result.function = function
    result.location = location
    return result
def ultradns_provider_spec(self):
    """Spec for an UltraDNSProvider object."""
    return sb.create_spec(UltraDNSProvider
        , name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , provider_type = sb.required(sb.string_spec())
        , username = sb.required(formatted_string)
        , password = sb.required(formatted_string)
        )
def normalise(self, meta, val):
    """Normalise into a CommandContentAddDict, turning a string image into a full image object."""
    from harpoon.option_spec.harpoon_specs import HarpoonSpec
    from harpoon.option_spec.image_objs import Image

    fmt = sb.formatted(sb.or_spec(sb.string_spec(), sb.typed(Image)), formatter=MergedOptionStringFormatter)
    img = val["conf"] = sb.set_options(image=fmt).normalise(meta, val)["image"]

    if isinstance(img, six.string_types):
        # A plain string becomes a minimal image with just a FROM command
        val["conf"] = HarpoonSpec().image_spec.normalise(meta.at("image"), {"commands": ["FROM {0}".format(img)]})
        val["conf"].image_name = img

    return sb.create_spec(CommandContentAddDict
        , image = sb.overridden(img)
        , conf = sb.any_spec()
        , path = fmt
        , images = sb.overridden(meta.everything.get("images", []))
        , docker_context = sb.overridden(meta.everything["harpoon"].docker_context)
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise a MockMethod.

    Fix: the original returned straight from create_spec, leaving the loop
    that resolves "{...}" formatted strings unreachable (and referencing an
    unassigned ``result``). The result is now captured, the event fields are
    formatted, and then returned — matching the LambdaMethod normaliser.
    """
    result = sb.create_spec(MockMethod
        , http_method = sb.overridden(self.method)
        , resource_name = sb.overridden(self.resource_name)
        , request_mapping = sb.defaulted(mapping_spec(), Mapping("application/json", '{"statusCode": 200}'))
        , mapping = mapping_spec()
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , sample_event = sb.or_spec(sb.dictionary_spec(), sb.string_spec())
        , desired_output_for_test = sb.or_spec(sb.dictionary_spec(), sb.string_spec())
        ).normalise(meta, val)

    # Resolve "{...}" formatted strings in the event fields
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    return result
def normalise(self, meta, val):
    """Normalise a Role; templates are merged first, and the role must have at least one trust statement."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    role_name = meta.key_names()["_key_name_0"]

    def gather(spec, key):
        # Normalise the (possibly absent) list of dictionaries at this key
        return sb.listof(spec).normalise(meta.at(key), val.get(key, NotSpecified))

    original_permission = gather(permission_dict(), "permission")
    deny_permission = gather(permission_dict(effect='Deny'), "deny_permission")
    allow_permission = gather(permission_dict(effect='Allow'), "allow_permission")
    allow_to_assume_me = gather(trust_dict("principal"), "allow_to_assume_me")
    disallow_to_assume_me = gather(trust_dict("notprincipal"), "disallow_to_assume_me")

    if not allow_to_assume_me and not disallow_to_assume_me:
        raise BadSpecValue("Roles must have either allow_to_assume_me or disallow_to_assume_me specified", meta=meta)

    val = val.wrapped()
    val["trust"] = allow_to_assume_me + disallow_to_assume_me
    val["permission"] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Role
        , name = sb.overridden(role_name)
        , description = fmt
        , attached_policies = sb.listof(fmt)
        , trust = sb.container_spec(Document, sb.listof(trust_statement_spec('role', role_name)))
        , permission = sb.container_spec(Document, sb.listof(permission_statement_spec('role', role_name)))
        , make_instance_profile = sb.defaulted(sb.boolean(), False)
        ).normalise(meta, val)
def target_register_spec(self):
    """
    Make a TargetRegister object

    .. autoclass:: photons_app.option_spec.photons_app_spec.TargetRegister
    """
    collector = sb.formatted(sb.overridden("{collector}"), formatter=MergedOptionStringFormatter)
    return sb.create_spec(TargetRegister, collector=collector)
def ultradns_site_spec(self, this):
    """Spec for an UltraDNSSite object."""
    fmt = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    return sb.create_spec(UltraDNSSite
        , name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , ttl = sb.optional_spec(sb.integer_spec())
        , provider = sb.any_spec()
        , record_type = sb.required(fmt)
        , zone = sb.required(fmt)
        , domain = sb.required(fmt)
        , environments = sb.required(self.dns_environment_spec(this))
        )
def normalise_filled(self, meta, val):
    """Normalise a lambda code definition: exactly one of s3, inline or directory."""
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)
    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)
    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    fmt = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(S3Code
            , key = fmt
            , bucket = fmt
            , version = sb.defaulted(sb.string_spec(), NotSpecified)
            ).normalise(meta, val['s3'])

    if "inline" in val:
        # The runtime lives on the enclosing function definition
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = fmt.normalise(meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode
            , code = sb.string_spec()
            , runtime = sb.overridden(runtime)
            ).normalise(meta, {"code": val['inline']})

    directory = val['directory']
    if isinstance(directory, six.string_types):
        directory = {"directory": directory}
    if 'directory' in directory:
        directory['directory'] = fmt.normalise(meta.at("directory").at("directory"), directory['directory'])
    return sb.create_spec(DirectoryCode
        , directory = sb.directory_spec()
        , exclude = sb.listof(sb.string_spec())
        ).normalise(meta, directory)
def ultradns_site_spec(self, this):
    """Spec for an UltraDNSSite object."""
    fmt = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    key_name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
    return sb.create_spec(UltraDNSSite
        , name = key_name
        , ttl = sb.optional_spec(sb.integer_spec())
        , provider = sb.any_spec()
        , record_type = sb.required(fmt)
        , zone = sb.required(fmt)
        , domain = sb.required(fmt)
        , environments = sb.required(self.dns_environment_spec(this))
        )
def normalise(self, meta, val):
    """Normalise a Lambda function definition, first merging in any template referenced by 'use'."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    function_name = meta.key_names()["_key_name_0"]

    val = sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(fmt)
        , location = sb.required(fmt)
        , description = fmt
        , sample_event = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , desired_output_for_test = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        ).normalise(meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(val[key], MergedOptions):
            as_dict = val[key].as_dict()

            class Arbritrary(dictobj):
                fields = list(as_dict.keys())

            val[key] = Arbritrary(**as_dict)

    return val
def normalise(self, meta, val):
    """Normalise a LambdaMethod.

    The deployed location comes either from the ``location`` key or from a
    defined lambda function object (which carries its own location).
    Specifying both is an error; specifying neither is also an error.

    Fix: the original set ``location = None`` and immediately tested
    ``location is not None``, so the "both specified" error could never fire,
    and ``result.location`` was unconditionally overwritten with ``None``
    when the function was given as a plain name — silently discarding a
    user-supplied location. The checks now run once both sources are known.
    """
    result = sb.create_spec(LambdaMethod
        , http_method = sb.overridden(self.method)
        , resource_name = sb.overridden(self.resource_name)
        , function = formatted_string()
        , location = formatted_string()
        , account = sb.optional_spec(formatted_string())
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , request_mapping = sb.defaulted(mapping_spec(), Mapping("application/json", ""))
        , mapping = sb.defaulted(mapping_spec(), Mapping("application/json", "$input.json('$')"))
        , sample_event = sb.or_spec(formatted_dictionary(), sb.string_spec())
        , desired_output_for_test = sb.or_spec(formatted_dictionary(), sb.string_spec())
        ).normalise(meta, val)

    # Resolve "{...}" formatted strings in the event fields
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    function = result.function
    location = None if result.location is NotSpecified else result.location

    if not isinstance(function, six.string_types):
        # function is a defined lambda object that knows its own location
        if location is not None:
            raise BadSpecValue("Please don't specify a defined lambda function and location at the same time", meta=meta)
        location = function.location
        function = function.name

    if location is None:
        raise BadSpecValue("Location is a required key!", meta=meta)

    result.function = function
    result.location = location
    return result
def normalise(self, meta, val):
    """Normalise a MockMethod.

    Fix: the original returned straight from create_spec, leaving the loop
    that resolves "{...}" formatted strings unreachable (and referencing an
    unassigned ``result``). The result is now captured, the event fields are
    formatted, and then returned — matching the LambdaMethod normaliser.
    """
    result = sb.create_spec(MockMethod
        , http_method = sb.overridden(self.method)
        , resource_name = sb.overridden(self.resource_name)
        , request_mapping = sb.defaulted(mapping_spec(), Mapping("application/json", '{"statusCode": 200}'))
        , mapping = mapping_spec()
        , require_api_key = sb.defaulted(sb.boolean(), False)
        , sample_event = sb.or_spec(sb.dictionary_spec(), sb.string_spec())
        , desired_output_for_test = sb.or_spec(sb.dictionary_spec(), sb.string_spec())
        ).normalise(meta, val)

    # Resolve "{...}" formatted strings in the event fields
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(result[key], six.string_types):
            v = result[key]
            if v.startswith("{") and v.endswith("}"):
                v = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at(key), v)
            result[key] = v

    return result
def wait_condition_spec(self):
    """Spec for a wait_condition block"""
    from harpoon.option_spec import image_objs
    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    fmt_pairs = dictof(fmt, fmt)
    return create_spec(image_objs.WaitCondition
        , harpoon = formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter)
        , timeout = defaulted(integer_spec(), 300)
        , wait_between_attempts = defaulted(float_spec(), 5)
        , greps = optional_spec(fmt_pairs)
        , command = optional_spec(listof(fmt))
        , port_open = optional_spec(listof(integer_spec()))
        , file_value = optional_spec(fmt_pairs)
        , curl_result = optional_spec(fmt_pairs)
        , file_exists = optional_spec(listof(fmt))
        )
def wait_condition_spec(self):
    """Spec for a wait_condition block"""
    from harpoon.option_spec import image_objs
    fmt = formatted(string_spec(), formatter=MergedOptionStringFormatter)
    return create_spec(image_objs.WaitCondition
        , harpoon = formatted(overridden("{harpoon}"), formatter=MergedOptionStringFormatter)
        , timeout = defaulted(integer_spec(), 300)
        , wait_between_attempts = defaulted(float_spec(), 5)
        , greps = optional_spec(dictof(fmt, fmt))
        , command = optional_spec(listof(fmt))
        , port_open = optional_spec(listof(integer_spec()))
        , file_value = optional_spec(dictof(fmt, fmt))
        , curl_result = optional_spec(dictof(fmt, fmt))
        , file_exists = optional_spec(listof(fmt))
        )
def normalise(self, meta, val):
    """Normalise a Bucket; templates are merged first and the permission lists combined."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    fmt = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    bucket_name = meta.key_names()["_key_name_0"]

    def gather(spec, key):
        # Normalise the (possibly absent) list of dictionaries at this key
        return sb.listof(spec).normalise(meta.at(key), val.get(key, NotSpecified))

    original_permission = gather(resource_policy_dict(), "permission")
    deny_permission = gather(resource_policy_dict(effect='Deny'), "deny_permission")
    allow_permission = gather(resource_policy_dict(effect='Allow'), "allow_permission")

    val = val.wrapped()
    val["permission"] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket
        , name = sb.overridden(bucket_name)
        , location = sb.required(fmt)
        , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags = sb.dictof(sb.string_spec(), fmt)
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise a Gateway, first merging in any template referenced by 'use'."""
    if "use" in val:
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    gateway_name = meta.key_names()["_key_name_0"]
    # Resolve the location eagerly so domain names can be created with it
    gateway_location = formatted_string().normalise(meta.at("location"), val.get("location", ""))

    return sb.create_spec(Gateway
        , name = sb.overridden(gateway_name)
        , location = sb.required(formatted_string())
        , stages = sb.listof(formatted_string())
        , api_keys = sb.listof(api_key_spec())
        , domain_names = sb.dictof(sb.string_spec(), custom_domain_name_spec(gateway_location))
        , resources = sb.listof(gateway_resource_spec())
        ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise an ADD instruction into a list of docker commands."""
    from harpoon.option_spec.harpoon_specs import HarpoonSpec
    fmt = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    # Exactly one of these keys decides which kind of ADD this is
    val = sb.apply_validators(meta, val, [validators.either_keys(["context"], ["content"], ["get"], ["formatted"])])

    if "get" in val:
        val = sb.create_spec(CommandAddExtra
            , get = sb.required(sb.listof(fmt))
            , prefix = sb.optional_spec(sb.string_spec())
            ).normalise(meta, val)

    if "context" in val:
        val = sb.create_spec(CommandContextAdd
            , validators.deprecated_key("mtime", "Since docker 1.8, timestamps no longer invalidate the docker layer cache")
            , dest = sb.required(fmt)
            , context = sb.required(HarpoonSpec().context_spec)
            ).normalise(meta, val)

    if "formatted" in val:
        val = sb.create_spec(CommandContentAdd
            , validators.deprecated_key("mtime", "Since docker 1.8, timestamps no longer invalidate the docker layer cache")
            , dest = sb.required(fmt)
            , content = sb.overridden(sb.NotSpecified)
            , formatted = sb.container_spec(CommandContentAddString, fmt)
            ).normalise(meta, val)

    if "content" in val:
        val = sb.create_spec(CommandContentAdd
            , validators.deprecated_key("mtime", "Since docker 1.8, timestamps no longer invalidate the docker layer cache")
            , dest = sb.required(fmt)
            , content = sb.match_spec(
                  (six.string_types, sb.container_spec(CommandContentAddString, sb.string_spec()))
                , fallback = complex_ADD_from_image_spec()
                )
            ).normalise(meta, val)

    return list(val.commands(meta))
def normalise_filled(self, meta, val):
    """
    Normalise the ``code`` section of a lambda function definition.

    Exactly one of ``s3``, ``inline`` or ``directory`` must be given; the
    matching branch returns an S3Code, InlineCode or DirectoryCode object.

    Raises BadSpecValue when none, or more than one, of the three keys is
    specified.
    """
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)
    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)
    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    # Single shared formatted-string spec; previously an identical spec was
    # rebuilt inside both the inline and directory branches.
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(S3Code
            , key = formatted_string
            , bucket = formatted_string
            , version = sb.defaulted(sb.string_spec(), NotSpecified)
            ).normalise(meta, val['s3'])

    elif "inline" in val:
        # The runtime is found on the enclosing definition, one level up from
        # the ``code`` key in the configuration path
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = formatted_string.normalise(meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode
            , code = sb.string_spec()
            , runtime = sb.overridden(runtime)
            ).normalise(meta, {"code": val['inline']})

    else:
        # A bare string is shorthand for {"directory": <string>}
        directory = val['directory']
        if isinstance(directory, six.string_types):
            directory = {"directory": directory}
        if 'directory' in directory:
            directory['directory'] = formatted_string.normalise(meta.at("directory").at("directory"), directory['directory'])
        return sb.create_spec(DirectoryCode
            , directory = sb.directory_spec()
            , exclude = sb.listof(sb.string_spec())
            ).normalise(meta, directory)
def normalise(self, meta, val):
    """
    Normalise a docker ADD command definition into a list of commands.

    Exactly one of ``get``, ``context``, ``formatted`` or ``content`` must be
    present (enforced by the either_keys validator); the matching branch
    re-normalises ``val`` into the corresponding command object, and we return
    the commands it produces.

    This variant still accepts an optional integer ``mtime`` key.
    """
    from harpoon.option_spec.harpoon_specs import HarpoonSpec
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    # Reject definitions that specify none, or more than one, of the four forms
    val = sb.apply_validators(meta, val, [validators.either_keys(["context"], ["content"], ["get"], ["formatted"])])

    if "get" in val:
        val = sb.create_spec(CommandAddExtra
            , get = sb.required(sb.listof(formatted_string))
            , prefix = sb.optional_spec(sb.string_spec())
            ).normalise(meta, val)

    if "context" in val:
        val = sb.create_spec(CommandContextAdd
            , dest = sb.required(formatted_string)
            , mtime = sb.optional_spec(sb.integer_spec())
            , context = sb.required(HarpoonSpec().context_spec)
            ).normalise(meta, val)

    if "formatted" in val:
        val = sb.create_spec(CommandContentAdd
            , dest = sb.required(formatted_string)
            , mtime = sb.optional_spec(sb.integer_spec())
            # content is forced to NotSpecified so only the formatted string is used
            , content = sb.overridden(sb.NotSpecified)
            , formatted = sb.container_spec(CommandContentAddString, formatted_string)
            ).normalise(meta, val)

    if "content" in val:
        val = sb.create_spec(CommandContentAdd
            , dest = sb.required(formatted_string)
            , mtime = sb.optional_spec(sb.integer_spec())
            # A plain string becomes CommandContentAddString; anything else goes
            # through the complex ADD-from-image handling
            , content = sb.match_spec(
                  (six.string_types, sb.container_spec(CommandContentAddString, sb.string_spec()))
                , fallback = complex_ADD_from_image_spec()
                )
            ).normalise(meta, val)

    return list(val.commands(meta))
def normalise(self, meta, val):
    """
    Normalise a lambda function definition into a Lambda object.

    If the definition says ``use: <template>``, the named entry from
    ``templates`` in the configuration is merged underneath it first.

    Raises BadTemplate when the requested template doesn't exist.
    """
    if 'use' in val:
        wanted = val['use']
        templates = meta.everything['templates']
        if wanted not in templates:
            raise BadTemplate("Template doesn't exist!", wanted=wanted, available=list(templates.keys()), meta=meta)
        val = MergedOptions.using(templates[wanted], val)

    # Strings (ints are rendered as strings) with {var} formatting applied
    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The function is named after its key in the configuration
    function_name = meta.key_names()['_key_name_0']

    lambda_spec = sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(formatted_string)
        , location = sb.required(formatted_string)
        , description = formatted_string
        , sample_event = sb.defaulted(sb.or_spec(sb.dictionary_spec(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        )
    return lambda_spec.normalise(meta, val)
def normalise(self, meta, val):
    """
    Normalise a DNS route definition into a DNSRoute object.

    If the definition says ``use: <template>``, the named entry from
    ``templates`` in the configuration is merged underneath it first.

    Raises BadTemplate when the requested template doesn't exist, and
    BadSpecValue when record_target is neither a string nor an object with a
    ``cname`` property.
    """
    if 'use' in val:
        wanted = val['use']
        templates = meta.everything['templates']
        if wanted not in templates:
            raise BadTemplate("Template doesn't exist!", wanted=wanted, available=list(templates.keys()), meta=meta)
        val = MergedOptions.using(templates[wanted], val)

    formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter)

    # The route is named after its key in the configuration
    route_name = meta.key_names()['_key_name_0']

    route = sb.create_spec(DNSRoute
        , name = sb.overridden(route_name)
        , zone = formatted_string
        , record_type = sb.string_choice_spec(["CNAME"])
        , record_target = formatted_string
        ).normalise(meta, val)

    # Make the zone fully qualified by appending a trailing dot if it's missing
    if not route.zone.endswith("."):
        route.zone = "{0}.".format(route.zone)

    # A non-string target must expose a cname for us to point the record at
    if not isinstance(route.record_target, six.string_types):
        if not hasattr(route.record_target, "cname"):
            raise BadSpecValue("record_target must point at an object with a cname property", got=type(route.record_target), meta=meta)
        route.record_target = route.record_target.cname

    return route
def normalise(self, meta, val):
    """
    Normalise a bucket definition into a Bucket object.

    If the definition says ``use: <template>``, the named entry from
    ``templates`` in the configuration is merged underneath it first.
    ``permission``, ``deny_permission`` and ``allow_permission`` are folded
    into a single ``permission`` list before the final spec runs.

    Raises BadTemplate when the requested template doesn't exist.
    """
    if 'use' in val:
        wanted = val['use']
        templates = meta.everything['templates']
        if wanted not in templates:
            raise BadTemplate("Template doesn't exist!", wanted=wanted, available=list(templates.keys()), meta=meta)
        val = MergedOptions.using(templates[wanted], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The bucket is named after its key in the configuration
    bucket_name = meta.key_names()['_key_name_0']

    def gather(key, spec):
        # Normalise one permission list, treating a missing key as NotSpecified
        found = val[key] if key in val else NotSpecified
        return sb.listof(spec).normalise(meta.at(key), found)

    combined = (
        gather("permission", resource_policy_dict())
        + gather("deny_permission", resource_policy_dict(effect='Deny'))
        + gather("allow_permission", resource_policy_dict(effect='Allow'))
    )

    val = val.wrapped()
    val['permission'] = combined

    return sb.create_spec(Bucket
        , name = sb.overridden(bucket_name)
        , location = sb.required(formatted_string)
        , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags = sb.dictof(sb.string_spec(), formatted_string)
        ).normalise(meta, val)
class PhotonsApp(dictobj.Spec):
    """
    The main photons_app object.

    .. dictobj_params::
    """
    config = dictobj.Field(sb.file_spec, wrapper=sb.optional_spec, help="The root configuration file")
    extra = dictobj.Field(sb.string_spec, default="", help="The arguments after the ``--`` in the commandline")
    debug = dictobj.Field(sb.boolean, default=False, help="Whether we are in debug mode or not")
    target = dictobj.Field(wrapper=sb.optional_spec, format_into=sb.string_spec, help="The target to use when executing the task")
    artifact = dictobj.Field(default="", format_into=sb.string_spec, help="The artifact string from the commandline")
    reference = dictobj.Field(default="", format_into=sb.string_spec, help="The device(s) to send commands to")
    extra_files = dictobj.Field(sb.string_spec, wrapper=sb.listof, help="Extra files to load")
    chosen_task = dictobj.Field(default="list_tasks", format_into=sb.string_spec, help="The task that is being executed")
    cleaners = dictobj.Field(lambda: sb.overridden([]), help="A list of functions to call when cleaning up at the end of the program")
    # formatted=True means "{final_future}" is resolved via the option formatter
    # into the actual future object from the configuration
    final_future = dictobj.Field(sb.overridden("{final_future}"), formatted=True, help="A future representing the end of the program")
    default_activate_all_modules = dictobj.Field(sb.boolean, default=False, help="The collector looks at this to determine if we should default to activating all photons modules")

    @memoized_property
    def loop(self):
        # Cached event loop; debug mode also turns on asyncio's debug checks
        loop = asyncio.get_event_loop()
        if self.debug:
            loop.set_debug(True)
        return loop

    @memoized_property
    def extra_as_json(self):
        # Parse the post-`--` commandline arguments as JSON, defaulting to {}
        # when nothing (or NotSpecified) was given
        options = "{}" if self.extra in (None, "", sb.NotSpecified) else self.extra
        try:
            return json.loads(options)
        except (TypeError, ValueError) as error:
            raise BadOption("The options after -- wasn't valid json", error=error)

    async def cleanup(self, targets):
        # Run all registered cleaners, then finish all targets. Cancellation or
        # Ctrl-C stops the remaining items in the current loop; other errors are
        # logged and the loop continues.
        for cleaner in self.cleaners:
            try:
                await cleaner()
            except asyncio.CancelledError:
                break
            except KeyboardInterrupt:
                break
            # NOTE(review): RuntimeWarning is a subclass of Exception, so this
            # tuple is redundant — kept as-is for byte-compatibility
            except (RuntimeWarning, Exception):
                exc_info = sys.exc_info()
                log.error(exc_info[1], exc_info=exc_info)

        for target in targets:
            try:
                # Targets without a finish method are silently skipped
                if hasattr(target, "finish"):
                    await target.finish()
            except asyncio.CancelledError:
                break
            except KeyboardInterrupt:
                break
            except (RuntimeWarning, Exception):
                exc_info = sys.exc_info()
                log.error(exc_info[1], exc_info=exc_info)
def normalise(self, meta, val):
    """
    Normalise a bucket definition into a Bucket object.

    Supports ``use: <template>`` expansion from the ``templates`` section, and
    folds ``permission``, ``deny_permission``, ``allow_permission`` and the
    ``require_mfa_to_delete`` shortcut into one ``permission`` list before the
    final spec runs.

    Raises BadTemplate when the requested template doesn't exist.
    """
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)

    # The bucket is named after its key in the configuration
    bucket_name = meta.key_names()['_key_name_0']

    # Missing keys are normalised from NotSpecified so each returns a (possibly empty) list
    original_permission = sb.listof(resource_policy_dict()).normalise(meta.at("permission"), NotSpecified if "permission" not in val else val["permission"])
    deny_permission = sb.listof(resource_policy_dict(effect='Deny')).normalise(meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"])
    allow_permission = sb.listof(resource_policy_dict(effect='Allow')).normalise(meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"])

    # require_mfa_to_delete is an alias for this permission
    # NOTE(review): the dict mixes lower-case keys ("action", "resource") with
    # capitalised ones ("Condition", "Bool") — presumably resource_policy_dict
    # accepts both casings; confirm against its implementation
    if val.get("require_mfa_to_delete") is True:
        delete_policy = {
            "action": "s3:DeleteBucket",
            "resource": {"s3": "__self__"},
            "Condition": {"Bool": {"aws:MultiFactorAuthPresent": True}}
        }
        normalised_delete_policy = resource_policy_dict(effect='Allow').normalise(meta.at("require_mfa_to_delete"), delete_policy)
        allow_permission.append(normalised_delete_policy)

    val = val.wrapped()
    val['permission'] = original_permission + deny_permission + allow_permission

    return sb.create_spec(Bucket
        , acl=sb.defaulted(sb.match_spec((six.string_types, canned_acl_spec()), (dict, acl_statement_spec('acl', 'acl'))), None)
        , name=sb.overridden(bucket_name)
        , location=sb.defaulted(formatted_string, None)
        , permission=sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name)))
        , tags=sb.dictof(sb.string_spec(), formatted_string)
        , website=sb.defaulted(website_statement_spec("website", "website"), None)
        , logging=sb.defaulted(logging_statement_spec("logging", "logging"), None)
        , lifecycle=sb.defaulted(sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None)
        ).normalise(meta, val)
def image_spec(self):
    """Spec for each image"""
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``")
        , validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``")
        , validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``")
        , validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")
        , validators.deprecated_key("recursive", "Use ``persistence``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name=optional_spec(string_spec())
        , image_index=defaulted(string_spec(), "")
        , container_name=optional_spec(string_spec())
        , image_name_prefix=defaulted(string_spec(), "")
        , user=defaulted(string_spec(), None)
        # NOTE(review): time.time() is evaluated once, when the spec is built,
        # not per-normalise — confirm that is intended
        , mtime=defaulted(any_spec(), time.time())
        , configuration=any_spec()
        , vars=dictionary_spec()
        , deleteable_image=defaulted(boolean(), False)

        # The spec itself
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands=required(container_spec(Commands, listof(command_spec())))
        , squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , persistence=optional_spec(create_spec(image_objs.Persistence
            , validators.deprecated_key("persist", "Use ``folders``")
            , action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , folders=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , cmd=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , shell=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
            # image_name is resolved lazily from the enclosing image's options
            , image_name=delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter))
            ))
        , links=listof(specs.link_spec(), expect=image_objs.Link)
        , context=self.context_spec
        , wait_condition=optional_spec(self.wait_condition_spec)
        , lxc_conf=defaulted(filename_spec(), None)
        , volumes=create_spec(image_objs.Volumes
            , mount=listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )
        , dependency_options=dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached=defaulted(boolean(), False)
                , wait_condition=optional_spec(self.wait_condition_spec)
                )
            )
        , env=listof(specs.env_spec(), expect=image_objs.Environment)
        , ports=listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits=defaulted(listof(dictionary_spec()), None)
        , log_config=defaulted(listof(dictionary_spec()), None)
        , security_opt=defaulted(listof(string_spec()), None)
        , read_only_rootfs=defaulted(boolean(), False)
        , other_options=create_spec(other_options
            , start=dictionary_spec()
            , build=dictionary_spec()
            , create=dictionary_spec()
            , host_config=dictionary_spec()
            )
        , network=create_spec(image_objs.Network
            , dns=defaulted(listof(string_spec()), None)
            , mode=defaulted(string_spec(), None)
            , hostname=defaulted(string_spec(), None)
            , domainname=defaulted(string_spec(), None)
            , disabled=defaulted(boolean(), False)
            , dns_search=defaulted(listof(string_spec()), None)
            , extra_hosts=listof(string_spec())
            , network_mode=defaulted(string_spec(), None)
            , publish_all_ports=defaulted(boolean(), False)
            )
        , cpu=create_spec(image_objs.Cpu
            # NOTE(review): cap_add/cap_drop are booleans here but lists of
            # strings in the newer image_spec in this file — confirm which the
            # Cpu object expects
            , cap_add=defaulted(boolean(), None)
            , cpuset=defaulted(listof(string_spec()), None)
            , cap_drop=defaulted(boolean(), None)
            , mem_limit=defaulted(integer_spec(), 0)
            , cpu_shares=defaulted(integer_spec(), None)
            , memswap_limit=defaulted(integer_spec(), 0)
            )
        , devices=defaulted(listof(dictionary_spec()), None)
        , privileged=defaulted(boolean(), False)
        , restart_policy=defaulted(string_spec(), None)
        )
def stack_spec(self):
    """Spec for each stack"""
    return create_spec(stack_objs.Stack
        # Old option names that have moved under other sections
        , validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker1``")
        , validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``")
        , validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``")
        , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
        , validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``")

        , bespin = any_spec()

        # name/stack_name default to the configuration key; environment comes
        # from the {environment} option via the formatter
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , stack_name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , environment = formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter)

        , env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
        , build_env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
        , stack_name_env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)

        , tags = dictionary_spec()

        # Cloudformation template and parameter file locations
        , stack_json = valid_stack_json(default="{config_root}/{_key_name_1}.json")
        , params_json = valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json")
        , params_yaml = valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml")

        , build_first = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_after = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_timeout = defaulted(integer_spec(), 1200)
        , ignore_deps = defaulted(boolean(), False)

        , vars = dictof(string_spec(), stack_specs.var_spec(), nested=True)

        , skip_update_if_equivalent = listof(stack_specs.skipper_spec())

        , suspend_actions = defaulted(boolean(), False)

        , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))

        , artifact_retention_after_deployment = defaulted(boolean(), False)

        , command = optional_spec(string_spec())

        , netscaler = optional_spec(self.netscaler_spec)

        , dns = optional_spec(stack_specs.dns_spec(create_spec(stack_objs.DNS
            , vars = dictof(string_spec(), formatted(string_spec(), formatter=MergedOptionStringFormatter), nested=True)
            , providers = dictof(string_spec(), stack_specs.dns_provider_spec())
            # sites is delayed so it is only normalised when accessed
            , sites = delayed(dictof(string_spec(), stack_specs.dns_site_spec()))
            )))

        , scaling_options = create_spec(ScalingOptions
            , highest_min = defaulted(integer_spec(), 2)
            , instance_count_limit = defaulted(integer_spec(), 10)
            )

        , artifacts = container_spec(artifact_objs.ArtifactCollection, dictof(string_spec(), create_spec(artifact_objs.Artifact
            , not_created_here = defaulted(boolean(), False)
            , compression_type = string_choice_spec(["gz", "xz"])
            , history_length = integer_spec()
            , cleanup_prefix = optional_spec(string_spec())
            , upload_to = formatted(string_spec(), formatter=MergedOptionStringFormatter)
            , commands = listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand)
            , paths = listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath)
            , files = listof(create_spec(artifact_objs.ArtifactFile, validators.has_either(["content", "task"])
                , content = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                , task = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                , path = formatted(string_spec(), formatter=MergedOptionStringFormatter)
                , task_runner = formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter)
                ))
            )))

        , newrelic = optional_spec(create_spec(stack_objs.NewRelic
            , api_key = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , account_id = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , application_id = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
            , deployed_version = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        , downtimer_options = optional_spec(dictof(valid_string_spec(valid_alerting_system())
            , create_spec(stack_objs.DowntimerOptions
                , hosts = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                )
            ))

        , alerting_systems = optional_spec(dictof(string_spec(), self.alerting_system_spec))

        , ssh = optional_spec(create_spec(stack_objs.SSH
            , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
            , user = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_user = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_location = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance_key_location = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , address = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance = optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_path = formatted(defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , instance_key_path = formatted(defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , storage_type = formatted(defaulted(string_choice_spec(["url", "rattic"]), "url"), formatter=MergedOptionStringFormatter)
            , storage_host = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        , confirm_deployment = optional_spec(self.confirm_deployment_spec)
        )
class TransportTarget(dictobj.Spec):
    """
    This is responsible for bringing together the TransportBridge and the TransportItems

    It implements the ability to create and destroy args_for_run (the bridge),
    as well as creating a `script` that may be run with `script.run_with`.

    We also have higher order functions for finding and forgetting devices.

    When creating your own target do something like:

    .. code-block:: python

        class SocketTarget(TransportTarget):
            item_kls = lambda s: SocketItem
            bridge_kls = lambda s: SocketBridge
            description = dictobj.Field(sb.string_spec, default="Understands how to talk to a device over a TCP socket")

    ``protocol_register`` and ``final_future`` are retrieved automatically from
    ``Meta`` if we create the transport by doing ``TransportTarget.normalise(meta, **kwargs)``

    Note that the path on the meta cannot be root. So make your meta like:

    .. code-block:: python

        from input_algorithms.meta import Meta
        from option_merge import MergedOptions

        configuration = MergedOptions.using({"protocol_register": ..., "final_future": asyncio.Future()})

        # By saying `at("options")` on the meta we are putting it not at root
        # So when we resolve final_future we don't get recursive option errors
        meta = Meta(configuration, []).at("options")

    Generally you'll be passed in a transport via the ``tasks`` mechanism and
    you won't have to instantiate it yourself.
    """
    # formatted=True: "{protocol_register}"/"{final_future}" are resolved by the
    # option formatter into the actual objects from the configuration
    protocol_register = dictobj.Field(sb.overridden("{protocol_register}"), formatted=True)
    final_future = dictobj.Field(sb.overridden("{final_future}"), formatted=True)
    default_broadcast = dictobj.Field(sb.defaulted(sb.string_spec(), "255.255.255.255"))
    # Subclasses override these to supply their own item/bridge classes
    item_kls = lambda s: TransportItem
    bridge_kls = lambda s: TransportBridge
    description = dictobj.Field(sb.string_spec, default="Base transport functionality")

    @classmethod
    def create(kls, configuration, options=None):
        # Build a target from configuration; the meta must not be at root (see
        # class docstring) so we anchor it at "options"
        options = options if options is not None else configuration
        meta = Meta(configuration, []).at("options")
        return kls.FieldSpec(formatter=MergedOptionStringFormatter).normalise(meta, options)

    def script(self, raw):
        """Return us a ScriptRunnerIterator for the given `raw` against this `target`"""
        items = list(self.simplify(raw))
        if len(items) > 1:
            # Multiple simplified parts run as a Pipeline
            items = Pipeline(*items)
        else:
            items = items[0]
        return ScriptRunnerIterator(items, target=self)

    def session(self):
        # Async context manager that creates an args_for_run on entry and
        # closes it on exit
        info = {}

        class Session:
            async def __aenter__(s):
                afr = info["afr"] = await self.args_for_run()
                return afr

            async def __aexit__(s, exc_type, exc, tb):
                # Only close if __aenter__ actually created one
                if "afr" in info:
                    await self.close_args_for_run(info["afr"])

        return Session()

    async def args_for_run(self):
        """Create an instance of args_for_run. This is designed to be shared amongst many `script`"""
        afr = self.bridge_kls()(self.final_future, self
            , protocol_register=self.protocol_register
            , default_broadcast=self.default_broadcast
            )
        await afr.start()
        return afr

    async def close_args_for_run(self, args_for_run):
        """Close an args_for_run"""
        args_for_run.finish()

    async def get_list(self, args_for_run, broadcast=sb.NotSpecified, **kwargs):
        """Return us the targets that we can find from this bridge"""
        addr = broadcast if broadcast is not sb.NotSpecified else self.default_broadcast
        found = await args_for_run.find_devices(addr, **kwargs)
        # Serial numbers are the first 6 bytes of each found target, hex encoded
        return sorted([binascii.hexlify(target[:6]).decode() for target in found])

    def device_forgetter(self, args_for_run):
        """Return a function that may be used to forget a device on this args_for_run"""
        return args_for_run.forget

    def find(self, args_for_run):
        """Return a function that may be used to find a device on this args_for_run"""
        return args_for_run.find

    def simplify(self, script_part, chain=None):
        """
        Used by ``self.script`` to convert ``raw`` into TransportItems

        For each leaf child that is found, we gather messages into groups of
        messages with a ``pack`` method and yield ``self.item_kls()(group)``
        with messages that don't have a ``pack`` method yield as is.

        For example, let's say we have ``[p1, p2, m1, p3]`` where ``m1`` does
        not have a ``pack`` method and the others do, we'll yield:

        * ``self.item_kls()([p1, p2])``
        * ``m1``
        * ``self.item_kls()([p3])``
        """
        chain = [] if chain is None else chain
        if type(script_part) is not list:
            script_part = [script_part]

        final = []
        errors = []
        for p in script_part:
            if getattr(p, "has_children", False):
                # Containers are recursively simplified, recording the chain of names
                final.append(p.simplified(self.simplify, chain + [p.name]))
                continue
            else:
                if not hasattr(p, "pack"):
                    errors.append(p)
                else:
                    final.append(p)

        if errors:
            raise InvalidScript("Script part has no pack method!", parts=errors, chain=chain)

        # Group consecutive packable parts into one item; non-packable parts
        # (already-simplified children) flush the buffer and are yielded as-is
        buf = []
        for p in final:
            if hasattr(p, "pack"):
                buf.append(p)
            else:
                if buf:
                    yield self.item_kls()(buf)
                    buf = []
                yield p
        if buf:
            yield self.item_kls()(buf)
def image_spec(self):
    """Spec for each image"""
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs

    # NOTE(review): persistence_shell_spec appears unused below (persistence is
    # deprecated in this spec) — confirm before removing
    class persistence_shell_spec(Spec):
        """Make the persistence shell default to the shell on the image"""
        def normalise(self, meta, val):
            shell = defaulted(string_spec(), "/bin/bash").normalise(meta, meta.everything[["images", meta.key_names()["_key_name_2"]]].get("shell", NotSpecified))
            shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), shell).normalise(meta, val)
            return shell

    return create_spec(image_objs.Image
        # Removed features
        , validators.deprecated_key("persistence", "The persistence feature has been removed")
        , validators.deprecated_key("squash_after", "The squash feature has been removed")
        , validators.deprecated_key("squash_before_push", "The squash feature has been removed")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon = any_spec()

        # default the name to the key of the image
        , tag = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name = optional_spec(string_spec())
        , image_index = formatted(defaulted(string_spec(), ""), formatter=MergedOptionStringFormatter)
        , container_name = optional_spec(string_spec())
        , image_name_prefix = defaulted(string_spec(), "")
        , no_tty_option = defaulted(formatted(boolean(), formatter=MergedOptionStringFormatter), False)
        , user = defaulted(string_spec(), None)
        , configuration = any_spec()
        , vars = dictionary_spec()
        , assume_role = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , deleteable_image = defaulted(boolean(), False)
        , authentication = self.authentications_spec

        # The spec itself
        , shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
        , bash = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands = required(container_spec(Commands, listof(command_spec())))
        , cache_from = delayed(or_spec(boolean(), listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))))
        , cleanup_intermediate_images = defaulted(boolean(), True)
        , links = listof(specs.link_spec(), expect=image_objs.Link)
        , context = self.context_spec
        , wait_condition = optional_spec(self.wait_condition_spec)
        , lxc_conf = defaulted(filename_spec(), None)
        , volumes = create_spec(image_objs.Volumes
            , mount = listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with = listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )
        , dependency_options = dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached = defaulted(boolean(), False)
                , wait_condition = optional_spec(self.wait_condition_spec)
                )
            )
        , env = listof(specs.env_spec(), expect=image_objs.Environment)
        , ports = listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits = defaulted(listof(dictionary_spec()), None)
        , log_config = defaulted(listof(dictionary_spec()), None)
        , security_opt = defaulted(listof(string_spec()), None)
        , read_only_rootfs = defaulted(boolean(), False)
        , other_options = create_spec(other_options
            , start = dictionary_spec()
            , build = dictionary_spec()
            , create = dictionary_spec()
            , host_config = dictionary_spec()
            )
        , network = create_spec(image_objs.Network
            , dns = defaulted(listof(string_spec()), None)
            , mode = defaulted(string_spec(), None)
            , hostname = defaulted(string_spec(), None)
            , domainname = defaulted(string_spec(), None)
            , disabled = defaulted(boolean(), False)
            , dns_search = defaulted(listof(string_spec()), None)
            , extra_hosts = listof(string_spec())
            , network_mode = defaulted(string_spec(), None)
            , publish_all_ports = defaulted(boolean(), False)
            )
        , cpu = create_spec(image_objs.Cpu
            , cap_add = defaulted(listof(string_spec()), None)
            , cpuset_cpus = defaulted(string_spec(), None)
            , cpuset_mems = defaulted(string_spec(), None)
            , cap_drop = defaulted(listof(string_spec()), None)
            , mem_limit = defaulted(integer_spec(), 0)
            , cpu_shares = defaulted(integer_spec(), None)
            , memswap_limit = defaulted(integer_spec(), 0)
            )
        , devices = defaulted(listof(dictionary_spec()), None)
        , privileged = defaulted(boolean(), False)
        , restart_policy = defaulted(string_spec(), None)
        )
def stack_spec(self):
    """Spec for each stack"""
    # Returns an input_algorithms-style spec that validates one stack's
    # configuration and produces a stack_objs.Stack.  The positional
    # deprecated_key validators reject old option names with a pointer to
    # their replacement; everything else is a keyword option on the Stack.
    # Values wrapped in formatted(...) are interpolated with
    # MergedOptionStringFormatter ("{_key_name_1}" is the stack's key in the
    # configuration, "{config_root}"/"{environment}" come from the merged
    # options -- presumably; verify against the formatter's context).
    return create_spec(
        stack_objs.Stack,
        # Reject renamed/removed options with a migration hint.
        validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker1``"),
        validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``"),
        validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``"),
        validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
        validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``"),
        # Core identity: name/stack_name default to the configuration key;
        # key_name is always the configuration key (overridden, not user-set).
        bespin=any_spec(),
        name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        stack_name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        environment=formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter),
        # Environment variable lists, each normalised into EnvironmentVariable objects.
        env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        build_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        stack_name_env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
        tags=self.tags_spec,
        termination_protection=defaulted(boolean(), False),
        # CloudFormation template/parameter/policy file locations, defaulted
        # to conventional paths under the configuration root.
        stack_json=valid_stack_json(default="{config_root}/{_key_name_1}.json"),
        stack_yaml=valid_stack_yaml(default="{config_root}/{_key_name_1}.yaml"),
        params_json=valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json"),
        params_yaml=valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml"),
        stack_policy=valid_policy_json(default="{config_root}/{_key_name_1}-policy.json"),
        role_name=formatted(string_spec(), formatter=MergedOptionStringFormatter),
        # Ordering of dependent builds around this stack.
        build_first=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_after=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_timeout=defaulted(integer_spec(), 1200),
        ignore_deps=defaulted(boolean(), False),
        # delayed(...) defers normalisation until first access, so vars can
        # reference values that are only resolvable later.
        vars=delayed(dictof(string_spec(), stack_specs.var_spec(), nested=True)),
        skip_update_if_equivalent=listof(stack_specs.skipper_spec()),
        suspend_actions=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        artifact_retention_after_deployment=defaulted(boolean(), False),
        command=optional_spec(string_spec()),
        netscaler=optional_spec(self.netscaler_spec),
        # Stackdriver deployment notifications (opt-in).
        notify_stackdriver=defaulted(boolean(), False),
        stackdriver=optional_spec(create_spec(stack_objs.Stackdriver,
            api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            deployment_version=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "<version>"))),
        # DNS management: per-provider configuration plus site definitions
        # (sites are delayed for the same late-resolution reason as vars).
        dns=optional_spec(stack_specs.dns_spec(create_spec(stack_objs.DNS,
            vars=dictof(string_spec(), formatted(string_spec(), formatter=MergedOptionStringFormatter), nested=True),
            providers=dictof(string_spec(), stack_specs.dns_provider_spec()),
            sites=delayed(dictof(string_spec(), stack_specs.dns_site_spec()))))),
        scaling_options=create_spec(ScalingOptions,
            highest_min=defaulted(integer_spec(), 2),
            instance_count_limit=defaulted(integer_spec(), 10)),
        # Build artifacts: a named collection where each artifact describes
        # how it is assembled (commands/paths/files) and where it is uploaded.
        artifacts=container_spec(artifact_objs.ArtifactCollection, dictof(string_spec(), create_spec(artifact_objs.Artifact,
            not_created_here=defaulted(boolean(), False),
            compression_type=string_choice_spec(["gz", "xz"]),
            history_length=integer_spec(),
            cleanup_prefix=optional_spec(string_spec()),
            upload_to=formatted(string_spec(), formatter=MergedOptionStringFormatter),
            commands=listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand),
            paths=listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath),
            files=listof(create_spec(artifact_objs.ArtifactFile,
                # A generated file comes from literal content or a task, never neither.
                validators.has_either(["content", "task"]),
                content=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                task=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                path=formatted(string_spec(), formatter=MergedOptionStringFormatter),
                task_runner=formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter)))))),
        # NewRelic deployment notifications (opt-in; all fields required once present).
        newrelic=optional_spec(create_spec(stack_objs.NewRelic,
            api_key=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            account_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            application_id=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            env=listof(stack_specs.env_spec(), expect=stack_objs.EnvironmentVariable),
            deployed_version=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)))),
        # Downtime suppression per alerting system; keys are validated
        # against the known alerting systems.
        downtimer_options=optional_spec(dictof(
            valid_string_spec(valid_alerting_system()),
            create_spec(stack_objs.DowntimerOptions,
                hosts=listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))))),
        alerting_systems=optional_spec(dictof(string_spec(), self.alerting_system_spec)),
        # SSH access configuration, including bastion hop and key locations.
        ssh=optional_spec(create_spec(stack_objs.SSH,
            validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``"),
            user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            bastion=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            bastion_user=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            bastion_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            instance_key_location=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            address=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            instance=optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
            auto_scaling_group_name=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            bastion_key_path=formatted(defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"), formatter=MergedOptionStringFormatter),
            instance_key_path=formatted(defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"), formatter=MergedOptionStringFormatter),
            storage_type=formatted(defaulted(string_choice_spec(["url", "rattic"]), "url"), formatter=MergedOptionStringFormatter),
            storage_host=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))),
        confirm_deployment=optional_spec(self.confirm_deployment_spec))
def image_spec(self):
    """Spec for each image"""
    # Returns the spec that validates one image's configuration and produces
    # an image_objs.Image.  Imports are local to avoid import cycles between
    # the spec modules -- presumably; verify if they can move to the top.
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(
        image_objs.Image,
        # Change the context options
        validators.deprecated_key("exclude_context", "Use ``context.exclude``"),
        validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``"),
        validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``"),
        validators.deprecated_key("parent_dir", "Use ``context.parent_dir``"),
        # Changed how volumes_from works
        validators.deprecated_key("volumes_from", "Use ``volumes.share_with``"),
        # Deprecated link
        validators.deprecated_key("link", "Use ``links``"),
        # Harpoon options
        harpoon=any_spec(),
        # default the name to the key of the image; key_name is always the
        # configuration key (overridden, so not user-settable).
        name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        image_name=optional_spec(string_spec()),
        image_index=defaulted(string_spec(), ""),
        container_name=optional_spec(string_spec()),
        image_name_prefix=defaulted(string_spec(), ""),
        user=defaulted(string_spec(), None),
        # NOTE(review): time.time() is evaluated once per image_spec() call,
        # so every image normalised from one call shares the same mtime.
        mtime=defaulted(any_spec(), time.time()),
        configuration=any_spec(),
        vars=dictionary_spec(),
        deleteable_image=defaulted(boolean(), False),
        # The spec itself
        # bash/command are delayed so their format strings resolve lazily.
        bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        commands=required(container_spec(Commands, listof(command_spec()))),
        # squash options accept either a bare boolean or a list of extra commands.
        squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        # Recursive images: {_key_name_2} resolves the image's own key inside
        # the "images.<key>.image_name" path -- TODO confirm the nesting level.
        recursive=optional_spec(create_spec(image_objs.Recursive,
            action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
            persist=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
            image_name=delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter)))),
        links=listof(specs.link_spec(), expect=image_objs.Link),
        context=self.context_spec,
        wait_condition=optional_spec(self.wait_condition_spec),
        lxc_conf=defaulted(filename_spec(), None),
        volumes=create_spec(image_objs.Volumes,
            mount=listof(specs.mount_spec(), expect=image_objs.Mount),
            share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))),
        dependency_options=dictof(specs.image_name_spec(),
            create_spec(image_objs.DependencyOptions,
                attached=defaulted(boolean(), False),
                wait_condition=optional_spec(self.wait_condition_spec))),
        env=listof(specs.env_spec(), expect=image_objs.Environment),
        ports=listof(specs.port_spec(), expect=image_objs.Port),
        ulimits=defaulted(listof(dictionary_spec()), None),
        log_config=defaulted(listof(dictionary_spec()), None),
        security_opt=defaulted(listof(string_spec()), None),
        read_only_rootfs=defaulted(boolean(), False),
        # Raw pass-through dictionaries for the docker client calls.
        other_options=create_spec(other_options,
            start=dictionary_spec(),
            build=dictionary_spec(),
            create=dictionary_spec(),
            host_config=dictionary_spec()),
        network=create_spec(image_objs.Network,
            dns=defaulted(listof(string_spec()), None),
            mode=defaulted(string_spec(), None),
            hostname=defaulted(string_spec(), None),
            domainname=defaulted(string_spec(), None),
            disabled=defaulted(boolean(), False),
            dns_search=defaulted(listof(string_spec()), None),
            extra_hosts=listof(string_spec()),
            network_mode=defaulted(string_spec(), None),
            publish_all_ports=defaulted(boolean(), False)),
        # NOTE(review): cap_add/cap_drop are validated as boolean() here,
        # but Docker's CapAdd/CapDrop take lists of capability strings, and
        # the sibling spec earlier in this file uses
        # defaulted(listof(string_spec()), None) -- confirm which shape the
        # consumers of image_objs.Cpu expect before changing.
        cpu=create_spec(image_objs.Cpu,
            cap_add=defaulted(boolean(), None),
            cpuset=defaulted(listof(string_spec()), None),
            cap_drop=defaulted(boolean(), None),
            mem_limit=defaulted(integer_spec(), 0),
            cpu_shares=defaulted(integer_spec(), None),
            memswap_limit=defaulted(integer_spec(), 0)),
        devices=defaulted(listof(dictionary_spec()), None),
        privileged=defaulted(boolean(), False),
        restart_policy=defaulted(string_spec(), None))