def harpoon_spec(self):
    """Spec for harpoon options"""
    fstring = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    fboolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    return create_spec(Harpoon
        , config = file_spec()
        , extra = defaulted(fstring, "")
        , debug = defaulted(boolean(), False)

        # Task/image selection
        , chosen_task = defaulted(fstring, "list_tasks")
        , chosen_image = defaulted(fstring, "")

        # Behaviour flags (all formattable booleans)
        , flat = defaulted(fboolean, False)
        , no_cleanup = defaulted(fboolean, False)
        , interactive = defaulted(fboolean, True)
        , silent_build = defaulted(fboolean, False)
        , keep_replaced = defaulted(fboolean, False)
        , ignore_missing = defaulted(fboolean, False)
        , no_intervention = defaulted(fboolean, False)
        , intervene_afterwards = defaulted(fboolean, False)
        , do_push = defaulted(fboolean, False)
        , only_pushable = defaulted(fboolean, False)

        # Docker plumbing and output streams
        , docker_context = any_spec()
        , docker_context_maker = any_spec()
        , stdout = defaulted(any_spec(), sys.stdout)
        , tty_stdout = defaulted(any_spec(), lambda: sys.stdout)
        , tty_stderr = defaulted(any_spec(), lambda: sys.stderr)
        )
def harpoon_spec(self):
    """Spec for harpoon options"""
    formatted_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    formatted_boolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)

    def flag(default):
        # Most harpoon options are formattable booleans with a plain default
        return defaulted(formatted_boolean, default)

    return create_spec(
        Harpoon,
        config=file_spec(),
        extra=defaulted(formatted_string, ""),
        debug=defaulted(boolean(), False),
        chosen_task=defaulted(formatted_string, "list_tasks"),
        chosen_image=defaulted(formatted_string, ""),
        flat=flag(False),
        no_cleanup=flag(False),
        interactive=flag(True),
        silent_build=flag(False),
        keep_replaced=flag(False),
        ignore_missing=flag(False),
        no_intervention=flag(False),
        intervene_afterwards=flag(False),
        do_push=flag(False),
        only_pushable=flag(False),
        docker_context=any_spec(),
        docker_context_maker=any_spec(),
        stdout=defaulted(any_spec(), sys.stdout),
        tty_stdout=defaulted(any_spec(), lambda: sys.stdout),
        tty_stderr=defaulted(any_spec(), lambda: sys.stderr),
    )
class grant_statement_spec(statement_spec):
    """Spec for a single grant statement (grantee/retiree iam resources plus operations)."""

    def args(self, self_type, self_name):
        # grantee and operations are mandatory; the rest are free-form
        return {
            'grantee': sb.required(resource_spec(self_type, self_name, only="iam")),
            'retiree': resource_spec(self_type, self_name, only="iam"),
            'operations': sb.required(sb.listof(sb.string_spec())),
            'constraints': sb.any_spec(),
            'grant_tokens': sb.any_spec(),
        }

    def final_kls(self, *args, **kwargs):
        return GrantStatement(*args, **kwargs)
def bespin_spec(self):
    """Spec for bespin options"""
    fstring = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    fboolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    return create_spec(
        Bespin,
        validators.deprecated_key("region", "Please use ``environments.<env>.region``"),
        config=file_spec(),
        configuration=any_spec(),
        assume_role=optional_spec(string_spec()),
        dry_run=defaulted(boolean(), False),
        flat=defaulted(boolean(), False),
        environment=optional_spec(string_spec()),
        no_assume_role=defaulted(fboolean, False),
        chosen_task=defaulted(fstring, "list_tasks"),
        chosen_stack=defaulted(fstring, ""),
        chosen_artifact=defaulted(fstring, ""),
        extra_imports=listof(imports.import_spec()),
    )
def bespin_spec(self):
    """Spec for bespin options"""
    formatted_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    formatted_boolean = formatted(boolean(), MergedOptionStringFormatter, expected_type=bool)
    return create_spec(Bespin
        # region moved under each environment
        , validators.deprecated_key("region", "Please use ``environments.<env>.region``")
        , config = file_spec()
        , configuration = any_spec()
        , assume_role = optional_spec(string_spec())
        , extra = defaulted(string_spec(), "")
        , dry_run = defaulted(boolean(), False)
        , flat = defaulted(boolean(), False)
        , environment = optional_spec(string_spec())
        , no_assume_role = defaulted(formatted_boolean, False)
        , chosen_task = defaulted(formatted_string, "list_tasks")
        , chosen_stack = defaulted(formatted_string, "")
        , chosen_artifact = defaulted(formatted_string, "")
        , extra_imports = listof(imports.import_spec())
        )
def normalise(self, meta, val):
    """
    Yield iam/sts arn strings for every (name, account) pair in val.

    ``__self__`` entries expand to this policy's own role in the default
    account; each arn is additionally expanded per user when the resource
    specifies users.
    """
    # FIX: these normalisations don't depend on the account being processed,
    # so compute them once instead of once per account.
    # (spec.normalise is assumed side-effect free here — TODO confirm)
    users = sb.listof(sb.string_spec()).normalise(meta.at("users"), self.resource.get('users', NotSpecified))
    names = sb.listof(sb.any_spec()).normalise(meta, val)

    pairs = []
    has_self = False
    for account_id in self.accounts(meta):
        for name in names:
            if name == "__self__":
                if self.self_type != 'role':
                    raise BadPolicy("No __self__ iam role for this policy", meta=meta)
                has_self = True
            else:
                pairs.append((name, account_id))

    if has_self:
        pairs.append(("role/{0}".format(self.self_name), self.default_account_id(meta)))

    for name, account_id in pairs:
        # assumed-role arns live under the sts service, everything else under iam
        service = "sts" if name.startswith("assumed-role") else "iam"
        arn = "arn:aws:{0}::{1}:{2}".format(service, account_id, name)
        if not users:
            yield arn
        else:
            for user in users:
                yield "{0}/{1}".format(arn, user)
def normalise(self, meta, val):
    """
    Yield iam/sts arn strings for every (name, account) pair in val, with
    string names run through the MergedOptions formatter first.
    """
    # FIX: these are loop-invariant — the users list, the list of names and
    # the formatted spec object were all rebuilt for every account (the spec
    # even per name).  Build them once.
    # (spec.normalise is assumed side-effect free here — TODO confirm)
    users = sb.listof(sb.string_spec()).normalise(meta.at("users"), self.resource.get('users', NotSpecified))
    names = sb.listof(sb.any_spec()).normalise(meta, val)
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    pairs = []
    has_self = False
    for account_id in self.accounts(meta):
        for index, name in enumerate(names):
            if name == "__self__":
                if self.self_type != 'role':
                    raise BadPolicy("No __self__ iam role for this policy", meta=meta)
                has_self = True
            else:
                if isinstance(name, six.string_types):
                    name = formatted_string.normalise(meta.indexed_at(index), name)
                pairs.append((name, account_id))

    if has_self:
        pairs.append(("role/{0}".format(self.self_name), self.default_account_id(meta)))

    for name, account_id in pairs:
        # assumed-role arns live under the sts service, everything else under iam
        service = "sts" if name.startswith("assumed-role") else "iam"
        arn = "arn:aws:{0}::{1}:{2}".format(service, account_id, name)
        if not users:
            yield arn
        else:
            for user in users:
                yield "{0}/{1}".format(arn, user)
def normalise(self, meta, val):
    """Normalise a list of policy resources into a sorted list of arn strings."""
    result = []
    for index, item in enumerate(sb.listof(sb.any_spec()).normalise(meta, val)):
        # one candidate spec per supported resource type
        by_type = (
            ("iam", iam_specs(item, self.self_type, self.self_name)),
            ("kms", kms_specs(item, self.self_type, self.self_name)),
            ("s3", s3_specs(item, self.self_type, self.self_name)),
            ("arn", arn_specs(item, self.self_type, self.self_name)),
        )

        # a plain string is already an arn-ish value; pass it through
        if isinstance(item, six.string_types):
            result.append(item)
            continue

        for typ, spec in by_type:
            if typ not in item:
                continue
            if self.only and typ not in self.only:
                raise BadPolicy("Sorry, don't support this resource type here", wanted=typ, available=self.only, meta=meta)
            result.extend(spec.normalise(meta.indexed_at(index).at(typ), item[typ]))

    return sorted(result)
def normalise(self, meta, val):
    """Normalise an EncryptionKey, expanding templates and building its policy document."""
    if 'use' in val:
        # Merge in a named template before normalising
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    key_name = meta.key_names()['_key_name_0']

    key = sb.create_spec(
        EncryptionKey,
        name=sb.overridden(key_name),
        location=sb.required(formatted_string),
        description=formatted_string,
        grant=sb.listof(grant_statement_spec('key', key_name)),
        admin_users=sb.listof(sb.any_spec()),
        permission=sb.listof(sb.dictionary_spec()),
        no_root_access=sb.defaulted(sb.boolean(), False),
    ).normalise(meta, val)

    # Start from the explicit permission statements and add the implicit ones
    statements = key.permission
    if not key.no_root_access:
        statements.append({"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""})
    if key.admin_users:
        for admin_user in key.admin_users:
            statements.append({"principal": admin_user, "action": "kms:*", "resource": {"kms": "__self__"}, "Sid": ""})

    key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('key', key_name))).normalise(meta.at("admin_users"), statements)
    return key
def normalise(self, meta, val):
    """Normalise an EncryptionKey, expanding templates and building its policy document."""
    if "use" in val:
        # Merge in a named template before normalising
        template = val["use"]
        if template not in meta.everything["templates"]:
            available = list(meta.everything["templates"].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything["templates"][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    key_name = meta.key_names()["_key_name_0"]

    key = sb.create_spec(EncryptionKey
        , name = sb.overridden(key_name)
        , location = sb.required(formatted_string)
        , description = formatted_string
        , grant = sb.listof(grant_statement_spec("key", key_name))
        , admin_users = sb.listof(sb.any_spec())
        ).normalise(meta, val)

    # Root always gets full access; each admin user gets kms:* on this key
    statements = [{"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""}]
    if key.admin_users:
        for admin_user in key.admin_users:
            statements.append({"principal": admin_user, "action": "kms:*", "resource": {"kms": "__self__"}, "Sid": ""})

    key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec("key", key_name))).normalise(meta.at("admin_users"), statements)
    return key
def ultradns_site_spec(self, this):
    """Spec for a single UltraDNS site entry."""
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    return sb.create_spec(
        UltraDNSSite,
        # default the name from the configuration key
        name=sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        ttl=sb.optional_spec(sb.integer_spec()),
        provider=sb.any_spec(),
        record_type=sb.required(formatted_string),
        zone=sb.required(formatted_string),
        domain=sb.required(formatted_string),
        environments=sb.required(self.dns_environment_spec(this)),
    )
def make_spec(self):
    """
    Build the spec for this statement's arguments.

    Returns ``(args, spec)`` where ``args`` maps ``(name, capitalized_name)``
    pairs to the original specs and ``spec`` is a set_options accepting either
    casing of each argument: the lowercase name defaulted to NotSpecified and
    the capitalized alias as any_spec.
    """
    args = {}
    for arg, spec in self.args(self.self_type, self.self_name).items():
        arg, capitalized = capitalize(arg)
        args[(arg, capitalized)] = spec

    kwargs = {}
    for (arg, capitalized), spec in list(args.items()):
        kwargs[arg] = sb.defaulted(spec, NotSpecified)
        # FIX: don't let the capitalized alias clobber a spec that another
        # argument already registered under that exact name (matches the
        # guarded version of make_spec used elsewhere in the project)
        if capitalized not in kwargs:
            kwargs[capitalized] = sb.any_spec()

    return args, sb.set_options(**kwargs)
def normalise_filled(self, meta, val):
    """Normalise a lambda code definition into an S3Code, InlineCode or DirectoryCode."""
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)
    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)
    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(S3Code
            , key = formatted_string
            , bucket = formatted_string
            , version = sb.defaulted(sb.string_spec(), NotSpecified)
            ).normalise(meta, val['s3'])

    if "inline" in val:
        # the runtime lives on the parent of this code definition
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at("runtime"), runtime)
        return sb.create_spec(InlineCode
            , code = sb.string_spec()
            , runtime = sb.overridden(runtime)
            ).normalise(meta, {"code": val['inline']})

    # directory: accept either a bare string or {"directory": ..., "exclude": ...}
    directory = val['directory']
    if isinstance(directory, six.string_types):
        directory = {"directory": directory}
    if 'directory' in directory:
        directory['directory'] = formatted_string.normalise(meta.at("directory").at("directory"), directory['directory'])
    return sb.create_spec(DirectoryCode
        , directory = sb.directory_spec()
        , exclude = sb.listof(sb.string_spec())
        ).normalise(meta, directory)
def normalise(self, meta, val):
    """
    Normalise a [COPY, {options}] command into docker COPY commands.

    ``from`` may be an integer build-stage index (used as-is) or an image
    definition that gets resolved via complex_from_image_spec.
    """
    # FIX: removed an unused ``formatted_string`` local and an unused
    # ``HarpoonSpec`` import that were never referenced in this function.
    if "from" not in val:
        raise BadSpecValue("Specifying [COPY, {options}] must contain 'from' in the options", meta=meta)

    if type(val["from"]) is int:
        # an integer refers to a build stage index
        val["from_image"] = val["from"]
    else:
        img, conf = complex_from_image_spec().normalise(meta.at("from"), val["from"])
        val["from_image"] = conf
        val["image"] = img

    val = sb.create_spec(CommandCopyExtra
        , from_image = sb.any_spec()
        , path = sb.required(sb.string_spec())
        , to = sb.required(sb.string_spec())
        , image = sb.optional_spec(sb.any_spec())
        ).normalise(meta, val)

    return list(val.commands(meta))
def normalise(self, meta, val):
    """Normalise a content-add dictionary, resolving its image first."""
    path_spec = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    img, conf = complex_from_image_spec().normalise(meta.at("image"), val["image"])
    val["conf"] = conf
    return sb.create_spec(
        CommandContentAddDict,
        image=sb.overridden(img),
        conf=sb.any_spec(),
        path=path_spec,
        images=sb.overridden(meta.everything.get("images", [])),
        docker_api=sb.overridden(meta.everything["harpoon"].docker_api),
    ).normalise(meta, val)
def ultradns_site_spec(self, this):
    """Spec for a single UltraDNS site entry."""
    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)
    return sb.create_spec(UltraDNSSite
        # default the name from the configuration key
        , name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , ttl = sb.optional_spec(sb.integer_spec())
        , provider = sb.any_spec()
        , record_type = sb.required(formatted_string)
        , zone = sb.required(formatted_string)
        , domain = sb.required(formatted_string)
        , environments = sb.required(self.dns_environment_spec(this))
        )
def setup(self, **kwargs):
    """Validate the provided options and store each one as an attribute."""
    account_spec = sb.set_options(
        account_id=sb.required(sb.string_spec()),
        role_to_assume=sb.required(sb.string_spec()),
    )
    options = sb.set_options(
        accounts=sb.required(sb.dictof(sb.string_spec(), account_spec)),
        ordered_accounts=sb.required(sb.listof(sb.string_spec())),
        cloudability_auth_token=sb.required(sb.any_spec()),
    ).normalise(Meta({}, []), kwargs)

    for key, val in options.items():
        setattr(self, key, val)
def make_spec(self):
    """
    Build the spec for this statement's arguments.

    Returns (filtered_args, spec): filtered_args keeps only the entries whose
    lowercase name really is lowercase, and spec accepts either casing of each
    argument (lowercase defaulted to NotSpecified, capitalized as any_spec).
    """
    args = {}
    for arg, spec in self.args(self.self_type, self.self_name).items():
        arg, capitalized = capitalize(arg)
        args[(arg, capitalized)] = spec

    kwargs = {}
    for (arg, capitalized), spec in args.items():
        kwargs[arg] = sb.defaulted(spec, NotSpecified)
        # don't clobber a spec another argument registered under this name
        if capitalized not in kwargs:
            kwargs[capitalized] = sb.any_spec()

    filtered_args = {(a, c): s for (a, c), s in args.items() if a and a[0].islower()}
    return filtered_args, sb.set_options(**kwargs)
def normalise(self, meta, val):
    """Normalise a content-add dictionary, resolving a string image into an anonymous conf."""
    from harpoon.option_spec.harpoon_specs import HarpoonSpec
    from harpoon.option_spec.image_objs import Image

    fmt = sb.formatted(sb.or_spec(sb.string_spec(), sb.typed(Image)), formatter=MergedOptionStringFormatter)
    img = sb.set_options(image=fmt).normalise(meta, val)["image"]
    val["conf"] = img
    if isinstance(img, six.string_types):
        # a bare string becomes a single-FROM image conf
        val["conf"] = HarpoonSpec().image_spec.normalise(meta.at("image"), {"commands": ["FROM {0}".format(img)]})
        val["conf"].image_name = img

    return sb.create_spec(
        CommandContentAddDict,
        image=sb.overridden(img),
        conf=sb.any_spec(),
        path=fmt,
        images=sb.overridden(meta.everything.get("images", [])),
        docker_context=sb.overridden(meta.everything["harpoon"].docker_context),
    ).normalise(meta, val)
def normalise(self, meta, val):
    """Normalise a list of policy resources into a sorted list of arn strings."""
    items = sb.listof(sb.any_spec()).normalise(meta, val)

    result = []
    for index, item in enumerate(items):
        s3_spec = s3_specs(item, self.self_type, self.self_name)
        iam_spec = iam_specs(item, self.self_type, self.self_name)
        kms_spec = kms_specs(item, self.self_type, self.self_name)
        arn_spec = arn_specs(item, self.self_type, self.self_name)

        if isinstance(item, six.string_types):
            # plain strings pass straight through
            result.append(item)
        else:
            for typ, spec in (("iam", iam_spec), ("kms", kms_spec), ("s3", s3_spec), ("arn", arn_spec)):
                if typ in item:
                    if self.only and typ not in self.only:
                        raise BadPolicy("Sorry, don't support this resource type here", wanted=typ, available=self.only, meta=meta)
                    result.extend(spec.normalise(meta.indexed_at(index).at(typ), item[typ]))

    return sorted(result)
def normalise_filled(self, meta, val):
    """Normalise a lambda code definition into an S3Code, InlineCode or DirectoryCode."""
    val = sb.dictof(sb.string_choice_spec(["s3", "inline", "directory"]), sb.any_spec()).normalise(meta, val)
    if not val:
        raise BadSpecValue("Please specify s3, inline or directory for your code", meta=meta)
    if len(val) > 1:
        raise BadSpecValue("Please only specify one of s3, inline or directory for your code", got=list(val.keys()), meta=meta)

    formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)

    if "s3" in val:
        return sb.create_spec(
            S3Code,
            key=formatted_string,
            bucket=formatted_string,
            version=sb.defaulted(sb.string_spec(), NotSpecified),
        ).normalise(meta, val['s3'])
    elif "inline" in val:
        # the runtime lives on the parent of this code definition
        path = [p for p, _ in meta._path]
        path.pop()
        runtime = meta.everything['.'.join(path)].get("runtime", "python")
        runtime = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at("runtime"), runtime)
        return sb.create_spec(
            InlineCode,
            code=sb.string_spec(),
            runtime=sb.overridden(runtime),
        ).normalise(meta, {"code": val['inline']})
    else:
        # directory: accept either a bare string or {"directory": ...}
        directory = val['directory']
        if isinstance(val['directory'], six.string_types):
            directory = {"directory": val['directory']}
        if 'directory' in directory:
            directory['directory'] = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter).normalise(meta.at("directory").at("directory"), directory['directory'])
        return sb.create_spec(
            DirectoryCode,
            directory=sb.directory_spec(),
            exclude=sb.listof(sb.string_spec()),
        ).normalise(meta, directory)
def selection(kls, kls_name, wanted, **kwargs):
    """
    Return a new dictobj() that only creates a new class with a selection of the fields

    We can also mark some fields as required, some as optional, or all as
    optional/required.  For example:

    .. code-block:: python

        class Blah(dictobj.Spec):
            one = dictobj.Field(sb.string_spec())
            two = dictobj.Field(sb.string_spec())
            three = dictobj.Field(sb.string_spec())

        Meh = Blah.selection("Meh", ["one", "two"], all_optional=True)
        meh = Meh(one="1")
        assert meh.one == "1"
        assert meh.two is sb.NotSpecified
        assert not hasattr(meh, "three")

    Keyword options:

    optional / required
        lists of keys to make optional/required

    all_required / all_optional
        booleans saying to set all keys to required/optional

    .. note:: The keyword options only work for dictobj.Spec objects and are
      ignored for normal dictobj objects
    """
    fields = kls.fields
    name_map = {}
    any_spec = sb.any_spec()

    # Make a map of name of the field to the field itself
    # Fields may be either <name> or (<name>, <default>)
    for field in fields:
        field_name = field
        if type(field) is tuple:
            field_name = field_name[0]
        name_map[field_name] = field

    # Make sure we are selecting fields that exist
    missing = set(wanted) - set(name_map)
    if missing:
        raise BadSpec("Tried to make a selection from keys that don't exist", missing=missing, available=list(name_map), wanted=wanted)

    # The final result isn't inheriting from kls so that we can not inherit fields we don't want
    # But we still want everything else from kls to pretend it's inherited
    attrs = {}
    extra = set(dir(kls)) - set(dir(dictobj)) - set(name_map) - set(["FieldSpec"])
    # FIX: was getattr(dictobj, k) — but by construction every name in
    # ``extra`` exists on kls and NOT on dictobj, so that always raised
    # AttributeError as soon as kls defined anything extra
    attrs.update(dict((k, getattr(kls, k)) for k in extra))

    # Collect our new fields
    new_fields = {}
    for field_name, field in name_map.items():
        if field_name in wanted:
            if type(fields) is dict:
                new_fields[field] = fields[field]
            else:
                new_fields[field] = any_spec

    if not hasattr(kls, "FieldSpec"):
        # We weren't selecting from dictobj.Spec, so just set the fields and be done
        # Normal dictobj has no normalise functionality, so no wrapping applies
        attrs["fields"] = new_fields
        return type(kls_name, (dictobj, ), attrs)

    # For dictobj.Spec we set attrs on the class rather than fields,
    # so seed the attrs with our cloned fields
    for name in name_map:
        if name in wanted:
            attrs[name] = getattr(kls, name)
            if getattr(attrs[name], "is_input_algorithms_field", False):
                attrs[name] = attrs[name].clone()

    def wrap(spec, wrapper):
        """Helper to wrap a spec with some wrapper"""
        h = None  # ignored help string
        s = spec
        # A spec can be <options> or (<help string>, <options>)
        if type(s) is tuple:
            h, s = spec

        if callable(s):
            # FIX: bind the current callable as a default argument — the old
            # ``lambda: wrapper(s())`` referenced the rebound name ``s`` (the
            # lambda itself) and recursed forever when called
            s = lambda inner=s: wrapper(inner())
        else:
            if getattr(s, "is_input_algorithms_field", False):
                # We don't want to override the default with optional_spec
                if wrapper is sb.optional_spec and s.default is not sb.NotSpecified:
                    s = s.clone()
                else:
                    # We also don't want to override an existing wrapper
                    if s.wrapper is not sb.NotSpecified:
                        # FIX: capture the existing wrapper before rebinding s,
                        # otherwise the lambda would call the clone's own
                        # wrapper (itself) recursively
                        existing = s.wrapper
                        s = s.clone(wrapper=lambda spec: wrapper(existing(spec)))
                    else:
                        s = s.clone(wrapper=wrapper)
            else:
                # FIX: same late-binding problem as above — bind the original
                # value, not the name being reassigned
                s = lambda inner=s: wrapper(inner or any_spec)
        return s

    def all_wrap(key, wrapper):
        """helper for wrapping all fields"""
        if kwargs.get(key):
            for field, val in new_fields.items():
                attrs[field] = wrap(val, wrapper)

    def specific_wrap(key, wrapper):
        """Helper for wrapping specific keys"""
        missing = []
        for field_name in kwargs.get(key, []):
            # FIX: name_map[field_name] raised a bare KeyError for keys that
            # don't exist at all, bypassing the BadSpec below
            field = name_map.get(field_name)
            if field is not None and field in new_fields:
                attrs[field] = wrap(new_fields[field], wrapper)
            else:
                missing.append(field_name)
        if missing:
            raise BadSpec("Tried to wrap keys that didn't exist", wrap_as=key, missing=missing, available=list(name_map), wanted=kwargs.get(key))

    # Apply the wrapping options; required overrides all_optional,
    # optional overrides all_required
    all_wrap("all_optional", sb.optional_spec)
    specific_wrap("required", sb.required)
    all_wrap("all_required", sb.required)
    specific_wrap("optional", sb.optional_spec)

    # Finally, we return our new class!
    return type(kls_name, (dictobj.Spec, ), attrs)
# NOTE(review): this chunk mixes module-level imports, a python2.6 shim that
# replaces moto's mock_s3 with a skip decorator, an Artifact spec where every
# option is optional, and the start of a noseOfYeti ``describe``/``it`` test
# block (non-standard syntax translated at import time).  The final ``it``
# body appears truncated in this chunk — confirm against the original test
# file before editing.  Whitespace/newlines were lost in extraction, so the
# code is left byte-identical rather than risk a wrong reconstruction.
import nose import boto import mock import sys import os if sys.version_info[0] == 2 and sys.version_info[1] == 6: # This can be removed when we can use latest Httpretty again def mock_s3(func): def wrapped(*args): raise nose.SkipTest("No moto support for python2.6 atm") return wrapped else: from moto import mock_s3 optional_any = lambda: sb.optional_spec(sb.any_spec()) artifact_spec = sb.create_spec(Artifact , compression_type = optional_any() , history_length = optional_any() , upload_to = optional_any() , paths = optional_any() , files = optional_any() , commands = optional_any() ) describe BespinCase, "ArtifactCollection": describe "clean_old_artifacts": @mock_s3 it "does nothing if dry_run is True": s3 = S3() conn = s3.conn = boto.connect_s3()
def image_spec(self):
    """Spec for each image"""
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``")
        , validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``")
        , validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``")
        , validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")
        , validators.deprecated_key("recursive", "Use ``persistence``")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon=any_spec()

        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name=optional_spec(string_spec())
        , image_index=defaulted(string_spec(), "")
        , container_name=optional_spec(string_spec())
        , image_name_prefix=defaulted(string_spec(), "")
        , user=defaulted(string_spec(), None)
        # NOTE(review): time.time() is evaluated once, when this spec object is
        # built — every normalise shares that same default mtime; confirm intended
        , mtime=defaulted(any_spec(), time.time())
        , configuration=any_spec()
        , vars=dictionary_spec()
        , deleteable_image=defaulted(boolean(), False)

        # The spec itself
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands=required(container_spec(Commands, listof(command_spec())))
        , squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))
        , squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec()))))

        # Options for keeping folders between builds
        , persistence=optional_spec(create_spec(image_objs.Persistence
            , validators.deprecated_key("persist", "Use ``folders``")
            , action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , folders=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , cmd=optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , shell=defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
            , image_name=delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter))
            ))

        # How this image relates to other images/containers
        , links=listof(specs.link_spec(), expect=image_objs.Link)
        , context=self.context_spec
        , wait_condition=optional_spec(self.wait_condition_spec)
        , lxc_conf=defaulted(filename_spec(), None)
        , volumes=create_spec(image_objs.Volumes
            , mount=listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )
        , dependency_options=dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached=defaulted(boolean(), False)
                , wait_condition=optional_spec(self.wait_condition_spec)
                )
            )

        # Container runtime options
        , env=listof(specs.env_spec(), expect=image_objs.Environment)
        , ports=listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits=defaulted(listof(dictionary_spec()), None)
        , log_config=defaulted(listof(dictionary_spec()), None)
        , security_opt=defaulted(listof(string_spec()), None)
        , read_only_rootfs=defaulted(boolean(), False)

        # Catch-all dictionaries passed through to the docker api calls
        , other_options=create_spec(other_options
            , start=dictionary_spec()
            , build=dictionary_spec()
            , create=dictionary_spec()
            , host_config=dictionary_spec()
            )

        # Networking configuration
        , network=create_spec(image_objs.Network
            , dns=defaulted(listof(string_spec()), None)
            , mode=defaulted(string_spec(), None)
            , hostname=defaulted(string_spec(), None)
            , domainname=defaulted(string_spec(), None)
            , disabled=defaulted(boolean(), False)
            , dns_search=defaulted(listof(string_spec()), None)
            , extra_hosts=listof(string_spec())
            , network_mode=defaulted(string_spec(), None)
            , publish_all_ports=defaulted(boolean(), False)
            )

        # Cpu/memory limits and capability flags
        , cpu=create_spec(image_objs.Cpu
            , cap_add=defaulted(boolean(), None)
            , cpuset=defaulted(listof(string_spec()), None)
            , cap_drop=defaulted(boolean(), None)
            , mem_limit=defaulted(integer_spec(), 0)
            , cpu_shares=defaulted(integer_spec(), None)
            , memswap_limit=defaulted(integer_spec(), 0)
            )
        , devices=defaulted(listof(dictionary_spec()), None)
        , privileged=defaulted(boolean(), False)
        , restart_policy=defaulted(string_spec(), None)
        )
# NOTE(review): the leading ``if isinstance(val, Command) ... return result``
# is the tail of a normalise method whose ``def`` (and enclosing class) are not
# in this chunk, so everything is left byte-identical.  The remainder defines
# harpoon's command parsing pieces: a validator requiring "<ACTION> <COMMAND>"
# strings, a dict form that only accepts the ADD key, and the top-level
# command_spec matcher dispatching on input type (string/list/dict/Command).
if isinstance(val, Command): result.append(val) else: result.extend(val) return result class has_a_space(validators.Validator): def validate(self, meta, val): if ' ' not in val: raise BadOption( "Expected string to have a space (<ACTION> <COMMAND>)", meta=meta, got=val) return val string_command_spec = lambda: sb.container_spec( Command, sb.valid_string_spec(has_a_space())) # Only support ADD commands for the dictionary representation atm dict_key = sb.valid_string_spec(validators.choice("ADD")) dictionary_command_spec = lambda: convert_dict_command_spec( sb.dictof(dict_key, complex_ADD_spec())) # The main spec # We match against, strings, lists, dictionaries and Command objects with different specs command_spec = lambda: sb.match_spec( (six.string_types, string_command_spec()), (list, array_command_spec()), (dict, dictionary_command_spec()), (Command, sb.any_spec()))
# NOTE(review): the leading ``, name = ... , password = ... )`` is the tail of
# a create_spec call (a dns provider spec, by the look of the fields) whose
# opening — and the formatted_string it references — are outside this chunk;
# left byte-identical.  After that: the ArtifactCommand spec (copy/modify/
# command/timeout/temp_dir/add_into_tar), the parameter file specs (json list
# of ParameterKey/ParameterValue pairs, yaml dictionary) and a minimal schema
# for cloudformation stack json.
, name = sb.formatted(sb.overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter) , provider_type = sb.required(sb.string_spec()) , username = sb.required(formatted_string) , password = sb.required(formatted_string) ) formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter) artifact_command_spec = lambda : sb.create_spec(ArtifactCommand , copy = sb.listof(artifact_path_spec()) , modify = sb.dictof(sb.string_spec(), sb.set_options(append=sb.listof(formatted_string))) , command = sb.listof(formatted_string) , timeout = sb.defaulted(sb.integer_spec(), 600) , temp_dir = sb.defaulted(formatted_string, None) , add_into_tar = sb.listof(artifact_path_spec()) ) params_json_spec = lambda: sb.listof(sb.set_options( ParameterKey = sb.required(sb.any_spec()) , ParameterValue = sb.required(sb.any_spec()) )) params_yaml_spec = lambda: sb.dictionary_spec() stack_json_spec = lambda: sb.set_options( Resources = sb.required(sb.dictof(sb.string_spec(), sb.set_options(Type=sb.required(sb.string_spec()), Properties=sb.optional_spec(sb.dictionary_spec())))) , Parameters = sb.optional_spec(sb.dictof(sb.string_spec(), sb.dictionary_spec())) , Outputs = sb.optional_spec(sb.dictof(sb.string_spec(), sb.dictionary_spec())) )
# NOTE(review): a reformatted variant of the artifact/params/stack specs.
# ``params_yaml_spec`` here formats each value through the MergedOptions
# formatter (richer than the plain dictionary_spec variant elsewhere).  The
# final ``stack_json_spec`` is truncated mid-call in this chunk (the Outputs
# key and closing parentheses are missing) — recover the tail from the
# original file before editing; left byte-identical here.
formatted_string = sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter) artifact_command_spec = lambda: sb.create_spec( ArtifactCommand, copy=sb.listof(artifact_path_spec()), modify=sb.dictof(sb.string_spec(), sb.set_options(append=sb.listof(formatted_string))), command=sb.listof(formatted_string), timeout=sb.defaulted(sb.integer_spec(), 600), temp_dir=sb.defaulted(formatted_string, None), add_into_tar=sb.listof(artifact_path_spec())) params_json_spec = lambda: sb.listof( sb.set_options(ParameterKey=sb.required(sb.any_spec()), ParameterValue=sb.required(sb.any_spec()))) params_yaml_spec = lambda: sb.dictof( sb.string_spec(), sb.formatted(sb.string_or_int_as_string_spec(), formatter=MergedOptionStringFormatter)) stack_json_spec = lambda: sb.set_options(Resources=sb.required( sb.dictof( sb.string_spec(), sb.set_options(Type=sb.required(sb.string_spec()), Properties=sb.optional_spec(sb.dictionary_spec())))), Parameters=sb.optional_spec( sb.dictof(sb.string_spec(), sb.dictionary_spec())),
# NOTE(review): the opening ``num_results = ...`` lines are the tail of a
# method (returns the result count once it reaches self.value, else -1) whose
# ``def`` is outside this chunk; left byte-identical.  Type.install registers
# packet field types as (name, size_bits, struct format / conversion, python
# type).  json_spec recursively matches json-shaped values (scalars, lists,
# None, with dict-of-string as the fallback), and static_conversion_from_spec
# caches spec instances keyed by python type so per-packet value lookups do
# not rebuild them.
num_results = len(results) if num_results >= self.value: return num_results return -1 Type.install(("Bool", 1, bool, bool), ("Int8", 8, "<b", int), ("Uint8", 8, "<B", int), ("BoolInt", 8, "<?", (bool, int)), ("Int16", 16, "<h", int), ("Uint16", 16, "<H", int), ("Int32", 32, "<i", int), ("Uint32", 32, "<I", int), ("Int64", 64, "<q", int), ("Uint64", 64, "<Q", int), ("Float", 32, "<f", float), ("Double", 64, "<d", float), ("Bytes", None, None, bytes), ("String", None, None, str), ("Reserved", None, None, bytes), ("CSV", None, None, (list, str, ",")), ("JSON", None, None, json)) json_spec = sb.match_spec( (bool, sb.any_spec()), (int, sb.any_spec()), (float, sb.any_spec()), (str, sb.any_spec()), (list, lambda: sb.listof(json_spec)), (type(None), sb.any_spec()), fallback=lambda: sb.dictof(sb.string_spec(), json_spec)) # Here so we don't have to instantiate these every time we get a value from a packet static_conversion_from_spec = { any: sb.any_spec(), bool: boolean(), float: float_spec(), (bool, int): boolean_as_int_spec(), json: json_spec }
# NOTE(review): collapsed chunk — the original indentation is lost, so the
# code is left byte-identical.  ``normalise`` flattens the values of
# self.spec's result into a list of Command objects and is almost certainly a
# method of a spec class (likely the convert_dict_command_spec referenced
# below) whose ``class`` line is outside this chunk — confirm before
# reformatting.  The rest defines the "<ACTION> <COMMAND>" validator, the
# ADD-only dict form, and the top-level command_spec matcher dispatching on
# string/list/dict/Command input.
def normalise(self, meta, val): result = [] for val in self.spec.normalise(meta, val).values(): if isinstance(val, Command): result.append(val) else: result.extend(val) return result class has_a_space(validators.Validator): def validate(self, meta, val): if ' ' not in val: raise BadOption("Expected string to have a space (<ACTION> <COMMAND>)", meta=meta, got=val) return val string_command_spec = lambda: sb.container_spec(Command, sb.valid_string_spec(has_a_space())) # Only support ADD commands for the dictionary representation atm dict_key = sb.valid_string_spec(validators.choice("ADD")) dictionary_command_spec = lambda: convert_dict_command_spec(sb.dictof(dict_key, complex_ADD_spec())) # The main spec # We match against, strings, lists, dictionaries and Command objects with different specs command_spec = lambda: sb.match_spec( (six.string_types, string_command_spec()) , (list, array_command_spec()) , (dict, dictionary_command_spec()) , (Command, sb.any_spec()) )
def stack_spec(self):
    """
    Spec for each stack

    Normalises one entry under ``stacks`` in the configuration into a
    ``stack_objs.Stack``.  ``{_key_name_1}`` in the defaults refers to the
    stack's key in the configuration; formatted values are resolved with
    MergedOptionStringFormatter.
    """
    return create_spec(stack_objs.Stack
        # Keys that have moved; using them raises a deprecation error
        , validators.deprecated_key("url_checker", "Use ``confirm_deployment.url_checker1``")
        , validators.deprecated_key("deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``")
        , validators.deprecated_key("sns_confirmation", "Use ``confirm_deployment.sns_confirmation``")
        , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
        , validators.deprecated_key("instance_count_limit", "Use ``scaling_options.instance_count_limit``")

        , bespin = any_spec()

        # Identity: name/stack_name default to the configuration key
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , stack_name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , environment = formatted(overridden("{environment}"), formatter=MergedOptionStringFormatter)

        # Environment variables for deploys, builds and stack-name formatting
        , env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
        , build_env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
        , stack_name_env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)

        , tags = dictionary_spec()

        # CloudFormation template and parameter file locations
        , stack_json = valid_stack_json(default="{config_root}/{_key_name_1}.json")
        , params_json = valid_params_json(default="{config_root}/{environment}/{_key_name_1}-params.json")
        , params_yaml = valid_params_yaml(default="{config_root}/{environment}/{_key_name_1}-params.yaml")

        # Build ordering and dependency behaviour
        , build_first = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_after = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , build_timeout = defaulted(integer_spec(), 1200)
        , ignore_deps = defaulted(boolean(), False)

        , vars = dictof(string_spec(), stack_specs.var_spec(), nested=True)

        , skip_update_if_equivalent = listof(stack_specs.skipper_spec())

        , suspend_actions = defaulted(boolean(), False)

        , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))

        , artifact_retention_after_deployment = defaulted(boolean(), False)

        , command = optional_spec(string_spec())

        , netscaler = optional_spec(self.netscaler_spec)

        # DNS configuration; sites is delayed so it's only normalised on access
        , dns = optional_spec(stack_specs.dns_spec(create_spec(stack_objs.DNS
            , vars = dictof(string_spec(), formatted(string_spec(), formatter=MergedOptionStringFormatter), nested=True)
            , providers = dictof(string_spec(), stack_specs.dns_provider_spec())
            , sites = delayed(dictof(string_spec(), stack_specs.dns_site_spec()))
            )))

        , scaling_options = create_spec(ScalingOptions
            , highest_min = defaulted(integer_spec(), 2)
            , instance_count_limit = defaulted(integer_spec(), 10)
            )

        # Artifacts built and uploaded for this stack, keyed by artifact name
        , artifacts = container_spec(artifact_objs.ArtifactCollection, dictof(string_spec(), create_spec(artifact_objs.Artifact
            , not_created_here = defaulted(boolean(), False)
            , compression_type = string_choice_spec(["gz", "xz"])
            , history_length = integer_spec()
            , cleanup_prefix = optional_spec(string_spec())
            , upload_to = formatted(string_spec(), formatter=MergedOptionStringFormatter)
            , commands = listof(stack_specs.artifact_command_spec(), expect=artifact_objs.ArtifactCommand)
            , paths = listof(stack_specs.artifact_path_spec(), expect=artifact_objs.ArtifactPath)
            # Each file needs either inline content or a task that produces it
            , files = listof(create_spec(artifact_objs.ArtifactFile, validators.has_either(["content", "task"])
                , content = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                , task = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                , path = formatted(string_spec(), formatter=MergedOptionStringFormatter)
                , task_runner = formatted(always_same_spec("{task_runner}"), formatter=MergedOptionStringFormatter)
                ))
            )))

        # NewRelic deployment notifications
        , newrelic = optional_spec(create_spec(stack_objs.NewRelic
            , api_key = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , account_id = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , application_id = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , env = listof(stack_specs.env_spec(), expect=stack_objs.Environment)
            , deployed_version = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        # Downtimer options, keyed by a known alerting system name
        , downtimer_options = optional_spec(dictof(valid_string_spec(valid_alerting_system())
            , create_spec(stack_objs.DowntimerOptions
                , hosts = listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))
                )
            ))

        , alerting_systems = optional_spec(dictof(string_spec(), self.alerting_system_spec))

        # SSH access options, optionally via a bastion host
        , ssh = optional_spec(create_spec(stack_objs.SSH
            , validators.deprecated_key("autoscaling_group_id", "Use ``auto_scaling_group_name``")
            , user = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_user = required(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_location = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance_key_location = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , address = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , instance = optional_spec(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
            , auto_scaling_group_name = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            , bastion_key_path = formatted(defaulted(string_spec(), "{config_root}/{environment}/bastion_ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , instance_key_path = formatted(defaulted(string_spec(), "{config_root}/{environment}/ssh_key.pem"), formatter=MergedOptionStringFormatter)
            , storage_type = formatted(defaulted(string_choice_spec(["url", "rattic"]), "url"), formatter=MergedOptionStringFormatter)
            , storage_host = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
            ))

        , confirm_deployment = optional_spec(self.confirm_deployment_spec)
        )
def image_spec(self):
    """
    Spec for each image

    Normalises one entry under ``images`` into an ``image_objs.Image``.
    ``{_key_name_1}`` in the defaults refers to the image's key in the
    configuration; formatted values are resolved with MergedOptionStringFormatter.
    """
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs

    class persistence_shell_spec(Spec):
        """Make the persistence shell default to the shell on the image"""
        # NOTE(review): this class is not referenced by the spec below (the
        # persistence feature is deprecated above) — looks like dead code;
        # confirm before removing.
        def normalise(self, meta, val):
            # First resolve the shell configured on the image itself (default /bin/bash),
            # then use that as the default when normalising the provided value
            shell = defaulted(string_spec(), "/bin/bash").normalise(meta, meta.everything[["images", meta.key_names()["_key_name_2"]]].get("shell", NotSpecified))
            shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), shell).normalise(meta, val)
            return shell

    return create_spec(image_objs.Image
        # Removed features; using them raises a deprecation error
        , validators.deprecated_key("persistence", "The persistence feature has been removed")
        , validators.deprecated_key("squash_after", "The squash feature has been removed")
        , validators.deprecated_key("squash_before_push", "The squash feature has been removed")

        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")

        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")

        # Harpoon options
        , harpoon = any_spec()

        # default the name to the key of the image
        , tag = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))
        , name = formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , key_name = formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter)
        , image_name = optional_spec(string_spec())
        , image_index = formatted(defaulted(string_spec(), ""), formatter=MergedOptionStringFormatter)
        , container_name = optional_spec(string_spec())
        , image_name_prefix = defaulted(string_spec(), "")
        , no_tty_option = defaulted(formatted(boolean(), formatter=MergedOptionStringFormatter), False)

        , user = defaulted(string_spec(), None)
        , configuration = any_spec()

        , vars = dictionary_spec()
        , assume_role = optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))

        , deleteable_image = defaulted(boolean(), False)

        , authentication = self.authentications_spec

        # The spec itself
        , shell = defaulted(formatted(string_spec(), formatter=MergedOptionStringFormatter), "/bin/bash")
        , bash = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , command = delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter)))
        , commands = required(container_spec(Commands, listof(command_spec())))
        , cache_from = delayed(or_spec(boolean(), listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))))
        , cleanup_intermediate_images = defaulted(boolean(), True)

        , links = listof(specs.link_spec(), expect=image_objs.Link)

        , context = self.context_spec
        , wait_condition = optional_spec(self.wait_condition_spec)

        , lxc_conf = defaulted(filename_spec(), None)

        # Volumes mounted into, and shared with, this container
        , volumes = create_spec(image_objs.Volumes
            , mount = listof(specs.mount_spec(), expect=image_objs.Mount)
            , share_with = listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))
            )

        # Per-dependency behaviour, keyed by image name
        , dependency_options = dictof(specs.image_name_spec()
            , create_spec(image_objs.DependencyOptions
                , attached = defaulted(boolean(), False)
                , wait_condition = optional_spec(self.wait_condition_spec)
                )
            )

        # Runtime container options
        , env = listof(specs.env_spec(), expect=image_objs.Environment)
        , ports = listof(specs.port_spec(), expect=image_objs.Port)
        , ulimits = defaulted(listof(dictionary_spec()), None)
        , log_config = defaulted(listof(dictionary_spec()), None)
        , security_opt = defaulted(listof(string_spec()), None)
        , read_only_rootfs = defaulted(boolean(), False)

        # Escape hatch for raw docker API options at each lifecycle stage
        , other_options = create_spec(other_options
            , start = dictionary_spec()
            , build = dictionary_spec()
            , create = dictionary_spec()
            , host_config = dictionary_spec()
            )

        , network = create_spec(image_objs.Network
            , dns = defaulted(listof(string_spec()), None)
            , mode = defaulted(string_spec(), None)
            , hostname = defaulted(string_spec(), None)
            , domainname = defaulted(string_spec(), None)
            , disabled = defaulted(boolean(), False)
            , dns_search = defaulted(listof(string_spec()), None)
            , extra_hosts = listof(string_spec())
            , network_mode = defaulted(string_spec(), None)
            , publish_all_ports = defaulted(boolean(), False)
            )

        , cpu = create_spec(image_objs.Cpu
            , cap_add = defaulted(listof(string_spec()), None)
            , cpuset_cpus = defaulted(string_spec(), None)
            , cpuset_mems = defaulted(string_spec(), None)
            , cap_drop = defaulted(listof(string_spec()), None)
            , mem_limit = defaulted(integer_spec(), 0)
            , cpu_shares = defaulted(integer_spec(), None)
            , memswap_limit = defaulted(integer_spec(), 0)
            )

        , devices = defaulted(listof(dictionary_spec()), None)
        , privileged = defaulted(boolean(), False)
        , restart_policy = defaulted(string_spec(), None)
        )
def image_spec(self):
    """
    Spec for each image

    Normalises one entry under ``images`` into an ``image_objs.Image``.
    ``{_key_name_1}`` in the defaults refers to the image's key in the
    configuration.
    """
    from harpoon.option_spec import image_specs as specs
    from harpoon.option_spec import image_objs
    return create_spec(
        image_objs.Image
        # Change the context options
        , validators.deprecated_key("exclude_context", "Use ``context.exclude``"),
        validators.deprecated_key("use_git_timestamps", "Use ``context.use_git_timestamps``"),
        validators.deprecated_key("respect_gitignore", "Use ``context.use_gitignore``"),
        validators.deprecated_key("parent_dir", "Use ``context.parent_dir``")
        # Changed how volumes_from works
        , validators.deprecated_key("volumes_from", "Use ``volumes.share_with``")
        # Deprecated link
        , validators.deprecated_key("link", "Use ``links``")
        # Harpoon options
        , harpoon=any_spec()
        # default the name to the key of the image
        , name=formatted(defaulted(string_spec(), "{_key_name_1}"), formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"), formatter=MergedOptionStringFormatter),
        image_name=optional_spec(string_spec()),
        image_index=defaulted(string_spec(), ""),
        container_name=optional_spec(string_spec()),
        image_name_prefix=defaulted(string_spec(), ""),
        user=defaulted(string_spec(), None),
        # NOTE(review): time.time() here is evaluated once when the spec is
        # built, not per-normalise — all images share that one default mtime;
        # confirm that's intended.
        mtime=defaulted(any_spec(), time.time()),
        configuration=any_spec(),
        vars=dictionary_spec(),
        deleteable_image=defaulted(boolean(), False)
        # The spec itself
        , bash=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        command=delayed(optional_spec(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
        commands=required(container_spec(Commands, listof(command_spec()))),
        # squash options accept either a boolean or a list of extra commands
        squash_after=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        squash_before_push=optional_spec(or_spec(boolean(), container_spec(Commands, listof(command_spec())))),
        # Recursive image building: the action to run and the paths to persist between builds
        recursive=optional_spec(
            create_spec(
                image_objs.Recursive,
                action=required(formatted(string_spec(), formatter=MergedOptionStringFormatter)),
                persist=required(listof(formatted(string_spec(), formatter=MergedOptionStringFormatter))),
                image_name=delayed(many_format(overridden("images.{_key_name_2}.image_name"), formatter=MergedOptionStringFormatter)))),
        links=listof(specs.link_spec(), expect=image_objs.Link),
        context=self.context_spec,
        wait_condition=optional_spec(self.wait_condition_spec),
        lxc_conf=defaulted(filename_spec(), None),
        # Volumes mounted into, and shared with, this container
        volumes=create_spec(
            image_objs.Volumes,
            mount=listof(specs.mount_spec(), expect=image_objs.Mount),
            share_with=listof(formatted(string_spec(), MergedOptionStringFormatter, expected_type=image_objs.Image))),
        # Per-dependency behaviour, keyed by image name
        dependency_options=dictof(
            specs.image_name_spec(),
            create_spec(
                image_objs.DependencyOptions,
                attached=defaulted(boolean(), False),
                wait_condition=optional_spec(self.wait_condition_spec))),
        # Runtime container options
        env=listof(specs.env_spec(), expect=image_objs.Environment),
        ports=listof(specs.port_spec(), expect=image_objs.Port),
        ulimits=defaulted(listof(dictionary_spec()), None),
        log_config=defaulted(listof(dictionary_spec()), None),
        security_opt=defaulted(listof(string_spec()), None),
        read_only_rootfs=defaulted(boolean(), False),
        # Escape hatch for raw docker API options at each lifecycle stage
        other_options=create_spec(
            other_options,
            start=dictionary_spec(),
            build=dictionary_spec(),
            create=dictionary_spec(),
            host_config=dictionary_spec()),
        network=create_spec(
            image_objs.Network,
            dns=defaulted(listof(string_spec()), None),
            mode=defaulted(string_spec(), None),
            hostname=defaulted(string_spec(), None),
            domainname=defaulted(string_spec(), None),
            disabled=defaulted(boolean(), False),
            dns_search=defaulted(listof(string_spec()), None),
            extra_hosts=listof(string_spec()),
            network_mode=defaulted(string_spec(), None),
            publish_all_ports=defaulted(boolean(), False)),
        cpu=create_spec(
            image_objs.Cpu,
            # NOTE(review): cap_add/cap_drop are boolean() here but
            # listof(string_spec()) in the newer image_spec in this file —
            # docker expects a list of capability names; confirm which is right.
            cap_add=defaulted(boolean(), None),
            cpuset=defaulted(listof(string_spec()), None),
            cap_drop=defaulted(boolean(), None),
            mem_limit=defaulted(integer_spec(), 0),
            cpu_shares=defaulted(integer_spec(), None),
            memswap_limit=defaulted(integer_spec(), 0)),
        devices=defaulted(listof(dictionary_spec()), None),
        privileged=defaulted(boolean(), False),
        restart_policy=defaulted(string_spec(), None))
import time import nose import mock import sys import os if sys.version_info[0] == 2 and sys.version_info[1] == 6: # This can be removed when we can use latest Httpretty again def mock_s3_deprecated(func): def wrapped(*args): raise nose.SkipTest("No moto support for python2.6 atm") return wrapped else: from moto import mock_s3 optional_any = lambda: sb.optional_spec(sb.any_spec()) artifact_spec = sb.create_spec(Artifact , compression_type = optional_any() , history_length = optional_any() , upload_to = optional_any() , paths = optional_any() , files = optional_any() , commands = optional_any() ) describe BespinCase, "ArtifactCollection": describe "clean_old_artifacts": @mock_s3 it "does nothing if dry_run is True": s3 = S3() environment = {}
def selection(kls, kls_name, wanted, **kwargs):
    """
    Return a new dictobj() that only creates a new class with a selection of the fields

    We can also mark some fields as required, some as optional, or all as optional/required.

    For example:

    .. code-block:: python

        class Blah(dictobj.Spec):
            one = dictobj.Field(sb.string_spec())
            two = dictobj.Field(sb.string_spec())
            three = dictobj.Field(sb.string_spec())

        Meh = Blah.selection("Meh", ["one", "two"], all_optional=True)

        meh = Meh(one="1")
        assert meh.one == "1"
        assert meh.two is sb.NotSpecified
        assert not hasattr(meh, "three")

    keyword Options are as follows:

    optional
        list of keys to make optional

    required
        list of keys to make required

    all_required
        boolean saying to set all keys to required

    all_optional
        boolean saying to set all keys to optional

    This works by returning a new class with only some of the fields in the fields list

    .. note:: The keyword options only work for dictobj.Spec objects and are ignored
      for normal dictobj objects
    """
    fields = kls.fields
    name_map = {}
    any_spec = sb.any_spec()

    # Make a map of name of the field to the field itself
    # Fields may be either <name> or (<name>, <default>)
    for field in fields:
        field_name = field
        if type(field) is tuple:
            field_name = field_name[0]
        name_map[field_name] = field

    # Make sure we are selecting fields that exist
    missing = set(wanted) - set(name_map)
    if missing:
        raise BadSpec(
            "Tried to make a selection from keys that don't exist",
            missing=missing,
            available=list(name_map),
            wanted=wanted)

    # The final result isn't inheriting from kls so that we can not inherit fields we don't want
    # But we still want everything else from kls to pretend it's inherited.......
    # I doubt this will work with super though..... feel free to raise an issue if this is undesirable...
    attrs = {}
    extra = set(dir(kls)) - set(dir(dictobj)) - set(name_map) - set(["FieldSpec"])
    # BUG FIX: copy the extra attributes from ``kls`` itself. ``extra`` is by
    # construction the set of attributes dictobj does *not* have, so the old
    # ``getattr(dictobj, k)`` raised AttributeError for every one of them.
    attrs.update(dict((k, getattr(kls, k)) for k in extra))

    # Collect our new fields
    new_fields = {}
    for field_name, field in name_map.items():
        if field_name in wanted:
            if type(fields) is dict:
                new_fields[field] = fields[field]
            else:
                new_fields[field] = any_spec

    if not hasattr(kls, "FieldSpec"):
        # We weren't selecting from dictobj.Spec, so let's just set the fields and be done
        # Normal dictobj has no normalise functionality and so no point in setting such things
        attrs["fields"] = new_fields
        return type(kls_name, (dictobj, ), attrs)

    # Ok, so, for dictobj.Spec, we set attrs on the class rather than fields
    # So let's seed the attrs with our cloned fields
    for name in name_map:
        if name in wanted:
            attrs[name] = getattr(kls, name)
            if getattr(attrs[name], "is_input_algorithms_field", False):
                attrs[name] = attrs[name].clone()

    def wrap(spec, wrapper):
        """Helper to wrap a spec with some wrapper"""
        h = None  # help text when the field is (<help>, <options>); currently unused
        s = spec

        # A spec can be <options> or (<help string, <options>)
        if type(s) is tuple:
            h, s = spec

        # <options> can be a callable, an input_algorithms field or, well, it shouldn't be anything else...
        if callable(s):
            # BUG FIX: bind the current callable as a default argument.  A plain
            # ``lambda: wrapper(s())`` would look up ``s`` at call time, find the
            # lambda itself (since ``s`` is rebound on the next line) and recurse forever.
            s = lambda inner=s: wrapper(inner())
        else:
            if getattr(s, "is_input_algorithms_field", False):
                # We don't want to override the default with optional_spec
                if wrapper is sb.optional_spec and s.default is not sb.NotSpecified:
                    s = s.clone()
                else:
                    # We also don't want to override an existing wrapper
                    if s.wrapper is not sb.NotSpecified:
                        # BUG FIX: capture the existing wrapper before ``s`` is
                        # rebound to the clone, otherwise the new wrapper would
                        # call itself via ``s.wrapper`` and recurse forever.
                        s = s.clone(wrapper=lambda spec, inner=s.wrapper: wrapper(inner(spec)))
                    else:
                        s = s.clone(wrapper=wrapper)
            else:
                # BUG FIX: same late-binding problem as above — capture the
                # original value before rebinding ``s`` to the lambda.
                s = lambda inner=s: wrapper(inner or any_spec)

        return s

    def all_wrap(key, wrapper):
        """helper for wrapping all fields"""
        if kwargs.get(key):
            for field, val in new_fields.items():
                attrs[field] = wrap(val, wrapper)

    def specific_wrap(key, wrapper):
        """Helper for wrapping specific keys"""
        missing = []
        for field_name in kwargs.get(key, []):
            field = name_map[field_name]
            if field in new_fields:
                attrs[field] = wrap(new_fields[field], wrapper)
            else:
                missing.append(field_name)
        if missing:
            raise BadSpec(
                "Tried to wrap keys that didn't exist",
                wrap_as=key,
                missing=missing,
                available=list(name_map),
                wanted=kwargs.get(key))

    # Ok, now we use our wrap helper for optional settings
    all_wrap("all_optional", sb.optional_spec)
    # Required is used to override all_optional
    specific_wrap("required", sb.required)
    # Set all things to required if so desired
    all_wrap("all_required", sb.required)
    # And override all_required with optional
    specific_wrap("optional", sb.optional_spec)

    # Finally, we return our new class!
    return type(kls_name, (dictobj.Spec, ), attrs)
            # Tail of the enclosing ``normalise`` (its ``def`` and the opening
            # ``if`` are before this chunk): pick the spec matching the action,
            # then flatten the normalised values into a list of Commands.
            # NOTE(review): nesting depth reconstructed from collapsed source —
            # confirm against the enclosing class/method.
            spec = complex_ADD_spec()
        else:
            spec = complex_COPY_spec()

        result = []
        for val in spec.normalise(meta.at(items[0]), items[1]):
            if isinstance(val, Command):
                result.append(val)
            else:
                result.extend(val)
        return result

class has_a_space(validators.Validator):
    """Validator that fails unless the string contains at least one space"""
    def validate(self, meta, val):
        if ' ' not in val:
            raise BadOption("Expected string to have a space (<ACTION> <COMMAND>)", meta=meta, got=val)
        return val

# A "<ACTION> <COMMAND>" string becomes a single Command object
string_command_spec = lambda: sb.container_spec(Command, sb.valid_string_spec(has_a_space()))

# The main spec
# We match against, strings, lists, dictionaries and Command objects with different specs
command_spec = lambda: sb.match_spec(
      (six.string_types, string_command_spec())
    , (list, array_command_spec())
    , (dict, convert_dict_command_spec())
    , (Command, sb.any_spec())
    )
def normalise_filled(self, meta, val):
    """
    Normalise one netscaler configuration item into its config object.

    The item's type comes from its grandparent key (``{_key_name_1}``) and its
    name from its parent key (``{_key_name_0}``) in the configuration.
    """
    fmt = lambda spec: formatted(spec, formatter=MergedOptionStringFormatter)

    config_type = fmt(overridden("{_key_name_1}")).normalise(meta, val)
    config_name = fmt(overridden("{_key_name_0}")).normalise(meta, val)

    # No specialised classes registered yet, so every type falls back to the generic config
    special = {}
    kls = special.get(config_type, GenericNetscalerConfig)

    # Option values that are strings get formatted; anything else passes through
    formatted_options = dictof(
        string_spec(),
        match_spec((six.string_types, fmt(string_spec())), fallback=any_spec()))

    options = {
        "typ": overridden(config_type),
        "name": overridden(config_name),
        "bindings": dictof(string_spec(), netscaler_binding_spec()),
        "tags": listof(string_spec()),
        "options": formatted_options,
        "overrides": formatted_options,
        "binding_options": formatted_options,
        "environments": optional_spec(listof(valid_environment_spec())),
    }

    # sslcertkey entries may additionally link to other certificates
    if config_type == "sslcertkey":
        options["link"] = listof(string_spec())

    normalised = set_options(**options).normalise(meta, val)
    return kls(**{key: normalised[key] for key in options})
def stack_spec(self):
    """
    Spec for each stack

    Normalises one entry under ``stacks`` into a ``stack_objs.Stack``.
    ``{_key_name_1}`` in the defaults refers to the stack's key in the
    configuration; formatted values are resolved with MergedOptionStringFormatter.
    """
    return create_spec(
        stack_objs.Stack,
        # Keys that have moved; using them raises a deprecation error
        validators.deprecated_key(
            "url_checker", "Use ``confirm_deployment.url_checker1``"),
        validators.deprecated_key(
            "deploys_s3_path", "Use ``confirm_deployment.deploys_s3_path``"),
        validators.deprecated_key(
            "sns_confirmation", "Use ``confirm_deployment.sns_confirmation``"),
        validators.deprecated_key("autoscaling_group_id",
                                  "Use ``auto_scaling_group_name``"),
        validators.deprecated_key(
            "instance_count_limit",
            "Use ``scaling_options.instance_count_limit``"),
        bespin=any_spec(),
        # Identity: name/stack_name default to the configuration key
        name=formatted(defaulted(string_spec(), "{_key_name_1}"),
                       formatter=MergedOptionStringFormatter),
        key_name=formatted(overridden("{_key_name_1}"),
                           formatter=MergedOptionStringFormatter),
        stack_name=formatted(defaulted(string_spec(), "{_key_name_1}"),
                             formatter=MergedOptionStringFormatter),
        environment=formatted(overridden("{environment}"),
                              formatter=MergedOptionStringFormatter),
        # Environment variables for deploys, builds and stack-name formatting
        env=listof(stack_specs.env_spec(),
                   expect=stack_objs.EnvironmentVariable),
        build_env=listof(stack_specs.env_spec(),
                         expect=stack_objs.EnvironmentVariable),
        stack_name_env=listof(stack_specs.env_spec(),
                              expect=stack_objs.EnvironmentVariable),
        tags=self.tags_spec,
        termination_protection=defaulted(boolean(), False),
        # CloudFormation template, parameter and policy file locations
        stack_json=valid_stack_json(
            default="{config_root}/{_key_name_1}.json"),
        stack_yaml=valid_stack_yaml(
            default="{config_root}/{_key_name_1}.yaml"),
        params_json=valid_params_json(
            default="{config_root}/{environment}/{_key_name_1}-params.json"
        ),
        params_yaml=valid_params_yaml(
            default="{config_root}/{environment}/{_key_name_1}-params.yaml"
        ),
        stack_policy=valid_policy_json(
            default="{config_root}/{_key_name_1}-policy.json"),
        role_name=formatted(string_spec(),
                            formatter=MergedOptionStringFormatter),
        # Build ordering and dependency behaviour
        build_first=listof(
            formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_after=listof(
            formatted(string_spec(), formatter=MergedOptionStringFormatter)),
        build_timeout=defaulted(integer_spec(), 1200),
        ignore_deps=defaulted(boolean(), False),
        # vars is delayed so it is only normalised when first accessed
        vars=delayed(
            dictof(string_spec(), stack_specs.var_spec(), nested=True)),
        skip_update_if_equivalent=listof(stack_specs.skipper_spec()),
        suspend_actions=defaulted(boolean(), False),
        auto_scaling_group_name=optional_spec(
            formatted(string_spec(),
                      formatter=MergedOptionStringFormatter)),
        artifact_retention_after_deployment=defaulted(boolean(), False),
        command=optional_spec(string_spec()),
        netscaler=optional_spec(self.netscaler_spec),
        # Stackdriver deployment notifications
        notify_stackdriver=defaulted(boolean(), False),
        stackdriver=optional_spec(
            create_spec(
                stack_objs.Stackdriver,
                api_key=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                deployment_version=defaulted(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter),
                    "<version>"))),
        # DNS configuration; sites is delayed so it's only normalised on access
        dns=optional_spec(
            stack_specs.dns_spec(
                create_spec(
                    stack_objs.DNS,
                    vars=dictof(
                        string_spec(),
                        formatted(string_spec(),
                                  formatter=MergedOptionStringFormatter),
                        nested=True),
                    providers=dictof(string_spec(),
                                     stack_specs.dns_provider_spec()),
                    sites=delayed(
                        dictof(string_spec(),
                               stack_specs.dns_site_spec()))))),
        scaling_options=create_spec(
            ScalingOptions,
            highest_min=defaulted(integer_spec(), 2),
            instance_count_limit=defaulted(integer_spec(), 10)),
        # Artifacts built and uploaded for this stack, keyed by artifact name
        artifacts=container_spec(
            artifact_objs.ArtifactCollection,
            dictof(
                string_spec(),
                create_spec(
                    artifact_objs.Artifact,
                    not_created_here=defaulted(boolean(), False),
                    compression_type=string_choice_spec(["gz", "xz"]),
                    history_length=integer_spec(),
                    cleanup_prefix=optional_spec(string_spec()),
                    upload_to=formatted(
                        string_spec(),
                        formatter=MergedOptionStringFormatter),
                    commands=listof(stack_specs.artifact_command_spec(),
                                    expect=artifact_objs.ArtifactCommand),
                    paths=listof(stack_specs.artifact_path_spec(),
                                 expect=artifact_objs.ArtifactPath),
                    # Each file needs either inline content or a task that produces it
                    files=listof(
                        create_spec(
                            artifact_objs.ArtifactFile,
                            validators.has_either(["content", "task"]),
                            content=optional_spec(
                                formatted(
                                    string_spec(),
                                    formatter=MergedOptionStringFormatter)
                            ),
                            task=optional_spec(
                                formatted(
                                    string_spec(),
                                    formatter=MergedOptionStringFormatter)
                            ),
                            path=formatted(
                                string_spec(),
                                formatter=MergedOptionStringFormatter),
                            task_runner=formatted(
                                always_same_spec("{task_runner}"),
                                formatter=MergedOptionStringFormatter))))))
        ),
        # NewRelic deployment notifications
        newrelic=optional_spec(
            create_spec(
                stack_objs.NewRelic,
                api_key=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                account_id=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                application_id=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                env=listof(stack_specs.env_spec(),
                           expect=stack_objs.EnvironmentVariable),
                deployed_version=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)))),
        # Downtimer options, keyed by a known alerting system name
        downtimer_options=optional_spec(
            dictof(
                valid_string_spec(valid_alerting_system()),
                create_spec(
                    stack_objs.DowntimerOptions,
                    hosts=listof(
                        formatted(
                            string_spec(),
                            formatter=MergedOptionStringFormatter))))),
        alerting_systems=optional_spec(
            dictof(string_spec(), self.alerting_system_spec)),
        # SSH access options, optionally via a bastion host
        ssh=optional_spec(
            create_spec(
                stack_objs.SSH,
                validators.deprecated_key(
                    "autoscaling_group_id",
                    "Use ``auto_scaling_group_name``"),
                user=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                bastion=optional_spec(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                bastion_user=required(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                bastion_key_location=optional_spec(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                instance_key_location=optional_spec(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                address=optional_spec(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                instance=optional_spec(
                    listof(
                        formatted(string_spec(),
                                  formatter=MergedOptionStringFormatter))),
                auto_scaling_group_name=optional_spec(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)),
                bastion_key_path=formatted(
                    defaulted(
                        string_spec(),
                        "{config_root}/{environment}/bastion_ssh_key.pem"),
                    formatter=MergedOptionStringFormatter),
                instance_key_path=formatted(
                    defaulted(string_spec(),
                              "{config_root}/{environment}/ssh_key.pem"),
                    formatter=MergedOptionStringFormatter),
                storage_type=formatted(
                    defaulted(string_choice_spec(["url", "rattic"]), "url"),
                    formatter=MergedOptionStringFormatter),
                storage_host=optional_spec(
                    formatted(string_spec(),
                              formatter=MergedOptionStringFormatter)))),
        confirm_deployment=optional_spec(self.confirm_deployment_spec))