def convert_passwords(path, val): log.info("Converting %s", path) password = str(path)[len("passwords."):] configuration.converters.started(path) environment = configuration['bespin'].environment val_as_dict = configuration["passwords"][password].as_dict() if not environment: raise BespinError("No environment was provided", available=list(configuration["environments"].keys())) password_environment_as_dict = {} if ["passwords", password, environment] in configuration: password_environment_as_dict = configuration["passwords", password, environment].as_dict() base = MergedOptions(dont_prefix=path.configuration.dont_prefix, converters=path.configuration.converters) everything = path.configuration.root().wrapped() base.update(val_as_dict) everything[path] = val_as_dict base.update(password_environment_as_dict) everything[path].update(password_environment_as_dict) for thing in (base, everything): thing["__password__"] = val thing["__environment__"] = configuration["environments"][environment] meta = Meta(everything, [("passwords", ""), (password, "")]) return bespin_spec.password_spec.normalise(meta, base)
def main(argv=None):
    parser = get_parser()
    args = parser.parse_args(argv)
    setup_logging()

    try:
        bootstrap = BootStrapper()
        glbls = MergedOptions.using({"global": {"no_resolve": True}})

        forced = MergedOptions.using(
              MergedOptions.KeyValuePairs(args.options or [])
            , MergedOptions.Attributes(
                  args
                , ("environment", "resolve_order", "dry_run", "mandatory_options")
                , lift="global", ignoreable_values=(None, )
                )
            )

        if forced:
            log.info("Setting some options: %s", ' | '.join("[{}:{}]".format(key, val) for key, val in forced.as_flat()))
            glbls.options.extend(forced.options)

        log.info("Looking in %s for configuration", args.configs)
        resolved = bootstrap.find_configurations(args.configs, glbls)
        layers = bootstrap.get_layers(resolved, args.execute)
        deploy(layers)
    except CloudCityError as error:
        print("")
        print("!" * 80)
        print("Something went wrong! -- {0}".format(error.__class__.__name__))
        print("\t{0}".format(error))
        sys.exit(1)
def normalise(self, meta, val): if "use" in val: template = val["use"] if template not in meta.everything["templates"]: available = list(meta.everything["templates"].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything["templates"][template], val) formatted_string = sb.formatted( sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types ) key_name = meta.key_names()["_key_name_0"] key = sb.create_spec( EncryptionKey, name=sb.overridden(key_name), location=sb.required(formatted_string), description=formatted_string, grant=sb.listof(grant_statement_spec("key", key_name)), admin_users=sb.listof(sb.any_spec()), ).normalise(meta, val) statements = [{"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""}] if key.admin_users: for admin_user in key.admin_users: statements.append( {"principal": admin_user, "action": "kms:*", "resource": {"kms": "__self__"}, "Sid": ""} ) key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec("key", key_name))).normalise( meta.at("admin_users"), statements ) return key
def find_harpoon_options(self, configuration, args_dict):
    """Return us all the harpoon options"""
    d = lambda r: {} if r in (None, "", NotSpecified) else r
    return MergedOptions.using(
          dict(d(configuration.get('harpoon')).items())
        , dict(d(args_dict.get("harpoon")).items())
        ).as_dict()
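# A minimal sketch (not from the codebase) of the precedence find_harpoon_options
# relies on: MergedOptions.using layers its arguments so later sources win, while
# keys that only exist in earlier sources remain visible. The option names below
# are purely illustrative.
from option_merge import MergedOptions

defaults = {"harpoon": {"no_intervention": False, "stdout": "inherit"}}
cli_overrides = {"harpoon": {"no_intervention": True}}

merged = MergedOptions.using(defaults, cli_overrides)
assert merged["harpoon"]["no_intervention"] is True   # the later layer wins
assert merged["harpoon"]["stdout"] == "inherit"       # earlier-only keys still resolve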
def make_image(self, options, harpoon_options=None): config_root = self.make_temp_dir() if harpoon_options is None: harpoon_options = {} harpoon_options["docker_context"] = self.docker_client harpoon_options["no_intervention"] = True harpoon_options["docker_context_maker"] = self.new_docker_client harpoon = HarpoonSpec().harpoon_spec.normalise(Meta({}, []), harpoon_options) if "harpoon" not in options: options["harpoon"] = harpoon everything = MergedOptions.using({"harpoon": harpoon, "mtime": mtime, "config_root": config_root}) everything.update({"images": {"awesome_image": options}}) def make_options(): base = everything.wrapped() base.update(options) base["configuration"] = everything return base meta = Meta(everything, []).at("images").at("awesome_image") harpoon_converter = Converter(convert=lambda *args: harpoon, convert_path=["harpoon"]) image_converter = Converter( convert=lambda *args: HarpoonSpec().image_spec.normalise(meta, make_options()) , convert_path=["images", "awesome_image"] ) everything.add_converter(harpoon_converter) everything.add_converter(image_converter) everything.converters.activate() return everything[["images", "awesome_image"]]
def alter_clone_args_dict(self, new_collector, new_args_dict, options=None):
    return MergedOptions.using(
          new_args_dict
        , {"bespin": self.configuration["bespin"].as_dict()}
        , options or {}
        )
def formatted(self, *keys, **kwargs):
    """Get us a formatted value"""
    val = kwargs.get("value", NotSpecified)
    default = kwargs.get("default", NotSpecified)
    path_prefix = kwargs.get("path_prefix", self.path)
    configuration = kwargs.get("configuration", self.configuration)

    key = ""
    if val is NotSpecified:
        for key in keys:
            if key in configuration:
                val = configuration[key]
                break

    if val is NotSpecified:
        if default is NotSpecified:
            raise NoSuchKey("Couldn't find any of the specified keys in image options", keys=keys, image=self.name)
        else:
            return default

    if path_prefix:
        path = "{0}.{1}".format(path_prefix, key)
    else:
        path = key

    config = MergedOptions.using(self.all_configuration, {"this": {"name": self.name, "path": self.path}})
    return MergedOptionStringFormatter(config, path, value=val).format()
def normalise(self, meta, val):
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types)
    function_name = meta.key_names()['_key_name_0']

    val = sb.create_spec(Lambda
        , name = sb.overridden(function_name)
        , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"])))
        , code = sb.required(function_code_spec())
        , handler = function_handler_spec()
        , timeout = sb.integer_spec()
        , runtime = sb.required(formatted_string)
        , location = sb.required(formatted_string)
        , description = formatted_string
        , sample_event = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , desired_output_for_test = sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), "")
        , memory_size = sb.defaulted(divisible_by_spec(64), 128)
        ).normalise(meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(val[key], MergedOptions):
            v = val[key].as_dict()

            class Arbitrary(dictobj):
                fields = list(v.keys())

            val[key] = Arbitrary(**v)

    return val
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) gateway_name = meta.key_names()['_key_name_0'] gateway_location = formatted_string().normalise( meta.at('location'), val.get('location', '')) return sb.create_spec( Gateway, name=sb.overridden(gateway_name), location=sb.required(formatted_string()), stages=sb.listof(formatted_string()), api_keys=sb.listof(api_key_spec()), domain_names=sb.dictof(sb.string_spec(), custom_domain_name_spec(gateway_location)), resources=sb.dictof(sb.string_spec(), gateway_resource_spec())).normalise(meta, val)
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter) route_name = meta.key_names()['_key_name_0'] val = sb.create_spec(DNSRoute , name = sb.overridden(route_name) , zone = formatted_string , record_type = sb.string_choice_spec(["CNAME"]) , record_target = formatted_string ).normalise(meta, val) if not val.zone.endswith("."): val.zone = "{0}.".format(val.zone) if not isinstance(val.record_target, six.string_types): if not hasattr(val.record_target, "cname"): raise BadSpecValue("record_target must point at an object with a cname property", got=type(val.record_target), meta=meta) val.record_target = val.record_target.cname return val
async def zones_from_reference(target, reference, afr=sb.NotSpecified, **kwargs):
    """
    Return a dictionary of {serial: [(zone_index, colors), ...]} for the provided reference

    We assume all the devices support multizone
    """
    final = {}

    msg = MultiZoneMessages.GetColorZones(start_index=0, end_index=255)
    options = MergedOptions.using({"timeout": 5}, kwargs).as_dict()

    by_serial = defaultdict(list)
    async for pkt, _, _ in target.script(msg).run_with(reference, afr, **options):
        by_serial[pkt.serial].append(pkt)

    for serial, pkts in by_serial.items():
        final[serial] = []
        for p in pkts:
            if p | MultiZoneMessages.StateMultiZone:
                for i, color in enumerate(p.colors):
                    final[serial].append((p.zone_index + i, color))

    return final
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) role_name = meta.key_names()['_key_name_0'] original_permission = sb.listof(permission_dict()).normalise(meta.at("permission"), NotSpecified if "permission" not in val else val["permission"]) deny_permission = sb.listof(permission_dict(effect='Deny')).normalise(meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"]) allow_permission = sb.listof(permission_dict(effect='Allow')).normalise(meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"]) allow_to_assume_me = sb.listof(trust_dict("principal")).normalise(meta.at("allow_to_assume_me"), val.get("allow_to_assume_me", NotSpecified)) disallow_to_assume_me = sb.listof(trust_dict("notprincipal")).normalise(meta.at("disallow_to_assume_me"), val.get("disallow_to_assume_me", NotSpecified)) val = val.wrapped() val['trust'] = allow_to_assume_me + disallow_to_assume_me val['permission'] = original_permission + deny_permission + allow_permission return sb.create_spec(Role , name = sb.overridden(role_name) , description = formatted_string , trust = sb.container_spec(Document, sb.listof(trust_statement_spec('role', role_name))) , permission = sb.container_spec(Document, sb.listof(permission_statement_spec('role', role_name))) , make_instance_profile = sb.defaulted(sb.boolean(), False) ).normalise(meta, val)
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) key_name = meta.key_names()['_key_name_0'] key = sb.create_spec(EncryptionKey , name = sb.overridden(key_name) , location = sb.required(formatted_string) , description = formatted_string , grant = sb.listof(grant_statement_spec('key', key_name)) , admin_users = sb.listof(sb.any_spec()) , permission = sb.listof(sb.dictionary_spec()) , no_root_access = sb.defaulted(sb.boolean(), False) ).normalise(meta, val) statements = key.permission if not key.no_root_access: statements.append({"principal": {"iam": "root"}, "action": "kms:*", "resource": "*", "Sid": ""}) if key.admin_users: for admin_user in key.admin_users: statements.append({"principal": admin_user, "action": "kms:*", "resource": { "kms": "__self__" }, "Sid": ""}) key.policy = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('key', key_name))).normalise(meta.at("admin_users"), statements) return key
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) bucket_name = meta.key_names()['_key_name_0'] original_permission = sb.listof(resource_policy_dict()).normalise(meta.at("permission"), NotSpecified if "permission" not in val else val["permission"]) deny_permission = sb.listof(resource_policy_dict(effect='Deny')).normalise(meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"]) allow_permission = sb.listof(resource_policy_dict(effect='Allow')).normalise(meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"]) # require_mfa_to_delete is an alias for this permission if val.get("require_mfa_to_delete") is True: delete_policy = {"action": "s3:DeleteBucket", "resource": { "s3": "__self__" }, "Condition": { "Bool": { "aws:MultiFactorAuthPresent": True } } } normalised_delete_policy = resource_policy_dict(effect='Allow').normalise(meta.at("require_mfa_to_delete"), delete_policy) allow_permission.append(normalised_delete_policy) val = val.wrapped() val['permission'] = original_permission + deny_permission + allow_permission return sb.create_spec(Bucket , acl = sb.defaulted(sb.match_spec((six.string_types, canned_acl_spec()), (dict, acl_statement_spec('acl', 'acl'))), None) , name = sb.overridden(bucket_name) , location = sb.defaulted(formatted_string, None) , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name))) , tags = sb.dictof(sb.string_spec(), formatted_string) , website = sb.defaulted(website_statement_spec("website", "website"), None) , logging = sb.defaulted(logging_statement_spec("logging", "logging"), None) , lifecycle = sb.defaulted(sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None) ).normalise(meta, val)
def add_configuration(self, configuration, collect_another_source, done, result, src): """Used to add a file to the configuration, result here is the yaml.load of the src""" def make_mtime_func(source): """Lazily calculate the mtime to avoid wasted computation""" return lambda context: self.get_committime_or_mtime( context, source) if "harpoon" in result: if "extra_files" in result["harpoon"]: spec = sb.listof( sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)) meta = Meta(MergedOptions.using(result), []).at("harpoon").at("extra_files") for extra in spec.normalise(meta, result["harpoon"]["extra_files"]): if os.path.abspath(extra) not in done: if not os.path.exists(extra): raise BadConfiguration( "Specified extra file doesn't exist", extra=extra, source=src) collect_another_source(extra) if "images" in result and "__images_from__" in result["images"]: images_from_path = result["images"]["__images_from__"] if isinstance(images_from_path, six.string_types): images_from_path = [images_from_path] for ifp in images_from_path: if not ifp.startswith("/"): ifp = os.path.join(os.path.dirname(src), ifp) if not os.path.exists(ifp) or not os.path.isdir(ifp): raise self.BadConfigurationErrorKls( "Specified folder for other configuration files points to a folder that doesn't exist", path="images.__images_from__", value=ifp) for root, dirs, files in os.walk(ifp): for fle in files: location = os.path.join(root, fle) if fle.endswith(".yml") or fle.endswith(".yaml"): collect_another_source( location, prefix=[ "images", os.path.splitext(os.path.basename(fle))[0] ], extra={"mtime": make_mtime_func(location)}) del result["images"]["__images_from__"] if "mtime" not in result: result["mtime"] = make_mtime_func(src) configuration.update(result, source=src)
def extra_prepare(self, configuration, args_dict): """Called before the configuration.converters are activated""" harpoon = MergedOptions.using( configuration.get('harpoon', MergedOptions()).as_dict(), dict(args_dict.get("harpoon", MergedOptions()).items())).as_dict() # Args_dict may itself be a MergedOptions while "harpoon" in args_dict: del args_dict["harpoon"] # Create the addon getter and register the crosshair namespace self.addon_getter = AddonGetter() self.addon_getter.add_namespace("harpoon.crosshairs", Result.FieldSpec(), Addon.FieldSpec()) # Initiate the addons from our configuration self.register = Register(self.addon_getter, self) if ("addons" in harpoon) and ( type(harpoon["addons"]) in (MergedOptions, dict) or getattr(harpoon["addons"], "is_dict", False)): for namespace, adns in sb.dictof( sb.string_spec(), sb.listof(sb.string_spec())).normalise( Meta(harpoon, []).at("addons"), harpoon["addons"]).items(): self.register.add_pairs(*[(namespace, adn) for adn in adns]) # Import our addons self.register.recursive_import_known() # Resolve our addons self.register.recursive_resolve_imported() # Make sure images is started if "images" not in self.configuration: self.configuration["images"] = {} # Add our special stuff to the configuration self.configuration.update( { "$@": harpoon.get("extra", ""), "bash": args_dict["bash"] or NotSpecified, "harpoon": harpoon, "command": args_dict['command'] or NotSpecified, "assume_role": args_dict["assume_role"] or NotSpecified }, source="<args_dict>")
def alter_clone_args_dict(self, new_collector, new_args_dict, options=None):
    return MergedOptions.using(
          new_args_dict
        , {"photons_app": self.configuration["photons_app"].as_dict()}
        , options or {}
        )
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) role_name = meta.key_names()['_key_name_0'] original_permission = sb.listof(permission_dict()).normalise( meta.at("permission"), NotSpecified if "permission" not in val else val["permission"]) deny_permission = sb.listof(permission_dict(effect='Deny')).normalise( meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"]) allow_permission = sb.listof( permission_dict(effect='Allow')).normalise( meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"]) allow_to_assume_me = sb.listof(trust_dict("principal")).normalise( meta.at("allow_to_assume_me"), val.get("allow_to_assume_me", NotSpecified)) disallow_to_assume_me = sb.listof( trust_dict("notprincipal")).normalise( meta.at("disallow_to_assume_me"), val.get("disallow_to_assume_me", NotSpecified)) if not allow_to_assume_me and not disallow_to_assume_me: raise BadSpecValue( "Roles must have either allow_to_assume_me or disallow_to_assume_me specified", meta=meta) val = val.wrapped() val['trust'] = allow_to_assume_me + disallow_to_assume_me val['permission'] = original_permission + deny_permission + allow_permission return sb.create_spec( Role, name=sb.overridden(role_name), description=formatted_string, attached_policies=sb.listof(formatted_string), trust=sb.container_spec( Document, sb.listof(trust_statement_spec('role', role_name))), permission=sb.container_spec( Document, sb.listof(permission_statement_spec('role', role_name))), make_instance_profile=sb.defaulted(sb.boolean(), False)).normalise(meta, val)
def make_options(self):
    """Get all the found files into a MergedOptions object and default global"""
    options = MergedOptions()
    for key, values_list in self.found.items():
        options[key] = MergedOptions.using(*values_list)

    if 'global' not in options:
        options["global"] = MergedOptions()

    return options
def convert_stack(path, val):
    log.info("Converting %s", path)
    stack = str(path)[len("stacks."):]  # the stack name comes from the path, mirroring convert_passwords
    configuration.converters.started(path)
    environment = configuration['bespin'].environment

    config_as_dict = configuration.as_dict(ignore=["stacks"])
    val_as_dict = val.as_dict(ignore=["stacks"])
    if not environment or environment is NotSpecified:
        raise BespinError("No environment was provided", available=list(configuration["environments"].keys()))

    env = configuration[["environments", environment]]
    if isinstance(env, six.string_types):
        environment_as_dict = configuration[["environments", env]].as_dict()
        env = configuration[["environments", env]]
    else:
        environment_as_dict = configuration[["environments", environment]].as_dict()

    stack_environment = {}
    stack_environment_as_dict = {}
    if ["stacks", stack, environment] in configuration:
        stack_environment = configuration[["stacks", stack, environment]]
        stack_environment_as_dict = stack_environment.as_dict()

    # `base` is used for the majority of the values
    base = path.configuration.root().wrapped()
    base.update(config_as_dict)
    base.update(val_as_dict)
    base.update(environment_as_dict)
    base.update(stack_environment_as_dict)

    # `everything` is used for formatting options
    # Ideally it matches base
    # The difference here is that we want to maintain source information
    everything = path.configuration.root().wrapped()
    everything[path] = MergedOptions.using(configuration, val, env, stack_environment)

    for thing in (base, everything):
        thing["bespin"] = configuration["bespin"]
        thing["environment"] = environment
        thing["configuration"] = configuration
        thing["__stack__"] = val
        thing["__environment__"] = configuration["environments"][environment]
        thing["__stack_name__"] = stack

    meta = Meta(everything, [("stacks", ""), (stack, "")])
    return bespin_spec.stack_spec.normalise(meta, base)
def check_formatting(self, configuration, path, value=NotSpecified, expected=NotSpecified, **configuration_kwargs):
    if expected is NotSpecified:
        assert False, "Tester must specify what is expected"

    if not isinstance(configuration, MergedOptions):
        configuration = MergedOptions.using(configuration, **configuration_kwargs)

    kwargs = {}
    if value is not NotSpecified:
        kwargs['value'] = value
    formatter = MergedOptionStringFormatter(configuration, path, **kwargs)
    self.assertEqual(formatter.format(), expected)
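# A hedged example of using the helper above inside a test case; the keys and the
# expectation that "{vars.name}" resolves via the configuration are assumptions.
def test_resolves_references_from_configuration(self):
    self.check_formatting(
          {"vars": {"name": "my-stack"}}
        , "greeting"
        , value="hello {vars.name}"
        , expected="hello my-stack"
        )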
def convert_passwords(path, val): log.info("Converting %s", path) password = str(path)[len("passwords."):] configuration.converters.started(path) environment = configuration['bespin'].environment val_as_dict = configuration["passwords"][password].as_dict() if not environment: raise BespinError("No environment was provided", available=list( configuration["environments"].keys())) password_environment_as_dict = {} if ["passwords", password, environment] in configuration: password_environment_as_dict = configuration[[ "passwords", password, environment ]].as_dict() base = MergedOptions(dont_prefix=path.configuration.dont_prefix, converters=path.configuration.converters) everything = path.configuration.root().wrapped() base.update(val_as_dict) everything[path] = val_as_dict base.update(password_environment_as_dict) everything[path].update(password_environment_as_dict) for thing in (base, everything): thing["__password__"] = val thing["__environment__"] = configuration["environments"][ environment] meta = Meta(everything, [("passwords", ""), (password, "")]) return bespin_spec.password_spec.normalise(meta, base)
def normalise(self, meta, val):
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    formatted_string = sb.formatted(sb.string_or_int_as_string_spec(),
                                    MergedOptionStringFormatter,
                                    expected_type=six.string_types)
    function_name = meta.key_names()['_key_name_0']

    val = sb.create_spec(
        Lambda,
        name=sb.overridden(function_name),
        role=sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"]))),
        code=sb.required(function_code_spec()),
        handler=function_handler_spec(),
        timeout=sb.integer_spec(),
        runtime=sb.required(formatted_string),
        location=sb.required(formatted_string),
        description=formatted_string,
        sample_event=sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), ""),
        desired_output_for_test=sb.defaulted(sb.or_spec(formatted_dictionary(), sb.string_spec()), ""),
        memory_size=sb.defaulted(divisible_by_spec(64), 128)).normalise(meta, val)

    # Hack to make sample_event and desired_output_for_test not appear as a MergedOptions
    for key in ('sample_event', 'desired_output_for_test'):
        if isinstance(val[key], MergedOptions):
            v = val[key].as_dict()

            class Arbitrary(dictobj):
                fields = list(v.keys())

            val[key] = Arbitrary(**v)

    return val
def get_field(self, value, args, kwargs, format_spec=None):
    """Also take the spec into account"""
    if format_spec in ("env", ):
        return value, ()

    if self.option_path is None:
        this = self.all_options
    else:
        this = self.all_options.get(self.option_path)
    options = MergedOptions.using(self.all_options, {"this": this})

    if value in self.chain:
        raise BadOptionFormat("Recursive option", chain=self.chain + [value])

    return MergedOptionStringFormatter(options, value, chain=self.chain + [value]).format(), ()
class StackResolver(object):
    def __init__(self):
        self.registered = MergedOptions()

    def register(self, stack_kls, extra_aliases=None):
        """Register a stack type"""
        aliases = list(getattr(stack_kls, "aliases", []))
        if extra_aliases:
            aliases.extend(extra_aliases)

        if not aliases:
            raise BadStackKls("No alias provided", kls=stack_kls)

        self.registered.update({alias: stack_kls for alias in aliases})

    def register_import(self, import_line, extra_aliases=None):
        """Register a kls from an import string"""
        if ":" not in import_line:
            raise BadImport("Expecting '<path>:<obj>'", got=import_line)

        path, obj = import_line.split(":")
        if not valid_python_path(path):
            raise BadImport("Path portion of import is not a valid python name", path=path)
        if not valid_python_name(obj):
            raise BadImport("obj portion of import is not a valid python name", obj=obj)

        obj = do_import(path, obj)
        self.register(obj, extra_aliases)

    def register_defaults(self):
        self.register_import("cloudcity.resolution.types.config:ConfigStack")

    def resolve(self, name, options):
        the_type = options.get("type", "config")
        if the_type not in self.registered:
            raise UnknownStackType(name=name, only_have=self.registered.keys(), wanted=the_type)
        return self.registered[the_type](name, options)
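# Hedged usage sketch for the resolver above; the stack class and options here
# are invented for illustration.
from option_merge import MergedOptions

class MyStack(object):
    aliases = ["mystack"]

    def __init__(self, name, options):
        self.name = name
        self.options = options

resolver = StackResolver()
resolver.register(MyStack)
stack = resolver.resolve("app", MergedOptions.using({"type": "mystack"}))
assert isinstance(stack, MyStack)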
def extra_prepare(self, configuration, args_dict):
    """Called before the configuration.converters are activated"""
    bespin = dict(args_dict.get("bespin", MergedOptions()).items())
    while "bespin" in args_dict:
        del args_dict["bespin"]

    environment = bespin.get("environment")
    bespin["configuration"] = configuration

    self.configuration.update(
          { "$@": bespin["extra"]
          , "bespin": bespin
          , "command": args_dict['command']
          , "environment": environment
          }
        , source="<args_dict>"
        )
def add_configuration(self, configuration, collect_another_source, done, result, src): """ Used to add a file to the configuration, result here is the yaml.load of the src. If the configuration we're reading in has ``photons_app.extra_files`` then this is treated as a list of strings of other files to collect. """ # Make sure to maintain the original config_root if "config_root" in configuration: # if we already have a config root then we only keep new config root if it's not the home location # i.e. if it is the home configuration, we don't delete the new config_root if configuration["config_root"] != os.path.dirname( self.home_dir_configuration_location()): if "config_root" in result: del result["config_root"] config_root = configuration.get("config_root") if config_root and src.startswith(config_root): src = "{{config_root}}/{0}".format(src[len(config_root) + 1:]) configuration.update(result, source=src) if "photons_app" in result: if "extra_files" in result["photons_app"]: spec = sb.listof( sb.formatted(sb.string_spec(), formatter=MergedOptionStringFormatter)) config_root = { "config_root": result.get("config_root", configuration.get("config_root")) } meta = Meta(MergedOptions.using(result, config_root), []).at("photons_app").at("extra_files") for extra in spec.normalise( meta, result["photons_app"]["extra_files"]): if os.path.abspath(extra) not in done: if not os.path.exists(extra): raise BadConfiguration( "Specified extra file doesn't exist", extra=extra, source=src) collect_another_source(extra)
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) bucket_name = meta.key_names()['_key_name_0'] original_permission = sb.listof(resource_policy_dict()).normalise( meta.at("permission"), NotSpecified if "permission" not in val else val["permission"]) deny_permission = sb.listof( resource_policy_dict(effect='Deny')).normalise( meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"]) allow_permission = sb.listof( resource_policy_dict(effect='Allow')).normalise( meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"]) val = val.wrapped() val['permission'] = original_permission + deny_permission + allow_permission return sb.create_spec( Bucket, name=sb.overridden(bucket_name), location=sb.required(formatted_string), permission=sb.container_spec( Document, sb.listof(resource_policy_statement_spec( 'bucket', bucket_name))), tags=sb.dictof(sb.string_spec(), formatted_string)).normalise(meta, val)
def make_image(self, options, harpoon_options=None):
    config_root = self.make_temp_dir()
    if harpoon_options is None:
        harpoon_options = {}

    harpoon_options["docker_context"] = self.docker_client
    harpoon_options["docker_context_maker"] = self.new_docker_client
    harpoon = HarpoonSpec().harpoon_spec.normalise(Meta({}, []), harpoon_options)

    if "harpoon" not in options:
        options["harpoon"] = harpoon

    everything = MergedOptions.using(
        {"harpoon": harpoon, "mtime": mtime, "_key_name_1": "awesome_image", "config_root": config_root}
        )

    harpoon_converter = Converter(convert=lambda *args: harpoon, convert_path=["harpoon"])
    everything.add_converter(harpoon_converter)
    everything.converters.activate()

    if "configuration" not in options:
        options["configuration"] = everything

    return HarpoonSpec().image_spec.normalise(Meta(everything, []), options)
def normalise(self, meta, val):
    if 'use' in val:
        template = val['use']
        if template not in meta.everything['templates']:
            available = list(meta.everything['templates'].keys())
            raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta)
        val = MergedOptions.using(meta.everything['templates'][template], val)

    gateway_name = meta.key_names()['_key_name_0']
    gateway_location = formatted_string().normalise(meta.at('location'), val.get('location', ''))

    return sb.create_spec(Gateway
        , name = sb.overridden(gateway_name)
        , location = sb.required(formatted_string())
        , stages = sb.listof(formatted_string())
        , api_keys = sb.listof(api_key_spec())
        , domain_names = sb.dictof(sb.string_spec(), custom_domain_name_spec(gateway_location))
        , resources = sb.listof(gateway_resource_spec())
        ).normalise(meta, val)
def resolve(self, options, resolve_order):
    """Go through and re-add parts of the options according to global.resolve_order"""
    new_options = MergedOptions.using({"global": options.get("global", {})})

    for key in options.keys():
        new_values = MergedOptions()
        current_values = options[key]

        if current_values.get("no_resolve", False):
            new_values.update(current_values)
        else:
            for part in resolve_order:
                if not part:
                    new_values.update(current_values)
                else:
                    val = current_values.get(part)
                    if val:
                        new_values.update(val)

        new_options[key] = new_values

    new_options["global"]["resolve_order"] = resolve_order
    return new_options
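# Illustrative data only: with resolve_order ["", "dev"], the base values of each
# stack are kept and its "dev" section is layered over them. `bootstrap` stands in
# for whatever object defines resolve() above.
from option_merge import MergedOptions

options = MergedOptions.using({
      "global": {"resolve_order": ["", "dev"]}
    , "app": {"ami": "base-ami", "dev": {"ami": "dev-ami"}}
    })
resolved = bootstrap.resolve(options, ["", "dev"])
assert resolved["app"]["ami"] == "dev-ami"                 # the dev layer wins
assert resolved["global"]["resolve_order"] == ["", "dev"]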
def run(self, overview, cli_args, image, available_tasks=None): """Run this task""" if available_tasks is None: from harpoon.tasks import available_tasks task_func = available_tasks[self.action] configuration = MergedOptions.using( overview.configuration, dont_prefix=overview.configuration.dont_prefix, converters=overview.configuration.converters) if self.options: if image: configuration.update({"images": {image: self.options}}) else: configuration.update(self.options) configuration.update(cli_args, source="<cli>") if self.overrides: overrides = {} for key, val in self.overrides.items(): overrides[key] = val if isinstance(val, MergedOptions): overrides[key] = dict(val.items()) overview.configuration.update(overrides) images = None if task_func.needs_images: images = self.determine_image(image, overview, configuration, needs_image=task_func.needs_image) if image: image = images[image] if image: image.find_missing_env() return task_func(overview, configuration, images=images, image=image)
def setup(self):
    """Raise errors if the definition doesn't make sense"""
    if "use" in self.definition:
        template = self.definition["use"]
        if not self.templates:
            raise NoTemplates(name=self.name, looking_for_template=template, available=self.templates.keys())
        if template not in self.templates:
            raise CantFindTemplate(name=self.name, looking_for_template=template, available=self.templates.keys())
        self.definition = MergedOptions.using(self.templates[template], self.definition)

    self.description = self.definition.get("description", "No description provided!")

    for statement in listified(self.definition, "allow_to_assume_me"):
        self.trust.extend(self.statements.expand_trust_statement(statement, allow=True))

    for statement in listified(self.definition, "disallow_to_assume_me"):
        self.distrust.extend(self.statements.expand_trust_statement(statement, allow=False))

    for key, default_allow in (("permission", None), ("allow_permission", True), ("deny_permission", False)):
        for policy in listified(self.definition, key):
            for statement in self.statements.make_permission_statements(policy, allow=default_allow):
                self.permission.append(statement)
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_spec(), MergedOptionStringFormatter) route_name = meta.key_names()['_key_name_0'] val = sb.create_spec(DNSRoute, name=sb.overridden(route_name), zone=formatted_string, record_type=sb.string_choice_spec(["CNAME"]), record_target=formatted_string).normalise( meta, val) if not val.zone.endswith("."): val.zone = "{0}.".format(val.zone) if not isinstance(val.record_target, six.string_types): if not hasattr(val.record_target, "cname"): raise BadSpecValue( "record_target must point at an object with a cname property", got=type(val.record_target), meta=meta) val.record_target = val.record_target.cname return val
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) function_name = meta.key_names()['_key_name_0'] return sb.create_spec(Lambda , name = sb.overridden(function_name) , role = sb.required(only_one_spec(resource_spec("lambda", function_name, only=["iam"]))) , code = sb.required(function_code_spec()) , handler = function_handler_spec() , timeout = sb.integer_spec() , runtime = sb.required(formatted_string) , location = sb.required(formatted_string) , description = formatted_string , sample_event = sb.defaulted(sb.or_spec(sb.dictionary_spec(), sb.string_spec()), "") , memory_size = sb.defaulted(divisible_by_spec(64), 128) ).normalise(meta, val)
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) bucket_name = meta.key_names()['_key_name_0'] original_permission = sb.listof(resource_policy_dict()).normalise(meta.at("permission"), NotSpecified if "permission" not in val else val["permission"]) deny_permission = sb.listof(resource_policy_dict(effect='Deny')).normalise(meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"]) allow_permission = sb.listof(resource_policy_dict(effect='Allow')).normalise(meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"]) val = val.wrapped() val['permission'] = original_permission + deny_permission + allow_permission return sb.create_spec(Bucket , name = sb.overridden(bucket_name) , location = sb.required(formatted_string) , permission = sb.container_spec(Document, sb.listof(resource_policy_statement_spec('bucket', bucket_name))) , tags = sb.dictof(sb.string_spec(), formatted_string) ).normalise(meta, val)
def normalise(self, meta, val): if 'use' in val: template = val['use'] if template not in meta.everything['templates']: available = list(meta.everything['templates'].keys()) raise BadTemplate("Template doesn't exist!", wanted=template, available=available, meta=meta) val = MergedOptions.using(meta.everything['templates'][template], val) formatted_string = sb.formatted(sb.string_or_int_as_string_spec(), MergedOptionStringFormatter, expected_type=six.string_types) bucket_name = meta.key_names()['_key_name_0'] original_permission = sb.listof(resource_policy_dict()).normalise( meta.at("permission"), NotSpecified if "permission" not in val else val["permission"]) deny_permission = sb.listof( resource_policy_dict(effect='Deny')).normalise( meta.at("deny_permission"), NotSpecified if "deny_permission" not in val else val["deny_permission"]) allow_permission = sb.listof( resource_policy_dict(effect='Allow')).normalise( meta.at("allow_permission"), NotSpecified if "allow_permission" not in val else val["allow_permission"]) # require_mfa_to_delete is an alias for this permission if val.get("require_mfa_to_delete") is True: delete_policy = { "action": "s3:DeleteBucket", "resource": { "s3": "__self__" }, "Condition": { "Bool": { "aws:MultiFactorAuthPresent": True } } } normalised_delete_policy = resource_policy_dict( effect='Allow').normalise(meta.at("require_mfa_to_delete"), delete_policy) allow_permission.append(normalised_delete_policy) val = val.wrapped() val['permission'] = original_permission + deny_permission + allow_permission return sb.create_spec( Bucket, acl=sb.defaulted( sb.match_spec((six.string_types, canned_acl_spec()), (dict, acl_statement_spec('acl', 'acl'))), None), name=sb.overridden(bucket_name), location=sb.defaulted(formatted_string, None), permission=sb.container_spec( Document, sb.listof(resource_policy_statement_spec( 'bucket', bucket_name))), tags=sb.dictof(sb.string_spec(), formatted_string), website=sb.defaulted(website_statement_spec("website", "website"), None), logging=sb.defaulted(logging_statement_spec("logging", "logging"), None), lifecycle=sb.defaulted( sb.listof(lifecycle_statement_spec("lifecycle", "lifecycle")), None)).normalise(meta, val)
describe CommandCase, "array_command_spec": before_each: self.spec = cs.array_command_spec() it "complains if it's a one item value": command = ["ENV 1"] with self.fuzzyAssertRaisesError(BadSpecValue, "Expected a value but got none", meta=self.meta): self.spec.normalise(self.meta, command) it "returns multiple commands if second value is an array": command = ["ENV", ["ONE", "TWO", "THREE"]] self.assertDockerLines(command, ["ENV ONE", "ENV TWO", "ENV THREE"]) it "formats second list": everything = MergedOptions.using({"one": 1, "two": 2}) self.meta.indexed_at(0).everything = everything self.meta.indexed_at(1).everything = everything command = ["ENV", "ONE {one}"] self.assertDockerLines(command, ["ENV ONE 1"]) command = ["ENV", ["ONE {one}", "TWO {two}"]] self.assertDockerLines(command, ["ENV ONE 1", "ENV TWO 2"]) it "uses complex_ADD_spec if the second value is a dictionary": second_val = {self.unique_val(): self.unique_val()} normalised = [Command((self.unique_val(), "one")), Command((self.unique_val(), "two")), Command((self.unique_val(), "three"))] normalise = mock.Mock(name="normalise", return_value=normalised) command = [self.unique_val(), second_val]
from tests.helpers import HarpoonCase from noseOfYeti.tokeniser.support import noy_sup_setUp from input_algorithms.meta import Meta from option_merge import MergedOptions import mock describe HarpoonCase, "HarpoonSpec": before_each: self.meta = mock.Mock(name="meta") it "can get a fake Image": with self.a_temp_dir() as directory: harpoon = mock.Mock(name="harpoon") everything = MergedOptions.using({"config_root": directory, "_key_name_1": "blah", "harpoon": harpoon}) meta = Meta(everything, []) fake = HarpoonSpec().image_spec.fake_filled(meta, with_non_defaulted=True) self.assertEqual(fake.context.parent_dir, directory) self.assertEqual(fake.name, "blah") as_dict = fake.as_dict() self.assertEqual(type(as_dict["context"]), dict) self.assertEqual(sorted(as_dict["context"].keys()), sorted(["enabled", "use_git_timestamps", "use_gitignore", "exclude", "include", "parent_dir"])) describe "name_spec": # Shared tests for image_name_spec and task_name_spec __only_run_tests_in_children__ = True @property def spec(self):
def start_configuration(self):
    """Create the base of the configuration"""
    return MergedOptions(dont_prefix=[dictobj])
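# A minimal sketch, assuming dont_prefix stops MergedOptions from treating dictobj
# instances as nested dictionaries to merge, so they are stored and returned whole.
# The Image class here is invented for illustration.
from input_algorithms.dictobj import dictobj
from option_merge import MergedOptions

class Image(dictobj):
    fields = ["name"]

options = MergedOptions(dont_prefix=[dictobj])
options.update({"image": Image(name="web")})
assert isinstance(options["image"], Image)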
describe CommandCase, "array_command_spec": before_each: self.spec = cs.array_command_spec() it "complains if it's a one item value": command = ["ENV 1"] with self.fuzzyAssertRaisesError(BadSpecValue, "The value is a list with the wrong number of items", meta=self.meta): self.spec.normalise(self.meta, command) it "returns multiple commands if second value is an array": command = ["ENV", ["ONE", "TWO", "THREE"]] self.assertDockerLines(command, ["ENV ONE", "ENV TWO", "ENV THREE"]) it "formats second list": everything = MergedOptions.using({"one": 1, "two": 2}) self.meta.everything = everything command = ["ENV", "ONE {one}"] self.assertDockerLines(command, ["ENV ONE 1"]) command = ["ENV", ["ONE {one}", "TWO {two}"]] self.assertDockerLines(command, ["ENV ONE 1", "ENV TWO 2"]) it "uses complex_ADD_spec if the second value is a dictionary with ADD": second_val = {self.unique_val(): self.unique_val()} normalised = [Command((self.unique_val(), "one")), Command((self.unique_val(), "two")), Command((self.unique_val(), "three"))] normalise = mock.Mock(name="normalise", return_value=normalised) command = ["ADD", second_val] with mock.patch.object(cs.complex_ADD_spec, "normalise", normalise):
describe CommandCase, "array_command_spec": before_each: self.spec = cs.array_command_spec() it "complains if it's a one item value": command = ["ENV 1"] with self.fuzzyAssertRaisesError(BadSpecValue, "The value is a list with the wrong number of items", meta=self.meta): self.spec.normalise(self.meta, command) it "returns multiple commands if second value is an array": command = ["ENV", ["ONE", "TWO", "THREE"]] self.assertDockerLines(command, ["ENV ONE", "ENV TWO", "ENV THREE"]) it "formats second list": everything = MergedOptions.using({"one": 1, "two": 2}) self.meta.everything = everything command = ["ENV", "ONE {one}"] self.assertDockerLines(command, ["ENV ONE 1"]) command = ["ENV", ["ONE {one}", "TWO {two}"]] self.assertDockerLines(command, ["ENV ONE 1", "ENV TWO 2"]) it "uses complex_ADD_spec if the second value is a dictionary": second_val = {self.unique_val(): self.unique_val()} normalised = [Command((self.unique_val(), "one")), Command((self.unique_val(), "two")), Command((self.unique_val(), "three"))] normalise = mock.Mock(name="normalise", return_value=normalised) command = [self.unique_val(), second_val] with mock.patch.object(cs.complex_ADD_spec, "normalise", normalise):
with mock.patch.multiple(collector , find_photons_app_options = find_photons_app_options , determine_mainline_module = determine_mainline_module , setup_addon_register = setup_addon_register ): yield __main__ find_photons_app_options.assert_called_once_with(configuration, args_dict) determine_mainline_module.assert_called_once_with() setup_addon_register.assert_called_once_with(photons_app, __main__) it "puts things into the configuration and sets up the addon register": extra = str(uuid.uuid1()) photons_app = {"extra": extra} configuration = MergedOptions() collector = Collector() register = mock.Mock(name="register") args_dict = mock.Mock(name="args_dict") with self.mocks(collector, configuration, args_dict, photons_app, register): collector.extra_prepare(configuration, args_dict) class AFuture: def __eq__(self, other): return isinstance(other, asyncio.Future) self.assertIs(collector.register, register) self.assertEqual(configuration.as_dict() , { "$@": extra , "collector": collector
def encrypt_certificate(collector): """Write encrypted values for your certificate to the configuration""" configuration = collector.configuration amazon = configuration['amazon'] aws_syncr = configuration['aws_syncr'] certificate = aws_syncr.artifact available = [] for gateway_name, gateway in configuration.get('apigateway', {}, ignore_converters=True).items(): for name, options in gateway.get("domain_names", {}).items(): if "zone" in options: location = '.'.join(['apigateway', gateway_name, 'domain_names']) formatter = MergedOptionStringFormatter(configuration, location, value=options['zone']) available.append((gateway_name, "{0}.{1}".format(name, formatter.format()))) if not available: raise AwsSyncrError("Please specify apigateway.<gateway_name>.domain_names.<domain_name>.name in the configuration") if not certificate: raise AwsSyncrError("Please specify certificate to encrypt with --artifact", available=[a[1] for a in available]) if certificate not in [a[1] for a in available]: raise AwsSyncrError("Unknown certificate", available=[a[1] for a in available], got=certificate) gateway = [name for name, cert in available if cert == certificate][0] location, source = find_certificate_source(configuration, gateway, certificate) log.info("Gonna edit {0} in {1}".format(location, source)) current = MergedOptions.using(yaml.load(open(source))) dest = current[location] try: key_id = input("Which kms key do you want to use? ") region = input("What region is this key in? ") except EOFError: raise UserQuit() # Make the filename completion work setup_completer() # Create the datakey to encrypt with data_key = amazon.kms.generate_data_key(region, key_id) plaintext_data_key = data_key["Plaintext"] encrypted_data_key = base64.b64encode(data_key["CiphertextBlob"]).decode('utf-8') # Encrypt our secrets secrets = {} for name, desc in (("body", "certificate's crt file"), ("key", "private key file"), ("chain", "certificate chain")): location = None while not location or not os.path.isfile(location): location = os.path.expanduser(filename_prompt("Where is the {0}? ".format(desc))) if not location or not os.path.isfile(location): print("Please give a location to a file that exists!") data = open(location).read() counter = Counter.new(128) encryptor = AES.new(plaintext_data_key[:32], AES.MODE_CTR, counter=counter) secrets[name] = base64.b64encode(encryptor.encrypt(data)).decode('utf-8') # Add in the encrypted values dest['body'] = {"kms": secrets['body'], "location": region, "kms_data_key": encrypted_data_key} dest['key'] = {"kms": secrets['key'], "location": region, "kms_data_key": encrypted_data_key} dest['chain'] = {"kms": secrets['chain'], "location": region, "kms_data_key": encrypted_data_key} # And write to the file! yaml.dump(current.as_dict(), open(source, 'w'), explicit_start=True, indent=2, default_flow_style=False)
def __init__(self):
    self.registered = MergedOptions()
def prepare(self, npm_deps, compiled_static_folder): ctxt = docker_context() harpoon_options = { "docker_context": ctxt , "no_intervention": True , "docker_context_maker": docker_context } self.harpoon = HarpoonSpec().harpoon_spec.normalise(Meta({}, []), harpoon_options) config_root = pkg_resources.resource_filename(__package__, "") deps = self.default_npm_deps() deps.update(npm_deps) image_name = "dashmat-jsx-builder" everything = MergedOptions(dont_prefix=[dictobj]) everything.update( { "harpoon": self.harpoon , "config_root": config_root , "images": { image_name: { "harpoon": self.harpoon , "configuration": everything , "volumes": { "mount": [ [ compiled_static_folder, "/compiled" ] ] } , "context": { "enabled" : False } , "persistence": { "action": "npm install && npm dedup" , "folders": ["/project/node_modules", "/usr/lib/node_modules"] } , "commands": [ "FROM gliderlabs/alpine:3.2" , "RUN apk-install bash nodejs" , "RUN npm install -g npm" , "RUN mkdir /project" , "WORKDIR /project" , [ "ADD" , { "dest": "/project/package.json" , "content": json.dumps( { "name": "dashmat" , "version": "0.1.0" , "dependencies": collections.OrderedDict(sorted(deps.items())) } , sort_keys=True ) , "mtime": mtime } ] ] } } } ) def convert_image(path, val): meta = Meta(everything, [(part, "") for part in path.path]) return HarpoonSpec().image_spec.normalise(meta, val) everything.add_converter(Converter(convert=convert_image, convert_path=["images", image_name])) everything.converters.activate() self.image = everything[["images", image_name]] Builder().make_image(self.image, {self.image.name: self.image})
def start_configuration(self):
    return MergedOptions.using({})

def read_file(self, location):
    return json.load(open(location))
def start_configuration(slf):
    called.append(0)
    return MergedOptions.using({})

def read_file(slf, location):
    called.append((1, location))
    raise BadJson(location=location)
called = [] class C(Collector): def setup(self): called.append(1) self.assertEqual(called, []) C() self.assertEqual(called, [1]) describe "register_converters": it "adds converters": meta = mock.Mock(name='meta') Meta = mock.Mock(name="Meta", return_value=meta) NotSpecified = mock.Mock(name="NotSpecified") configuration = MergedOptions.using({"two": 2, "three": 3}) spec1 = mock.Mock(name="spec1") spec1.normalise.return_value = "ONE" spec2 = mock.Mock(name="spec2") spec2.normalise.return_value = "TWO" specs = {(0, ("one", )): spec1, (0, ("two", )): spec2} collector = Collector() collector.register_converters(specs, Meta, configuration, NotSpecified) configuration.converters.activate() self.assertEqual(configuration["one"], "ONE") self.assertEqual(configuration["two"], "TWO")
import mock describe BespinCase, "Var spec": before_each: self.meta = mock.Mock(name="meta", spec=Meta) it "creates a Static variable if only one item is given": self.assertEqual(specs.var_spec().normalise(self.meta, 1), objs.StaticVariable("1")) self.assertEqual(specs.var_spec().normalise(self.meta, "1"), objs.StaticVariable("1")) self.assertEqual(specs.var_spec().normalise(self.meta, ["1"]), objs.StaticVariable("1")) it "creates a Dynamic variable if only one item is given": stack = self.unique_val() output = self.unique_val() bespin = self.unique_val() self.meta.everything = MergedOptions.using({"bespin": bespin}) self.assertEqual(specs.var_spec().normalise(self.meta, [stack, output]), objs.DynamicVariable(stack, output, bespin)) describe BespinCase, "artifact_path_spec": before_each: self.meta = mock.Mock(name="artifact_path_spec") it "creates an artifact_path from the two items": host_path = self.unique_val() artifact_path = self.unique_val() self.assertEqual(specs.artifact_path_spec().normalise(self.meta, [host_path, artifact_path]), artifact_objs.ArtifactPath(host_path, artifact_path)) describe BespinCase, "Env spec": before_each: self.meta = mock.Mock(name="meta", spec=Meta) self.env_name = self.unique_val()
it "overrides dictionaries with scalar values": target = {"a":5, "b":2, "c": {"e":7}} source = {"a":1, "b":2, "c": 3} hp.merge_into_dict(target, source) self.assertEqual(target, {"a":1, "b":2, "c": 3}) it "overrides scalar values with dictionaries": target = {"a":1, "b":2, "c": 3} source = {"a":5, "b":2, "c": {"e":7}} hp.merge_into_dict(target, source) self.assertEqual(target, {"a":5, "b":2, "c": {"e": 7}}) describe "with MergedOptions": it "merges a MergedOptions into an empty dictionary": target = {} source = MergedOptions.using({"a":5, "b":2, "c": {"e":7}}) hp.merge_into_dict(target, source) self.assertEqual(target, {"a":5, "b":2, "c": {"e": 7}}) it "merges a MergedOptions into a full dictionary": target = {"a":5, "b":2, "c": {"f":7}} source = MergedOptions.using({"a":1, "b":2, "c": {"e":7}}) hp.merge_into_dict(target, source) self.assertEqual(target, {"a":1, "b":2, "c": {"f": 7, "e": 7}}) it "merges a MergedOptions with prefixed data into an empty dictionary": target = {"a":5, "b":2, "c": {"f":7}} source = MergedOptions() source["c"] = {"e": 7} hp.merge_into_dict(target, source) self.assertEqual(target, {"a":5, "b":2, "c": {"f": 7, "e": 7}})
def collect_configuration(self, configuration_file): """Return us a MergedOptions with this configuration and any collected configurations""" errors = [] result = self.read_yaml(configuration_file) configuration_dir = os.path.dirname( os.path.abspath(configuration_file)) images_from = [] images_from_path = None if "images" in result and "__images_from__" in result["images"]: images_from_path = result["images"]["__images_from__"] if not images_from_path.startswith("/"): images_from_path = os.path.join(configuration_dir, images_from_path) if not os.path.exists(images_from_path) or not os.path.isdir( images_from_path): raise BadConfiguration( "Specified folder for other configuration files points to a folder that doesn't exist", path="images.__images_from__", value=images_from_path) images_from = sorted( chain.from_iterable([[ os.path.join(root, fle) for fle in files if fle.endswith(".yml") or fle.endswith(".yaml") ] for root, dirs, files in os.walk(images_from_path)])) harpoon_spec = HarpoonSpec() configuration = MergedOptions(dont_prefix=[dictobj]) home_dir_configuration = self.home_dir_configuration_location() sources = [home_dir_configuration, configuration_file] + images_from def make_mtime_func(source): """Lazily calculate the mtime to avoid wasted computation""" return lambda: self.get_committime_or_mtime(source) for source in sources: if source is None or not os.path.exists(source): continue try: result = self.read_yaml(source) except BadYaml as error: errors.append(error) continue if "images" in result and "__images_from__" in result["images"]: del result["images"]["__images_from__"] if source in images_from: result = { "images": { os.path.splitext(os.path.basename(source))[0]: result } } result["mtime"] = make_mtime_func(source) if "images" in result: images = result.pop("images") images = dict( (image, MergedOptions.using(configuration.root(), val, converters=configuration.converters, source=source)) for image, val in images.items()) result["images"] = images configuration.update(result, dont_prefix=[dictobj], source=source) for image in result.get('images', {}).keys(): self.make_image_converters(image, configuration, harpoon_spec) def convert_harpoon(path, val): log.info("Converting %s", path) meta = Meta(path.configuration, [("harpoon", "")]) configuration.converters.started(path) return harpoon_spec.harpoon_spec.normalise(meta, val) harpoon_converter = Converter(convert=convert_harpoon, convert_path=["harpoon"]) configuration.add_converter(harpoon_converter) if errors: raise BadConfiguration("Some of the configuration was broken", _errors=errors) return configuration
it "does largest matching first": value = mock.Mock(name="value") value2 = mock.Mock(name="value2") value3 = mock.Mock(name="value3") data = {"blah": {"meh": value}} self.assertEqual(value_at(data, Path("blah.meh")), (Path(["blah", "meh"]), value)) data["blah.meh"] = value2 self.assertEqual(value_at(data, Path("blah.meh")), (Path("blah.meh"), value2)) data["blah.meh"] = {"stuff": value3} self.assertEqual(value_at(data, Path("blah.meh.stuff")), (Path(["blah.meh", "stuff"]), value3)) it "skips misleading paths": value = mock.Mock(name="value") data = {"blah": {"meh": {"stuff": value}}, "blah.meh": {"tree": 3}} self.assertEqual(value_at(data, Path("blah.meh.stuff")), (["blah", "meh", "stuff"], value)) it "skips paths with the same storage": data = MergedOptions.using({"a": "blah"}) self.assertEqual(value_at(data, Path("a")), (Path("a"), "blah")) data["a"] = data["a"] self.assertEqual(value_at(data, Path("a")), (Path("a"), "blah")) it "digs into subclasses of dict": class blah(dict): is_dict = True b = blah({"a":1}) data = MergedOptions.using({"one": b}) self.assertEqual(value_at(data, Path(["one", "a"])), (Path(["one", "a"]), 1))
def find_photons_app_options(self, configuration, args_dict):
    """Return us all the photons_app options"""
    d = lambda r: {} if r in (None, "", NotSpecified) else r
    return MergedOptions.using(
          dict(d(configuration.get('photons_app')).items())
        , dict(d(args_dict.get("photons_app")).items())
        ).as_dict()