def _parse_policy_json(self, policy):
    """Parse a single JSON file into an Amazon policy.

    Validates that the policy document can be parsed, strips out any
    comments, and fills in any environmental tokens.

    args:
        policy: The Policy JSON file to read.

    returns:
        A dictionary of the parsed policy, or None if policy is None.

    raises:
        exceptions.UnrecoverableActorFailure: if the document is corrupt.
    """
    if policy is None:
        return None

    # Verify any supplied inline IAM policies as early as possible so a
    # corrupt document fails fast.
    self.log.debug('Parsing and validating %s' % policy)

    try:
        parsed = utils.convert_script_to_dict(
            script_file=policy, tokens=self._init_tokens)
    except kingpin_exceptions.InvalidScript as e:
        raise exceptions.UnrecoverableActorFailure(
            'Error parsing %s: %s' % (policy, e))

    return parsed
def _parse_policy_json(self, policy):
    """Parse a single JSON file into an Amazon policy.

    Validates that the policy document can be parsed, strips out any
    comments, and fills in any environmental tokens.

    args:
        policy: The Policy JSON file to read.

    returns:
        A dictionary of the parsed policy, or None if policy is None.

    raises:
        exceptions.UnrecoverableActorFailure: if the document is corrupt.
    """
    if policy is None:
        return None

    # Sanity-check supplied inline IAM policies up front; a broken file
    # should abort before any AWS calls are made.
    self.log.debug('Parsing and validating %s' % policy)
    try:
        return utils.convert_script_to_dict(
            script_file=policy, tokens=self._init_tokens)
    except kingpin_exceptions.InvalidScript as e:
        raise exceptions.UnrecoverableActorFailure(
            'Error parsing %s: %s' % (policy, e))
def _load_task_definition(task_definition_file, tokens, default_tokens=None):
    """Loads and verifies a task definition template file.

    Interpolates tokens, and optionally default tokens which may contain
    environment variables.

    Args:
        task_definition_file: task definition file to load, or None.
        tokens: dict of key/value pairs to interpolate into the file.
        default_tokens: dict of default key/value pairs to merge with tokens.

    Returns:
        Resulting task definition dict or None if task_definition_file is
        None.

    Raises:
        exceptions.InvalidOptions: if the parsed file fails schema
            validation.
    """
    if not task_definition_file:
        return None

    # Use a None sentinel rather than a mutable {} default: a shared default
    # dict would leak state across calls if it were ever mutated.
    if default_tokens is None:
        default_tokens = {}

    # Defined Kingpin tokens will override environment variables.
    final_tokens = default_tokens.copy()
    final_tokens.update(tokens)

    task_definition = utils.convert_script_to_dict(
        task_definition_file, final_tokens)

    try:
        jsonschema.validate(task_definition, TASK_DEFINITION_SCHEMA)
    except jsonschema.exceptions.ValidationError as e:
        raise exceptions.InvalidOptions(e)
    return task_definition
def _load_service_definition(
        service_definition_file, tokens, default_tokens=None):
    """Loads and verifies a service definition template file.

    Interpolates tokens, and optionally default tokens which may contain
    environment variables. The service definition template file can be None.

    Args:
        service_definition_file: service definition file to load. If None or
            an empty string, this returns only defaults.
        tokens: dict of key/value pairs to interpolate into the file.
        default_tokens: dict of default key/value pairs to merge with tokens.

    Returns:
        Resulting service definition dict.

    Raises:
        exceptions.InvalidOptions: if the result fails schema validation.
    """
    if default_tokens is None:
        default_tokens = {}

    service_definition = {}
    if service_definition_file:
        # Explicit tokens win over the defaults.
        merged_tokens = dict(default_tokens)
        merged_tokens.update(tokens)
        service_definition = utils.convert_script_to_dict(
            service_definition_file, merged_tokens)

    try:
        jsonschema.validate(service_definition, SERVICE_DEFINITION_SCHEMA)
    except jsonschema.exceptions.ValidationError as e:
        raise exceptions.InvalidOptions(e)
    return service_definition
def _load_task_definition(task_definition_file, tokens, default_tokens=None):
    """Loads and verifies a task definition template file.

    Interpolates tokens, and optionally default tokens which may contain
    environment variables.

    Args:
        task_definition_file: task definition file to load, or None.
        tokens: dict of key/value pairs to interpolate into the file.
        default_tokens: dict of default key/value pairs to merge with tokens.

    Returns:
        Resulting task definition dict or None if task_definition_file is
        None.

    Raises:
        exceptions.InvalidOptions: if the parsed file fails schema
            validation.
    """
    if not task_definition_file:
        return None

    # None sentinel instead of a mutable {} default, which is shared across
    # calls and a classic source of leaked state.
    if default_tokens is None:
        default_tokens = {}

    # Defined Kingpin tokens will override environment variables.
    final_tokens = default_tokens.copy()
    final_tokens.update(tokens)

    task_definition = utils.convert_script_to_dict(
        task_definition_file, final_tokens)

    try:
        jsonschema.validate(task_definition, TASK_DEFINITION_SCHEMA)
    except jsonschema.exceptions.ValidationError as e:
        raise exceptions.InvalidOptions(e)
    return task_definition
def _build_actions(self):
    """Builds either a single set of actions, or multiple sets.

    If no 'contexts' were passed in, then we simply build the actors that
    are defined in the 'acts' option for the group.

    If any 'contexts' were passed in, then this method will create as many
    groups of actions as there are in the list of contexts. For each dict
    in the 'contexts' list, a new group of actors is created with that
    information.

    Note: Because groups may contain nested group actors, any options
    passed into this actors 'init_context' are also passed into the
    actors that we're instantiating.
    """
    contexts = self.option('contexts')

    if not contexts:
        return self._build_action_group(self._init_context)

    # If the data passed into the 'contexts' is a list of dicts, we take it
    # as is and do nothing to it. isinstance() is the idiomatic type check
    # (type(x) == list rejects subclasses).
    if isinstance(contexts, list):
        context_data = contexts
    # DEPRECATE IN v0.5.0
    elif isinstance(contexts, dict):
        # Close the file handle deterministically instead of leaking it.
        with open(contexts['file']) as context_file:
            context_string = context_file.read()
        context_string = kp_utils.populate_with_tokens(
            string=context_string,
            tokens=contexts.get('tokens', {}),
            strict=True)
        context_data = demjson.decode(context_string)
    # END DEPRECATION
    # If the data passed in is a string, it must be a pointer to a file
    # with contexts in it. We read that file, and we parse it for any
    # missing tokens. We use the "init tokens" that made it into this actor
    # as available token substitutions.
    elif isinstance(contexts, str):
        context_data = kp_utils.convert_script_to_dict(
            contexts, self._init_tokens)

    actions = []
    for context in context_data:
        # Per-context values override the inherited init context.
        combined_context = dict(
            list(self._init_context.items()) + list(context.items()))
        self.log.debug('Inherited context %s'
                       % list(self._init_context.items()))
        self.log.debug('Specified context %s' % list(context.items()))
        self.log.debug('Building acts with parameters: %s'
                       % combined_context)
        for action in self._build_action_group(context=combined_context):
            actions.append(action)

    return actions
def _build_actions(self):
    """Builds either a single set of actions, or multiple sets.

    If no 'contexts' were passed in, then we simply build the actors that
    are defined in the 'acts' option for the group.

    If any 'contexts' were passed in, then this method will create as many
    groups of actions as there are in the list of contexts. For each dict
    in the 'contexts' list, a new group of actors is created with that
    information.

    Note: Because groups may contain nested group actors, any options
    passed into this actors 'init_context' are also passed into the
    actors that we're instantiating.
    """
    contexts = self.option('contexts')

    if not contexts:
        return self._build_action_group(self._init_context)

    # If the data passed into the 'contexts' is a list of dicts, we take it
    # as is and do nothing to it.
    if isinstance(contexts, list):
        context_data = contexts
    # DEPRECATE IN v0.5.0
    elif isinstance(contexts, dict):
        # Close the file handle deterministically instead of leaking it.
        with open(contexts['file']) as context_file:
            context_string = context_file.read()
        context_string = kp_utils.populate_with_tokens(
            string=context_string,
            tokens=contexts.get('tokens', {}),
            strict=True)
        context_data = demjson.decode(context_string)
    # END DEPRECATION
    # If the data passed in is a string, it must be a pointer to a file
    # with contexts in it. We read that file, and we parse it for any
    # missing tokens. We use the "init tokens" that made it into this actor
    # as available token substitutions. Python 3: 'str' replaces the
    # removed 'basestring'.
    elif isinstance(contexts, str):
        context_data = kp_utils.convert_script_to_dict(
            contexts, self._init_tokens)

    actions = []
    for context in context_data:
        # dict_items objects cannot be concatenated with '+' on Python 3,
        # so materialize them as lists before merging. Per-context values
        # override the inherited init context.
        combined_context = dict(
            list(self._init_context.items()) + list(context.items()))
        self.log.debug('Inherited context %s'
                       % list(self._init_context.items()))
        self.log.debug('Specified context %s' % list(context.items()))
        self.log.debug('Building acts with parameters: %s'
                       % combined_context)
        for action in self._build_action_group(context=combined_context):
            actions.append(action)

    return actions
def test_convert_script_to_dict(self):
    """convert_script_to_dict accepts paths, open files, and YAML files."""
    # Compute the examples directory once instead of three times.
    dirname, _ = os.path.split(os.path.abspath(__file__))
    examples = '%s/../../examples' % dirname

    # Should work with string path to a file
    simple = '%s/simple.json' % examples
    ret = utils.convert_script_to_dict(simple, {})
    self.assertEqual(type(ret), dict)

    # Should work with file instance also. Use a context manager so the
    # handle is closed rather than leaked (ResourceWarning on Python 3).
    with open(simple) as instance:
        ret = utils.convert_script_to_dict(instance, {})
    self.assertEqual(type(ret), dict)

    # Should definitely support YAML as well
    with open('%s/simple.yaml' % examples) as instance:
        ret = utils.convert_script_to_dict(instance, {})
    self.assertEqual(type(ret), dict)
def test_convert_script_to_dict(self):
    """convert_script_to_dict accepts paths, open files, and YAML files."""
    # assertEquals is a deprecated alias; assertEqual is the supported
    # spelling. Also hoist the duplicated path computation.
    dirname, _ = os.path.split(os.path.abspath(__file__))
    examples = '%s/../../examples' % dirname

    # Should work with string path to a file
    simple = '%s/simple.json' % examples
    ret = utils.convert_script_to_dict(simple, {})
    self.assertEqual(type(ret), dict)

    # Should work with file instance also; close the handle instead of
    # leaking it.
    with open(simple) as instance:
        ret = utils.convert_script_to_dict(instance, {})
    self.assertEqual(type(ret), dict)

    # Should definitely support YAML as well
    with open('%s/simple.yaml' % examples) as instance:
        ret = utils.convert_script_to_dict(instance, {})
    self.assertEqual(type(ret), dict)
def _parse_group_config(self):
    """Parses the ElastiGroup config and replaces tokens.

    Reads through the supplied ElastiGroup configuration JSON blob (or
    YAML!), replaces any tokens that need replacement, and then sanity
    checks it against our schema.

    Note, contextual tokens (which are evaluated at run time, not
    compilation time) are not included here. Instead, those will be
    evaluated in the self._precache() method.

    Returns:
        The parsed and validated config dict, or None if no config was
        supplied.

    Raises:
        exceptions.InvalidOptions: if the config cannot be parsed.
    """
    config = self.option('config')
    if config is None:
        return None

    self.log.debug('Parsing and validating %s' % config)

    # Join the init_tokens the class was instantiated with and the explicit
    # tokens that the user supplied.
    tokens = dict(self._init_tokens)
    tokens.update(self.option('tokens'))

    try:
        parsed = utils.convert_script_to_dict(script_file=config,
                                              tokens=tokens)
    except (kingpin_exceptions.InvalidScript, LookupError) as e:
        raise exceptions.InvalidOptions('Error parsing %s: %s' % (config, e))

    # The userData portion of the body data needs to be Base64 encoded.
    # base64.b64encode() requires bytes on Python 3, so encode text input
    # first and store the result back as text so the config stays
    # JSON-serializable.
    orig_data = (
        parsed['group']['compute']['launchSpecification']['userData'])
    if isinstance(orig_data, str):
        orig_data = orig_data.encode('utf-8')
    new = base64.b64encode(orig_data).decode('utf-8')
    parsed['group']['compute']['launchSpecification']['userData'] = new

    # Ensure that the name of the ElastiGroup in the config file matches
    # the name that was supplied to the actor -- or overwrite it.
    parsed['group']['name'] = self.option('name')

    # Now run the configuration through the schema validator
    ElastiGroupSchema.validate(parsed)
    return parsed
def _parse_group_config(self):
    """Parses the ElastiGroup config and replaces tokens.

    Reads through the supplied ElastiGroup configuration JSON blob (or
    YAML!), replaces any tokens that need replacement, and then sanity
    checks it against our schema.

    Note, contextual tokens (which are evaluated at run time, not
    compilation time) are not included here. Instead, those will be
    evaluated in the self._precache() method.

    Returns:
        The parsed and validated config dict, or None if no config was
        supplied.

    Raises:
        exceptions.InvalidOptions: if the config cannot be parsed.
    """
    config = self.option('config')
    if config is None:
        return None

    self.log.debug('Parsing and validating %s' % config)

    # Join the init_tokens the class was instantiated with and the explicit
    # tokens that the user supplied.
    tokens = dict(self._init_tokens)
    tokens.update(self.option('tokens'))

    try:
        parsed = utils.convert_script_to_dict(
            script_file=config, tokens=tokens)
    except (kingpin_exceptions.InvalidScript, LookupError) as e:
        raise exceptions.InvalidOptions(
            'Error parsing %s: %s' % (config, e))

    # The userData portion of the body data needs to be Base64 encoded.
    # base64.b64encode() requires bytes on Python 3, so encode text input
    # first and store the result back as text so the config stays
    # JSON-serializable.
    orig_data = (parsed['group']['compute']
                 ['launchSpecification']['userData'])
    if isinstance(orig_data, str):
        orig_data = orig_data.encode('utf-8')
    new = base64.b64encode(orig_data).decode('utf-8')
    parsed['group']['compute']['launchSpecification']['userData'] = new

    # Ensure that the name of the ElastiGroup in the config file matches
    # the name that was supplied to the actor -- or overwrite it.
    parsed['group']['name'] = self.option('name')

    # Now run the configuration through the schema validator
    ElastiGroupSchema.validate(parsed)
    return parsed
def test_convert_script_to_dict_junk(self):
    """InvalidScript is raised for unparseable JSON and YAML input."""
    # An empty buffer pretending to be a JSON file cannot be parsed.
    buf = io.StringIO()
    buf.name = 'Somefile.json'
    with self.assertRaises(exceptions.InvalidScript):
        utils.convert_script_to_dict(buf, {})

    # A junk string path is rejected too.
    with self.assertRaises(exceptions.InvalidScript):
        utils.convert_script_to_dict('junk data', {})

    # Malformed YAML content is also rejected.
    buf = io.StringIO()
    buf.name = 'Somefile.yaml'
    buf.write('---bad-yaml')
    with self.assertRaises(exceptions.InvalidScript):
        utils.convert_script_to_dict(buf, {})
def test_convert_script_to_dict_junk(self):
    """InvalidScript is raised for unparseable JSON and YAML input."""
    # Python 3 fixes: the StringIO module was removed (use io.StringIO),
    # and assigning __repr__ on an instance is a no-op because dunder
    # lookup happens on the type -- set a real .name attribute instead.
    import io

    instance = io.StringIO()
    instance.name = 'Somefile.json'
    with self.assertRaises(exceptions.InvalidScript):
        utils.convert_script_to_dict(instance, {})

    with self.assertRaises(exceptions.InvalidScript):
        utils.convert_script_to_dict('junk data', {})

    instance = io.StringIO()
    instance.name = 'Somefile.yaml'
    instance.write('---bad-yaml')
    with self.assertRaises(exceptions.InvalidScript):
        utils.convert_script_to_dict(instance, {})
def _load_service_definition(
        service_definition_file, tokens, default_tokens=None):
    """Loads and verifies a service definition template file.

    Interpolates tokens, and optionally default tokens which may contain
    environment variables. The service definition template file can be None.

    Args:
        service_definition_file: service definition file to load. If None or
            an empty string, this returns only defaults.
        tokens: dict of key/value pairs to interpolate into the file.
        default_tokens: dict of default key/value pairs to merge with tokens.

    Returns:
        Resulting service definition dict.

    Raises:
        exceptions.InvalidOptions: if the result fails schema validation.
    """
    # None sentinel instead of a mutable {} default, which is shared across
    # calls and a classic source of leaked state.
    if default_tokens is None:
        default_tokens = {}

    if not service_definition_file:
        service_definition = {}
    else:
        final_tokens = default_tokens.copy()
        final_tokens.update(tokens)
        service_definition = utils.convert_script_to_dict(
            service_definition_file, final_tokens)

    try:
        jsonschema.validate(service_definition, SERVICE_DEFINITION_SCHEMA)
    except jsonschema.exceptions.ValidationError as e:
        raise exceptions.InvalidOptions(e)

    # Set default values.
    service_definition.setdefault('loadBalancers', [])
    service_definition.setdefault('deploymentConfiguration', {})

    return service_definition
def _get_config_from_script(self, script_file):
    """Convert a script into a dict() with inserted ENV vars.

    Run the JSON dictionary through our environment parser and return
    back a dictionary with all of the %XX% keys swapped out with
    environment variables.

    Args:
        script_file: A path string to a file, or an open() file stream.

    Returns:
        Dictionary adhering to our schema.

    Raises:
        UnrecoverableActorFailure - if parsing script or inserting env
        vars fails.
    """
    self.log.debug('Parsing %s' % script_file)
    try:
        parsed_config = utils.convert_script_to_dict(
            script_file=script_file, tokens=self._init_tokens)
    except (kingpin_exceptions.InvalidScript, LookupError) as e:
        # Wrap parse/token failures in the actor-level exception.
        raise exceptions.UnrecoverableActorFailure(e)
    return parsed_config
def _get_config_from_script(self, script_file):
    """Convert a script into a dict() with inserted ENV vars.

    Run the JSON dictionary through our environment parser and return
    back a dictionary with all of the %XX% keys swapped out with
    environment variables.

    Args:
        script_file: A path string to a file, or an open() file stream.

    Returns:
        Dictionary adhering to our schema.

    Raises:
        UnrecoverableActorFailure - if parsing script or inserting env
        vars fails.
    """
    self.log.debug('Parsing %s' % script_file)
    try:
        result = utils.convert_script_to_dict(script_file=script_file,
                                              tokens=self._init_tokens)
    except (kingpin_exceptions.InvalidScript, LookupError) as e:
        # Any parse or token-substitution failure is fatal for the actor.
        raise exceptions.UnrecoverableActorFailure(e)
    return result
def test_convert_script_to_dict_bad_name(self):
    """Files with an unrecognized extension raise InvalidScriptName."""
    # An empty buffer is fine here -- the extension check fails before any
    # content is parsed.
    bogus = io.StringIO()
    bogus.name = 'Somefile.HAHA'
    with self.assertRaises(exceptions.InvalidScriptName):
        utils.convert_script_to_dict(bogus, {})
def test_convert_script_to_dict_bad_name(self):
    """Files with an unrecognized extension raise InvalidScriptName."""
    # Python 3 fixes: the StringIO module was removed (use io.StringIO),
    # and assigning __repr__ on an instance is a no-op because dunder
    # lookup happens on the type -- set a real .name attribute instead.
    import io

    instance = io.StringIO()  # Empty buffer; the name check fails first.
    instance.name = 'Somefile.HAHA'
    with self.assertRaises(exceptions.InvalidScriptName):
        utils.convert_script_to_dict(instance, {})