def schema(self, wfjt, node_network=None):
    """Show or reconcile the workflow job template's node network.

    With no ``node_network``, return the current topology (rendered as
    YAML for human output).  Otherwise, parse the YAML/JSON content and
    update the stored workflow nodes to match it, returning the
    resulting topology.
    """
    current = self._get_schema(wfjt)
    if not isinstance(current, list):
        current = []

    # Read-only mode: just report what is already there.
    if node_network is None:
        if settings.format == 'human':
            settings.format = 'yaml'
        return current

    # Accept either a raw string or an open file-like object.
    if hasattr(node_network, 'read'):
        node_network = node_network.read()
    desired = string_to_dict(node_network, allow_kv=False,
                             require_dict=False)
    if not isinstance(desired, list):
        desired = []

    # Reconcile the server-side nodes against the requested topology.
    _update_workflow(
        [TreeNode(branch, wfjt, include_id=True) for branch in current],
        [TreeNode(branch, wfjt) for branch in desired])

    # A tree is not meaningful in 'human' table format; use YAML.
    if settings.format == 'human':
        settings.format = 'yaml'
    return self._get_schema(wfjt)
def tower_auth_config(module):
    '''tower_auth_config attempts to load the tower-cli.cfg file
    specified from the `tower_config_file` parameter. If found, it
    returns the contents of the file as a dictionary, else it will
    attempt to fetch values from the module params and only pass those
    values that have been set.
    '''
    config_file = module.params.get('tower_config_file')
    if config_file:
        config_file = os.path.expanduser(config_file)
        if not os.path.exists(config_file):
            module.fail_json(msg='file not found: %s' % config_file)
        if os.path.isdir(config_file):
            module.fail_json(msg='directory can not be used as config file: %s' % config_file)
        # Parse the whole file with the tower-cli parser (YAML/JSON/k=v).
        with open(config_file, 'rb') as f:
            return parser.string_to_dict(f.read())
    else:
        # No config file: collect only the auth params that were set.
        auth_config = {}
        host = module.params.get('tower_host')
        if host:
            auth_config['host'] = host
        username = module.params.get('tower_username')
        if username:
            auth_config['username'] = username
        password = module.params.get('tower_password')
        if password:
            auth_config['password'] = password
        # verify_ssl is a boolean, so False must still be passed through.
        verify_ssl = module.params.get('tower_verify_ssl')
        if verify_ssl is not None:
            auth_config['verify_ssl'] = verify_ssl
        return auth_config
def tower_auth_config(module):
    '''Build the tower-cli authentication configuration.

    Load the tower-cli.cfg file named by the `tower_config_file` module
    parameter and return its parsed contents; when no file is given,
    collect the auth-related module params and return only those that
    were actually set.
    '''
    config_file = module.params.get('tower_config_file')
    if not config_file:
        # Assemble auth settings from individual module params.
        auth_config = {}
        for key in ('host', 'username', 'password'):
            value = module.params.get('tower_%s' % key)
            if value:
                auth_config[key] = value
        # verify_ssl is boolean: False is meaningful, only skip None.
        verify_ssl = module.params.get('tower_verify_ssl')
        if verify_ssl is not None:
            auth_config['verify_ssl'] = verify_ssl
        return auth_config

    config_file = os.path.expanduser(config_file)
    if not os.path.exists(config_file):
        module.fail_json(msg='file not found: %s' % config_file)
    if os.path.isdir(config_file):
        module.fail_json(
            msg='directory can not be used as config file: %s' % config_file)
    with open(config_file, 'rb') as f:
        return parser.string_to_dict(f.read())
def tower_config():
    """Load ~/.tower_cli.cfg and return its parsed contents.

    NOTE(review): relies on a module-level ``module`` (AnsibleModule)
    for error reporting -- confirm it is defined in this script.
    """
    path = os.path.expanduser('~/.tower_cli.cfg')
    if not os.path.exists(path):
        module.fail_json(msg='file not found: %s' % path)
    with open(path, 'rb') as f:
        return parser.string_to_dict(f.read())
def convert(self, value, param, ctx):
    """Convert CLI input into a Python structure via JSON/YAML parsing."""
    raw = super(StructuredInput, self).convert(value, param, ctx)
    try:
        return string_to_dict(raw, allow_kv=False)
    except Exception:
        # Surface parse failures as a usage error naming the option.
        message = (
            'Error loading structured input given by %s parameter. Please '
            'check the validity of your JSON/YAML format.' % param.name)
        raise exc.UsageError(message)
def convert(self, value, param, ctx):
    # Let the base Click type produce the string, then parse it as
    # structured (JSON/YAML) data rather than k=v pairs.
    s = super(StructuredInput, self).convert(value, param, ctx)
    try:
        return string_to_dict(s, allow_kv=False)
    except Exception:
        # Report parse failures as a usage error naming the option.
        raise exc.UsageError(
            'Error loading structured input given by %s parameter. Please '
            'check the validity of your JSON/YAML format.' % param.name
        )
def tower_auth_config(module):
    """Return auth settings parsed from the module's config file.

    Returns an empty dict when no ``config_file`` param was supplied;
    otherwise validates the path and returns the parsed file contents.
    """
    path = module.params.get('config_file')
    if not path:
        return {}
    path = os.path.expanduser(path)
    if not os.path.exists(path):
        module.fail_json(msg='file not found: %s' % path)
    if os.path.isdir(path):
        module.fail_json(
            msg='directory can not be used as config file: %s' % path)
    with open(path, 'rb') as f:
        return parser.string_to_dict(f.read())
def test_handling_bad_data(self):
    """Check robustness of the parser functions in how they handle
    empty strings, null values, etc."""
    # Both None and "" should come back as empty dicts everywhere.
    for degenerate in (None, ""):
        self.assertEqual(parser.parse_kv(degenerate), {})
        self.assertEqual(parser.string_to_dict(degenerate), {})
    # Integer-looking k=v values are coerced to ints.
    self.assertEqual(parser.parse_kv("foo=5"), {"foo": 5})
    # An empty extra_vars list doesn't blow up.
    self.assertEqual(parser.process_extra_vars([]), "")
    self.assertEqual(parser.process_extra_vars([""], force_json=False), "")
def test_handling_bad_data(self):
    """Check robustness of the parser functions when fed empty
    strings, None, and non-string scalars."""
    # None should pass harmlessly through the whole chain.
    self.assertEqual(parser.parse_kv(None), {})
    self.assertEqual(parser.string_to_dict(None), {})
    self.assertEqual(parser.file_or_yaml_split(None), None)
    # So should the empty string.
    self.assertEqual(parser.parse_kv(""), {})
    self.assertEqual(parser.string_to_dict(""), {})
    self.assertEqual(parser.file_or_yaml_split(""), "")
    # Non-string scalars become raw params; k=v ints are coerced.
    self.assertEqual(parser.parse_kv(5), {"_raw_params": "5"})
    self.assertEqual(parser.parse_kv("foo=5"), {"foo": 5})
def schema(self, wfjt, node_network=None):
    """
    Convert YAML/JSON content into workflow node objects if
    node_network param is given.
    If not, print a YAML representation of the node network.

    =====API DOCS=====
    Convert YAML/JSON content into workflow node objects if ``node_network`` param is given.
    If not, print a YAML representation of the node network.

    :param wfjt: Primary key or name of the workflow job template to run schema against.
    :type wfjt: str
    :param node_network: JSON- or YAML-formatted string representing the topology of the workflow
                         job template be updated to.
    :type node_network: str
    :returns: The latest topology (possibly after modification) of the workflow job template.
    :rtype: dict

    =====API DOCS=====
    """
    topology = self._get_schema(wfjt)
    if not isinstance(topology, list):
        topology = []

    # No network given: display-only invocation.
    if node_network is None:
        if settings.format == 'human':
            settings.format = 'yaml'
        return topology

    # Accept a string or an open file-like object.
    if hasattr(node_network, 'read'):
        node_network = node_network.read()
    target = string_to_dict(node_network, allow_kv=False,
                            require_dict=False)
    if not isinstance(target, list):
        target = []

    _update_workflow(
        [TreeNode(branch, wfjt, include_id=True) for branch in topology],
        [TreeNode(branch, wfjt) for branch in target])

    # Trees render poorly as 'human' tables; fall back to YAML.
    if settings.format == 'human':
        settings.format = 'yaml'
    return self._get_schema(wfjt)
def schema(self, wfjt, node_network=None):
    """
    Convert YAML/JSON content into workflow node objects if
    node_network param is given.
    If not, print a YAML representation of the node network.

    =====API DOCS=====
    Convert YAML/JSON content into workflow node objects if ``node_network`` param is given.
    If not, print a YAML representation of the node network.

    :param wfjt: Primary key or name of the workflow job template to run schema against.
    :type wfjt: str
    :param node_network: JSON- or YAML-formatted string representing the topology of the workflow
                         job template be updated to.
    :type node_network: str
    :returns: The latest topology (possibly after modification) of the workflow job template.
    :rtype: dict

    =====API DOCS=====
    """
    # Current topology as stored on the server.
    existing_network = self._get_schema(wfjt)
    if not isinstance(existing_network, list):
        existing_network = []
    if node_network is None:
        # Read-only invocation: just show the existing network.
        if settings.format == 'human':
            settings.format = 'yaml'
        return existing_network
    if hasattr(node_network, 'read'):
        # Accept an open file-like object as well as a string.
        node_network = node_network.read()
    node_network = string_to_dict(
        node_network, allow_kv=False, require_dict=False)
    if not isinstance(node_network, list):
        node_network = []
    # Reconcile existing nodes against the requested topology.
    _update_workflow([TreeNode(x, wfjt, include_id=True)
                      for x in existing_network],
                     [TreeNode(x, wfjt) for x in node_network])
    if settings.format == 'human':
        settings.format = 'yaml'
    return self._get_schema(wfjt)
def test_custom_parse_list(self):
    """Custom input-output scenario tests."""
    for case in self.CUSTOM_DATA:
        given, expected = case[0], case[1]
        self.assertEqual(parser.string_to_dict(given, allow_kv=True),
                         expected)
def test_parse_list(self):
    """Run tests on the data from Ansible core project."""
    for case in self.SPLIT_DATA:
        # case[2] holds the expected parsed dictionary.
        self.assertEqual(parser.string_to_dict(case[0], allow_kv=True),
                         case[2])
def test_handling_unicode(self):
    """Verify that unicode strings are correctly parsed and
    converted to desired python objects"""
    parsed = parser.string_to_dict(u"the_user_name='äöü ÄÖÜ'")
    self.assertEqual(parsed, {u'the_user_name': u'äöü ÄÖÜ'})
def schema(self, wfjt, node_network=None):
    """
    Convert YAML/JSON content into workflow node objects if
    node_network param is given.
    If not, print a YAML representation of the node network.
    """
    if node_network is None:
        # Display-only invocation: render the existing network as YAML.
        if settings.format == 'human':
            settings.format = 'yaml'
        return self._get_schema(wfjt)

    node_res = get_resource('node')

    def create_node(node_branch, parent, relationship):
        """Create a node from the top-level keys of node_branch."""
        create_data = {}
        # list() keeps this working on Python 3, where dict.values()
        # returns a view that cannot be concatenated with a list.
        fk_fields = list(JOB_TYPES.values()) + ['inventory', 'credential']
        for fd in NODE_STANDARD_FIELDS + list(JOB_TYPES.values()):
            if fd not in node_branch:
                continue
            if fd in fk_fields and not isinstance(node_branch[fd], int):
                # Node's template was given by name, do lookup
                ujt_res = get_resource(fd)
                ujt_data = ujt_res.get(name=node_branch[fd])
                create_data[fd] = ujt_data['id']
            else:
                create_data[fd] = node_branch[fd]
        create_data['workflow_job_template'] = wfjt
        return node_res._get_or_create_child(parent, relationship,
                                             **create_data)

    def get_adj_list(node_branch):
        """Map relationship name -> list of child branches."""
        ret = {}
        for fd in node_branch:
            for rel in ('success', 'failure', 'always'):
                if fd.startswith(rel):
                    sub_branch_list = node_branch[fd]
                    if not isinstance(sub_branch_list, list):
                        # BUG FIX: the two implicitly-concatenated string
                        # literals previously produced "lists.Encountered"
                        # with no separating space.
                        raise BadRequest(
                            'Sublists in spec must be lists. '
                            'Encountered in {0} at {1}'.format(
                                fd, sub_branch_list))
                    ret[rel] = sub_branch_list
                    break
        return ret

    def create_node_recursive(node_network):
        """Breadth-first creation of the node tree and its links."""
        queue = deque()
        id_queue = deque()
        for base_node in node_network:
            queue.append(base_node)
            id_queue.append(create_node(base_node, None, None)['id'])
        while len(queue) != 0:
            to_expand = queue.popleft()
            parent_id = id_queue.popleft()
            adj_list = get_adj_list(to_expand)
            for rel in adj_list:
                for sub_node in adj_list[rel]:
                    id_queue.append(
                        create_node(sub_node, parent_id, rel)['id'])
                    queue.append(sub_node)
                    node_res._assoc(node_res._forward_rel_name(rel),
                                    parent_id, id_queue[-1])

    # Accept a string or an open file-like object.
    if hasattr(node_network, 'read'):
        node_network = node_network.read()
    node_network = string_to_dict(
        node_network, allow_kv=False, require_dict=False)

    create_node_recursive(node_network)
    if settings.format == 'human':
        settings.format = 'yaml'
    return self._get_schema(wfjt)
def test_custom_parse_list(self):
    """Custom input-output scenario tests."""
    for case in self.CUSTOM_DATA:
        given, expected = case[0], case[1]
        self.assertEqual(parser.string_to_dict(given), expected)
def test_parse_list(self):
    """Run tests on the data from Ansible core project."""
    for case in self.SPLIT_DATA:
        # case[2] holds the expected parsed dictionary.
        self.assertEqual(parser.string_to_dict(case[0]), case[2])