def extract_inventory_relations(asset, relation_type):
    """Export one relation type (e.g. 'host', 'inventory_source') of an inventory.

    Args:
        asset: The inventory asset dict; only ``asset['id']`` is read.
        relation_type: Tower resource name of the relation to extract.

    Returns:
        dict with:
            'items': list of exportable relation dicts, and
            'existing_name_to_id_map': mapping of relation name -> Tower id.

    Raises:
        TowerCLIError: if any Tower API lookup fails.
    """
    # POST options tell us which fields are settable and therefore exportable.
    post_options = get_api_options(relation_type)

    # Pull every relation attached to this inventory.
    try:
        relations = tower_cli.get_resource(relation_type).list(all_pages=True, **{'inventory': asset['id']})
    except TowerCLIError as e:
        raise TowerCLIError("Unable to get {} for {} : {}".format(relation_type, asset['id'], e))

    return_relations = []
    name_to_id_map = {}

    # BUG FIX: the original returned a bare list here, although callers
    # subscript the result with ['items']; return the documented dict shape
    # in the empty case too.
    if 'results' not in relations:
        return {'items': return_relations, 'existing_name_to_id_map': name_to_id_map}

    for relation in relations['results']:
        # Relations controlled by an inventory source are recreated by that
        # source on import, so they are skipped here.
        if relation.get('has_inventory_sources'):
            continue

        name_to_id_map[relation['name']] = relation['id']
        new_relation = {}
        map_node_to_post_options(post_options, relation, new_relation)

        if relation_type == 'inventory_source':
            # Inventory sources reference other objects by id; resolve each
            # id to a name so the export is portable between Tower instances.
            if relation.get('source_project'):
                try:
                    project = tower_cli.get_resource('project').get(relation['source_project'])
                except TowerCLIError as e:
                    raise TowerCLIError("Unable to get project {} for {} : {}".format(
                        relation['source_project'], relation_type, e
                    ))
                new_relation['source_project'] = project['name']

            if relation.get('source_script'):
                try:
                    script = tower_cli.get_resource('inventory_script').get(relation['source_script'])
                except TowerCLIError as e:
                    raise TowerCLIError("Unable to get inventory script {} for {} : {}".format(
                        relation['source_script'], relation_type, e
                    ))
                new_relation['source_script'] = script['name']

            if relation.get('credential'):
                try:
                    credential = tower_cli.get_resource('credential').get(relation['credential'])
                except TowerCLIError as e:
                    raise TowerCLIError("Unable to get inventory credential {} for {} : {}".format(
                        relation['credential'], relation_type, e
                    ))
                new_relation['credential'] = credential['name']

            # Now get the schedules for this source.
            if 'related' in relation and 'schedules' in relation['related']:
                schedule_data = extract_schedules(relation)
                new_relation['schedules'] = schedule_data['items']

        # The inventory id is implied by the parent asset; drop it.
        # pop() avoids a KeyError if the API options did not include it.
        new_relation.pop('inventory', None)
        return_relations.append(new_relation)

    return {'items': return_relations, 'existing_name_to_id_map': name_to_id_map}
def test_show(self):
    """Verify that show() routes the error text to a caller-supplied file."""
    # Capture the output in an in-memory text stream instead of stderr.
    captured = StringIO()
    error = TowerCLIError('Fe fi fo fum; I smell the blood of an Englishman.')
    error.show(file=captured)
    # The message must have been written to the alternate file object.
    self.assertIn('Fe fi fo fum;', captured.getvalue())
def receive(self, all=False, asset_input=None):
    """Export the requested assets and write them to stdout.

    The output encoding is driven by settings.format: 'human' and 'json'
    both produce indented JSON; 'yaml' produces an ordered YAML dump.

    Raises:
        TowerCLIError: for any other configured format.
    """
    assets = self.export_assets(all, asset_input)
    out_stream = click.get_text_stream('stdout')
    fmt = settings.format
    if fmt in ('human', 'json'):
        import json
        out_stream.write(json.dumps(assets, indent=2))
    elif fmt == 'yaml':
        import yaml
        out_stream.write(parser.ordered_dump(assets, Dumper=yaml.SafeDumper, default_flow_style=False))
    else:
        raise TowerCLIError("Format {} is unsupported".format(fmt))
    out_stream.write("\n")
def custom_tower_receive(asset_type, asset_name):
    """Fabricate a single stub asset record for the given type and name.

    The name's infix (captured by the module-level ``regex``) selects
    special behavior from the module-level ``assets`` table.

    Raises:
        TowerCLIError: when the infix is listed under assets['missing'].
    """
    stub = {
        "name": f"{asset_name}",
        "asset_type": f"{asset_type}",
        # this property must be string
        "extra_vars": "{}",
    }
    marker = regex.match(asset_name).group(1)
    if marker in assets["different"]:
        # add a nonexisting property to create an asset difference
        stub["different"] = True
    elif marker in assets["missing"]:
        raise TowerCLIError("exception raised on missing asset")
    return [stub]
def resolve_asset_dependencies(an_asset, asset_type):
    """Replace id references in an_asset's related fields with identifying names.

    Mutates an_asset in place: for each entry in an_asset['related'] that is
    also a top-level field, the numeric id is swapped for the related
    object's identifier value.

    Raises:
        TowerCLIError: if a related object cannot be fetched from Tower.
    """
    for relation in an_asset['related']:
        if relation not in an_asset:
            continue
        # Multiple credentials on things like job templates come through as
        # vault_credential / machine_credential; all are 'credential' resources.
        model_type = "credential" if relation.endswith("credential") else relation
        try:
            expanded = tower_cli.get_resource(model_type).get(an_asset[relation])
        except TowerCLIError as e:
            raise TowerCLIError("Unable to get {} named {}: {}".format(model_type, an_asset[relation], e))
        # NOTE(review): identity is looked up for asset_type, not model_type —
        # presumably intentional, but confirm for relations (e.g. users) whose
        # identifier field differs from the parent asset's.
        identifier = get_identity(asset_type)
        if identifier in expanded:
            an_asset[relation] = expanded[identifier]
def get_assets_from_input(all=False, asset_input=None):
    """Consolidate command-line asset selections into a single request map.

    Args:
        all: When True, request every asset type in SEND_ORDER.
        asset_input: Mapping of asset_type -> list of names; the special
            name 'all' requests every asset of that type.

    Returns:
        dict mapping asset_type -> {'all': bool, 'names': [str, ...]}.

    Raises:
        TowerCLIError: if neither ``all`` nor any asset_input entry was given.
    """
    return_assets = {}
    if all:
        # Request everything: one 'all' entry per known asset type.
        for asset_type in SEND_ORDER:
            return_assets[asset_type] = {'all': True, 'names': []}
    else:
        # Guard against asset_input=None (the original crashed with a
        # TypeError before it could raise the intended TowerCLIError).
        for asset_type, names in (asset_input or {}).items():
            entry = return_assets.setdefault(asset_type, {'all': False, 'names': []})
            for asset_name in names:
                if asset_name == 'all':
                    entry['all'] = True
                else:
                    entry['names'].append(asset_name)
    if not return_assets:
        # Fixed the grammar of the original message ("Nothing assets were specified").
        raise TowerCLIError("No assets were specified")
    return return_assets
def export_assets(self, all, asset_input):
    """Pull the requested assets out of Tower and build exportable dicts.

    Args:
        all: When True, export every known asset type.
        asset_input: Mapping of asset_type -> list of names to export.

    Returns:
        list of export dicts, each tagged with common.ASSET_TYPE_KEY and,
        where applicable, a common.ASSET_RELATION_KEY sub-dict.

    Raises:
        TowerCLIError: if a named asset cannot be fetched from Tower.
    """
    # Extract and consolidate all of the items we got on the command line.
    assets_to_export = common.get_assets_from_input(all, asset_input)

    # These will be returned from this method.
    exported_objects = []

    for asset_type in assets_to_export:
        # Load the API POST options for this asset type; they define which
        # fields are settable and therefore worth exporting.
        types_api_options = common.get_api_options(asset_type)

        # Now we are going to extract the objects from Tower and put them
        # into an array for processing.
        acquired_assets_to_export = []

        # Field used to look an asset up by name (e.g. 'name' or 'username').
        identifier = common.get_identity(asset_type)

        # Either get everything or just the named items.
        if assets_to_export[asset_type]['all']:
            resources = tower_cli.get_resource(asset_type).list(
                all_pages=True)
            # No 'results' key means nothing of this type exists; move on.
            if 'results' not in resources:
                continue
            acquired_assets_to_export = acquired_assets_to_export + resources[
                'results']
        else:
            for name in assets_to_export[asset_type]['names']:
                try:
                    resource = tower_cli.get_resource(asset_type).get(
                        **{identifier: name})
                except TowerCLIError as e:
                    raise TowerCLIError(
                        "Unable to get {} named {} : {}".format(
                            asset_type, name, e))
                acquired_assets_to_export.append(resource)

        # Next we are going to loop over the objects we got from Tower.
        for asset in acquired_assets_to_export:
            # If this object is managed_by_tower then it cannot be imported
            # elsewhere; move on.
            if 'managed_by_tower' in asset and asset['managed_by_tower']:
                continue

            # Resolve id references to names so the export is portable.
            common.resolve_asset_dependencies(asset, asset_type)

            # Create a new object with the ASSET_TYPE_KEY and merge the
            # options in from the object we got.
            exported_asset = {common.ASSET_TYPE_KEY: asset_type}
            common.map_node_to_post_options(types_api_options, asset, exported_asset)

            # Clean up any $encrypted$ values.
            common.remove_encrypted_values(exported_asset)

            # Special cases for touch up.
            if asset_type == 'project':
                # Exported projects that are not manual don't need a local path.
                common.remove_local_path_from_scm_project(exported_asset)

            # Next we are going to go after any of the related objects
            # this resource type declares.
            for relation in tower_cli.get_resource(asset_type).related:
                if common.ASSET_RELATION_KEY not in exported_asset:
                    exported_asset[common.ASSET_RELATION_KEY] = {}

                # Each relation type has its own extraction helper.
                if relation == 'workflow_nodes':
                    exported_asset[common.ASSET_RELATION_KEY][
                        relation] = common.extract_workflow_nodes(asset)
                elif relation == 'survey_spec':
                    survey_spec = tower_cli.get_resource(
                        asset_type).survey(asset['id'])
                    exported_asset[
                        common.ASSET_RELATION_KEY][relation] = survey_spec
                elif relation == 'host' or relation == 'inventory_source':
                    exported_asset[common.ASSET_RELATION_KEY][relation] = \
                        common.extract_inventory_relations(asset, relation)['items']
                elif relation == 'group':
                    exported_asset[common.ASSET_RELATION_KEY][relation] = \
                        common.extract_inventory_groups(asset)['items']
                elif relation == 'notification_templates':
                    # Notifications are keyed by type, not by the relation name.
                    for notification_type in common.NOTIFICATION_TYPES:
                        exported_asset[common.ASSET_RELATION_KEY][notification_type] = \
                            common.extract_notifications(asset, notification_type)
                elif relation == 'extra_credentials':
                    exported_asset[common.ASSET_RELATION_KEY][relation] =\
                        common.extract_extra_credentials(asset)['items']
                elif relation == 'schedules':
                    exported_asset[common.ASSET_RELATION_KEY][relation] =\
                        common.extract_schedules(asset)['items']

            # If this asset type is in the RESOURCE_FIELDS of the Role
            # object than export its roles.
            if asset_type in RESOURCE_FIELDS:
                if common.ASSET_RELATION_KEY not in exported_asset:
                    exported_asset[common.ASSET_RELATION_KEY] = {}
                exported_asset[common.ASSET_RELATION_KEY][
                    'roles'] = common.extract_roles(asset)['items']

            # Finally add the object to the list of objects being exported.
            exported_objects.append(exported_asset)

    return exported_objects
def extract_workflow_nodes(asset):
    """Export the nodes of a workflow job template as name-linked dicts.

    Node ids are replaced with synthetic names ('node0', 'node1', ...) so the
    exported graph is portable between Tower instances.

    Args:
        asset: The workflow job template asset; reads 'id' and 'name'.

    Returns:
        list of node dicts with unified_job_type/unified_job_name and the
        success/failure/always links expressed as node names.

    Raises:
        TowerCLIError: if a node's job template was deleted, or a link
            references a node id that was not part of this workflow.
    """
    # If workflow_node_post_options is not filled out, get it.
    workflow_node_post_options = get_api_options('node')

    # Get the workflow nodes belonging to this workflow job template.
    query_params = [("workflow_job_template", asset['id'])]
    nodes = tower_cli.get_resource('node').list(
        **{
            "query": query_params,
            'fail_on_multiple_results': False,
            'all_pages': True
        })

    # We have to temporarily stash these.
    # At the end of the process we need to go through all of the nodes and
    # resolve the different node types from their IDs to their names.
    workflow_nodes_extracted = []
    # This is a stash for us to map the IDs back to the labels.
    workflow_node_to_name_mapping = {}

    node_number = 0
    for workflow_node in nodes['results']:
        # Assign a stable synthetic name; ids are not portable across Towers.
        node_name = 'node{}'.format(node_number)
        node_number = node_number + 1
        node_to_add = {
            "name": node_name,
        }
        workflow_node_to_name_mapping[workflow_node['id']] = node_name
        map_node_to_post_options(workflow_node_post_options, workflow_node, node_to_add)

        # We can delete the workflow_job_template since we will be applying
        # it to this workflow on import.
        if 'workflow_job_template' in node_to_add:
            del node_to_add["workflow_job_template"]

        # If the unified job template is missing, raise an error for this
        # workflow: its job template has been deleted in Tower.
        if 'unified_job_template' not in node_to_add:
            raise TowerCLIError(
                "Workflow export exception: workflow {} has a node whose job template has been deleted"
                .format(asset['name']))

        # Now we need to resolve the unified job template into its type and
        # name (taken from summary_fields) instead of a raw id.
        del node_to_add["unified_job_template"]
        node_to_add['unified_job_type'] = workflow_node["summary_fields"][
            "unified_job_template"]["unified_job_type"]
        node_to_add['unified_job_name'] = workflow_node["summary_fields"][
            "unified_job_template"]["name"]

        # Resolve credential/inventory ids to names where present.
        if 'credential' in workflow_node and workflow_node['credential']:
            node_to_add['credential'] = tower_cli.get_resource(
                'credential').get(workflow_node['credential'])['name']
        if 'inventory' in workflow_node and workflow_node['inventory']:
            node_to_add['inventory'] = tower_cli.get_resource('inventory').get(
                workflow_node['inventory'])['name']

        # Finally copy over the different node types (success/failure/always
        # link lists, still expressed as node ids at this point).
        for node_type in tower_cli.get_resource(
                'workflow').workflow_node_types:
            if node_type in workflow_node:
                node_to_add[node_type] = workflow_node[node_type]

        workflow_nodes_extracted.append(node_to_add)

    # Second pass: now that every node has a name, resolve all of the node
    # IDs in the different link types to those names.
    # NOTE(review): this assumes every node carries all workflow_node_types
    # keys (the copy above is conditional) — confirm the API always returns
    # them, otherwise this indexing could KeyError.
    for workflow_node in workflow_nodes_extracted:
        for node_type in tower_cli.get_resource(
                'workflow').workflow_node_types:
            # Resolved nodes will be the resolved node names instead of IDs.
            resolved_nodes = []
            for a_node_id in workflow_node[node_type]:
                # If we found a node that does not resolve raise an exception.
                if a_node_id not in workflow_node_to_name_mapping:
                    raise TowerCLIError(
                        "Workflow export exception: unable to resolve node {} from {}"
                        .format(a_node_id, asset['name']))

                # Add the new node to the list of resolved nodes.
                resolved_nodes.append(workflow_node_to_name_mapping[a_node_id])

            # Put the resolved nodes back into the object.
            workflow_node[node_type] = resolved_nodes

    return workflow_nodes_extracted