def delete_devices(api, workflow_dict):
    """
    Deletes DNA Center devices marked 'absent' in workflow_dict, handling
    both provisioned (DeviceInfo) and unprovisioned (network-device) cases.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of devices (see schema.py);
    :returns: Nothing
    """
    _schema = 'devices.schema.devices'
    logger.info('devices::delete_devices')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        devices_db = api.devices.get_device_list()
        for device in table_data:
            if device['hostname']:
                device_id = common.get_object_id(devices_db['response'],
                                                 hostname=device['hostname'],
                                                 strict=False)
                if device['presence'] == 'absent' and device_id is not None:
                    logger.info('device {} with id: {} will be deleted'.format(
                        device['hostname'], device_id))
                    # URI: GET device-info is used to check if a device is provisioned or not.
                    device_info_url = 'api/v2/data/customer-facing-service/DeviceInfo'
                    device_info = api.custom_caller.call_api(
                        'GET',
                        device_info_url,
                        params={'networkDeviceId': device_id})
                    # If device is already provisioned, we need to use a DeviceInfo API to delete it.
                    # BUGFIX: match on the resolved device_id; device['id'] is
                    # not a workflow-table column.
                    device_info_id = common.get_object_id(
                        device_info['response'], networkDeviceId=device_id)
                    if device_info_id is not None:
                        # URI: DELETE but only if the device is already provisioned.
                        # We check if the device is in "device-info"
                        delete_url = 'api/v2/data/customer-facing-service/DeviceInfo/{}'.format(
                            device_info_id)
                    else:
                        # URI: DELETE if the device is not 'in use' by provisioning
                        delete_url = 'api/v1/network-device/{}'.format(
                            device_id)
                    result = api.custom_caller.call_api(
                        'DELETE', delete_url, params={'cleanConfig': True})
                    status = common.wait_for_task_completion(
                        api, result['response'], timeout=30)
                    logger.debug(status)
                    # if device fails cleanup - force device delete
                    if status['response']['isError'] and \
                            "Configuration cleanup failed" in status['response']['progress']:
                        # BUGFIX: use the resolved device_id here as well
                        # (was device['id']).
                        delete_url = 'api/v1/network-device/{}'.format(
                            device_id)
                        result = api.custom_caller.call_api(
                            'DELETE', delete_url,
                            params={'isForceDelete': True})
                        status = common.wait_for_task_completion(
                            api, result['response'], timeout=30)
                        logger.debug(status)
    else:
        logger.error('schema not found: {}'.format(_schema))
def delete_global_credentials(api, workflow_dict):
    """
    Deletes DNA Center global discovery credentials (SNMPv2 RW/RO, CLI)
    for rows marked 'absent' in the native workflow tables.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<name>?<key>';
    :returns: Nothing
    """
    logger.info('discovery::delete_global_credentials')
    # Row marker column -> DNA Center credential sub-type it implies.
    # Order matters: it mirrors the original writeCommunity/readCommunity/
    # username elif chain, and only the first matching marker is handled.
    _cred_types = (
        ('writeCommunity', 'SNMPV2_WRITE_COMMUNITY'),
        ('readCommunity', 'SNMPV2_READ_COMMUNITY'),
        ('username', 'CLI'),
    )
    for key, value in workflow_dict.items():
        if 'native' not in key:
            continue
        _junk, _name, _key = key.split('?')
        # Cycle through the rows and delete entries with 'absent' set
        for row in value:
            if 'absent' not in row['presence']:
                continue
            for marker, cred_type in _cred_types:
                if marker not in row.keys():
                    continue
                _creds = api.network_discovery.get_global_credentials(
                    cred_type)
                _id = common.get_object_id(_creds['response'],
                                           description=row[_key])
                if _id is not None:
                    logger.info('Deleting {} with id: {}'.format(
                        cred_type, _id))
                    api.network_discovery.delete_global_credentials_by_id(_id)
                else:
                    # BUGFIX: log the description that was searched for;
                    # previously this formatted _id, which is None here.
                    logger.info(
                        '{} with description "{}" does not exist'.format(
                            cred_type, row[_key]))
                break
def create_global_credentials(api, workflow_dict):
    """
    Creates DNA Center global discovery credentials (SNMPv2 RW/RO, CLI)
    for rows marked 'present', skipping credentials that already exist.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<name>?<key>';
    :returns: Nothing
    """
    logger.info('discovery::create_global_credentials')
    for table_key, rows in workflow_dict.items():
        if 'native' not in table_key:
            continue
        _junk, _name, _key = table_key.split('?')
        # Marker column -> (credential sub-type, SDK factory that creates it).
        # Order mirrors the original writeCommunity/readCommunity/username
        # elif chain; only the first matching marker is handled per row.
        dispatch = (
            ('writeCommunity', 'SNMPV2_WRITE_COMMUNITY',
             api.network_discovery.create_snmp_write_community),
            ('readCommunity', 'SNMPV2_READ_COMMUNITY',
             api.network_discovery.create_snmp_read_community),
            ('username', 'CLI',
             api.network_discovery.create_cli_credentials),
        )
        for row in rows:
            if 'present' not in row['presence']:
                continue
            for marker, cred_type, create_fn in dispatch:
                if marker not in row.keys():
                    continue
                existing = api.network_discovery.get_global_credentials(
                    cred_type)
                _id = common.get_object_id(existing['response'],
                                           description=row[_key])
                if _id is not None:
                    logger.info('{} exists with id: {}'.format(cred_type, _id))
                else:
                    result = create_fn(payload=[common.dot_to_json(row)])
                    logger.debug(
                        api.custom_caller.call_api(
                            'GET', result['response']['url']))
                break
def run_discovery(api, workflow_dict):
    """
    Starts a DNA Center discovery job for each 'present' row of the native
    discovery table, wiring in the previously created global credentials.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<name>?<key>';
    :returns: Nothing
    """
    logger.info('discovery::run_discovery')
    # (credential sub-type, row column holding its description) — looked up
    # in this order: CLI, SNMP read, SNMP write.
    _cred_lookups = (
        ('CLI', 'cli'),
        ('SNMPV2_READ_COMMUNITY', 'snmp_ro'),
        ('SNMPV2_WRITE_COMMUNITY', 'snmp_rw'),
    )
    for table_key, rows in workflow_dict.items():
        if 'native' not in table_key:
            continue
        _junk, _name, _key = table_key.split('?')
        for row in rows:
            if not ('present' in row['presence'] and _name == 'discovery'):
                continue
            # Resolve each global credential description to its id.
            _creds = []
            for cred_type, column in _cred_lookups:
                listing = api.network_discovery.get_global_credentials(
                    cred_type)
                _creds.append(
                    common.get_object_id(listing['response'],
                                         description=row[column]))
            _discovery = {
                "discoveryType": row['discoveryType'],
                "preferredMgmtIPMethod": row['preferredMgmtIPMethod'],
                "ipAddressList": '{}-{}'.format(row['startIp'], row['endIp']),
                "protocolOrder": "ssh",
                "timeout": 5,
                "retry": 3,
                "globalCredentialIdList": _creds,
                "name": row['name'],
            }
            logger.info('Adding discovery ... ')
            result = api.network_discovery.start_discovery(payload=_discovery)
            common.wait_for_task_completion(api, result['response'])
            logger.debug(result)
def delete(api, workflow_dict):
    """
    Deletes DNA Center areas, sites and floors.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of sites (see schema.py);
    :returns: Nothing
    """
    _schema = 'sites.schema.sites'
    logger.info('sites::delete')
    logger.debug('schema: {}'.format(_schema))
    if _schema not in workflow_dict.keys():
        logger.error('schema not found: {}'.format(_schema))
        return
    for row in workflow_dict[_schema]:
        # Refresh the site hierarchy on every pass — earlier deletions
        # change it.
        _sites_db = api.sites.get_site()
        _deleted_sites = []
        if 'absent' not in row['presence']:
            continue
        site_name_hierarchy = '{}/{}'.format(row['parentName'], row['name'])
        _id = common.get_object_id(_sites_db['response'],
                                   siteNameHierarchy=site_name_hierarchy)
        if _id is None:
            continue
        # When deleting a site we need to figure out the children and delete
        # in reverse order.
        for _child in _get_sorted_child_list(_sites_db['response'], _id):
            logger.info('Deleting site: {}'.format(_child[0]))
            logger.debug('Deleting: {} with id: {}'.format(
                _child[0], _child[1]))
            if _child[1] in _deleted_sites:
                continue
            result = api.sites.delete_site(site_id=_child[1])
            status = common.wait_for_task_completion(api, result)
            logger.debug(status)
            _deleted_sites.append(_child[1])
def query(params):
    """
    Builds a MongoDB filter from request params and returns up to 50 posts,
    newest first, for cursor-based pagination.

    :param params: dict-like of optional filters: 'post_status' (JSON-encoded
        list), 'post_time' (MM-DD-YYYY), 'description' (regex string),
        'last_doc_id' (ObjectId string pagination cursor)
    :returns: dict with 'data' (list of records) and 'total_records'
    """
    query_obj = dict()
    if bool(params.get('post_status', '')):
        query_obj["post_status"] = {
            "$in": json.loads(params.get('post_status'))
        }
    if bool(params.get('post_time', '')):
        # BUGFIX: removed a leftover debug print of params["post_time"].
        date = parse_date(date=params['post_time'], date_format="%m-%d-%Y")
        # presumably post_time is stored as a 'YYYY-MM-DD HH:MM' string, so
        # a lexicographic range covers the whole day — TODO confirm schema.
        start_date = date.strftime("%Y-%m-%d") + ' 00:00'
        end_date = date.strftime("%Y-%m-%d") + ' 23:59'
        query_obj["post_time"] = {"$gt": start_date, "$lt": end_date}
    if bool(params.get('description', '')):
        query_obj["description"] = {'$regex': params.get('description')}
    if bool(params.get('last_doc_id', '')):
        query_obj["_id"] = {"$lt": ObjectId(params["last_doc_id"])}
    mongo_client = Posts()
    collection = []
    for record in mongo_client.find(filter=query_obj,
                                    view="CLIENT").sort('post_time',
                                                        -1).limit(50):
        if record.get("_id", ''):
            record["_id"] = get_object_id(record)
        collection.append(record)
    response = dict(data=collection, total_records=len(collection))
    return response
def delete_pools(api, workflow_dict):
    """
    Deletes DNA Center global IP pools marked 'absent' in the native
    ip_pools workflow table.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<workflow>?<table>';
    :returns: Nothing
    """
    logger.info('ip_pool::delete_pools')
    for key, value in workflow_dict.items():
        if 'native' in key:
            _junk, _workflow_name, _table_name = key.split('?')
            # Cycle through the rows and delete entries with 'absent' set
            if _table_name == 'ip_pools':
                _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
                logger.debug(_ip_pool_db)
                for row in value:
                    # BUGFIX: dropped an unused per-row api.sites.get_site()
                    # call — its result was never read.
                    if 'absent' in row['presence']:
                        _id = common.get_object_id(
                            _ip_pool_db['response'],
                            ipPoolName=row['ipPoolName'])
                        if _id is not None:
                            logger.info('Deleting: {} with id: {}'.format(
                                row['ipPoolName'], _id))
                            _delete_uri = '{}/{}'.format(pools_uri, _id)
                            result = api.custom_caller.call_api(
                                'DELETE',
                                _delete_uri,
                                json=common.dot_to_json(row))
                            if result.response.taskId:
                                common.wait_for_task_completion(
                                    api, result.response)
                        else:
                            continue
def delete_discovery(api, workflow_dict):
    """
    Deletes DNA Center Device Discoveries based on input from workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of discovery job definitions (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'discovery.schema.discovery'
    logger.info('discovery::delete_discovery')
    logger.debug('schema: {}'.format(_schema))
    if _schema not in workflow_dict.keys():
        logger.error('schema not found: {}'.format(_schema))
        return
    # Cycle through the rows and remove entries marked 'absent'
    for row in workflow_dict[_schema]:
        if not (row['presence'] and 'absent' in row['presence']):
            continue
        # URI: GET to discover ID of existing discoveries so that we can
        # delete/update
        _discovery = api.custom_caller.call_api('GET',
                                                '/api/v1/discovery/1/10')
        _id = common.get_object_id(_discovery['response'], name=row['name'])
        if _id is None:
            logger.info('Discovery with name "{}" does not exist'.format(
                row['name']))
        else:
            logger.info('Deleting discovery with id: {}'.format(_id))
            api.network_discovery.delete_discovery_by_id(_id)
def provision_devices(api, workflow_dict):
    """
    Provisions each device row to its configured site via the DNA Center
    provisioning API, skipping rows marked 'absent' or not flagged for
    provisioning.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of devices (see schema.py);
    :returns: Nothing
    """
    _schema = 'devices.schema.devices'
    logger.info('devices::provision_devices')
    logger.debug('schema: {}'.format(_schema))
    if _schema not in workflow_dict.keys():
        logger.error('schema not found: {}'.format(_schema))
        return
    sites_db = api.sites.get_site()
    devices_db = api.devices.get_device_list()
    for device in workflow_dict[_schema]:
        if device['presence'] == 'absent' or device['provisionDevice'] == False:
            continue
        site_id = common.get_object_id(sites_db['response'],
                                       siteNameHierarchy=device['location'])
        device_id = common.get_object_id(devices_db['response'],
                                         hostname=device['hostname'],
                                         strict=False)
        device_name = common.get_object_id(devices_db['response'],
                                           return_param='hostname',
                                           hostname=device['hostname'],
                                           strict=False)
        logger.info('Provisioning for device: {}, at site: {}'.format(
            device['hostname'], device['location']))
        payload_data = {
            'name': device_name,
            'networkDeviceId': device_id,
            'siteId': site_id,
        }
        data = common.build_json_from_template(templates.provision_device_j2,
                                               payload_data)
        result = api.custom_caller.call_api('POST', provision_device_url,
                                            json=data)
        status = common.wait_for_task_completion(api, result['response'],
                                                 timeout=45, tree=True)
        logger.debug(status)
def _wait_for_device_presence(api, hostname, timeout=10):
    """
    Polls the DNA Center physical-topology DB until a discovered device
    shows up with an IP address, or the timeout budget is exhausted.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param hostname: Device hostname (topology node label) to wait for
    :param timeout: Polling budget in seconds, consumed in 5-second steps
    :returns: The last topology DB response retrieved
    """
    time.sleep(5)
    # URI: GET We are trying to determine, after a device has been
    # discovered, when it is ready for provisioning (DNW)
    topology_db_uri = 'api/v1/topology/physical-topology?nodeType=device&__preventCache=1573344887666'
    topology_db = api.custom_caller.call_api('GET', topology_db_uri)
    # BUGFIX: use strict=False on the first lookup too, matching the retry
    # lookup inside the loop; previously the first attempt used strict
    # matching and could miss a device the retries would find.
    device_ip = common.get_object_id(topology_db['response']['nodes'],
                                     return_param='ip',
                                     label=hostname,
                                     strict=False)
    t = timeout
    while True:
        if device_ip is None:
            logger.info(
                'Device IP for {} not found. Sleeping for 5 seconds ...'.
                format(hostname))
            time.sleep(5)
            topology_db = api.custom_caller.call_api('GET', topology_db_uri)
            device_ip = common.get_object_id(topology_db['response']['nodes'],
                                             return_param='ip',
                                             label=hostname,
                                             strict=False)
            logger.debug('Device search returned ip: {}'.format(device_ip))
            t = t - 5
        else:
            logger.info(
                'Device {} now found in topology DB with IP: {}'.format(
                    hostname, device_ip))
            logger.debug(topology_db)
            break
        if t < 1:
            logger.error(
                'Timeout waiting for task to complete: Unable to find {}'.
                format(hostname))
            logger.debug(topology_db)
            break
    return topology_db
def delete_network_profiles(api, workflow_dict):
    """
    Deletes switching Network Profiles configured based on input from
    workflow_dict. Wireless profile deletion is handled by wireless delete
    function.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of network profiles (see schema.py);
    :returns: Nothing
    """
    _schema = 'network_profiles.schema.network_profiles'
    logger.info('network_profiles::delete_network_profiles')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # get profile list to delete marked as absent
        profiles = [profile for profile in table_data
                    if profile['presence'] == 'absent' and profile['name']]
        # get site ids
        sites = api.sites.get_site()
        # remove Profiles
        if profiles:
            for profile in profiles:
                # TODO This is the second time for this to be used... maybe create a function
                # get current sites and site ids
                data = api.custom_caller.call_api(
                    'GET', network_profile_base_url,
                    params={"name": profile['name']})
                # BUGFIX: skip profiles that no longer exist. Previously
                # profile['id'] was only assigned inside this check but was
                # used unconditionally below, raising KeyError for a missing
                # profile.
                if not data['response']:
                    logger.info(
                        'network_profiles:: {} not found; skipping'.format(
                            profile['name']))
                    continue
                profile['id'] = data['response'][0]['siteProfileUuid']
                # if location then assign it to profile
                # split up sites if csv
                if profile['sites']:
                    logger.info('network_profiles::delete_site')
                    profile['sites'] = common.csv_string_to_list(
                        profile['sites'])
                    # get site id and detach it from the profile
                    for new_site in profile['sites']:
                        site_id = common.get_object_id(
                            sites['response'], siteNameHierarchy=new_site)
                        site_add_url = "{}/{}/site/{}".format(
                            network_profile_base_url, profile['id'], site_id)
                        result = api.custom_caller.call_api(
                            'DELETE', site_add_url)
                        logger.debug(result)
                        common.wait_for_task_completion(api,
                                                        result['response'])
                else:
                    logger.info('network_profiles: No Sites to delete')
                # Now delete the profile
                logger.info('network profiles: Deleting {}'.format(
                    profile['name']))
                profile_url = "{}/{}".format(network_profile_base_url,
                                             profile['id'])
                result = api.custom_caller.call_api('DELETE', profile_url)
                logger.debug(result)
                common.wait_for_task_completion(api, result['response'])
    else:
        logger.error('schema not found: {}'.format(_schema))
def delete_reservations(api, delete_workflow_dict):
    """
    Releases IP pool reservations marked 'absent' in the native
    ip_reservations workflow table.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param delete_workflow_dict: A dictionary of native workflow tables
        keyed as 'native?<workflow>?<table>';
    :returns: Nothing
    """
    logger.info('ip_pool::delete_reservations')
    for table_key, rows in delete_workflow_dict.items():
        if 'native' not in table_key:
            continue
        _junk, _workflow_name, _table_name = table_key.split('?')
        if _table_name != 'ip_reservations':
            continue
        _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
        _sites_db = api.sites.get_site()
        logger.debug('******** _ip_pool_db *********')
        logger.debug(_ip_pool_db)
        logger.debug('******** _sites_db *********')
        logger.debug(_sites_db)
        for row in rows:
            if 'absent' not in row['presence']:
                continue
            # Reservations are scoped per site, so resolve the site first.
            _site_id = common.get_object_id(
                _sites_db['response'], siteNameHierarchy=row['siteName'])
            groups_uri = '{}?siteId={}'.format(groups_uri_base, _site_id)
            _ip_groups_db = api.custom_caller.call_api('GET', groups_uri)
            _id = common.get_object_id(_ip_groups_db['response'],
                                       groupName=row['groupName'])
            if _id is None:
                continue
            logger.info('Releasing reservation: {} with id: {}'.format(
                row['groupName'], _id))
            delete_uri = '{}/{}'.format(groups_uri_base, _id)
            result = api.custom_caller.call_api('DELETE', delete_uri)
            logger.debug(result)
            if result.response.taskId:
                common.wait_for_task_completion(api, result.response)
def _create_floors(api, sites_db, table_data):
    """
    Creates floor sites for rows marked 'present' with type 'floor',
    skipping floors already present in sites_db.
    """
    _table_key = 'name'
    # Cycle through the rows and create entries with 'present' set
    for row in table_data:
        if not ('present' in row['presence'] and 'floor' in row['type']):
            continue
        site_name_hierarchy = '{}/{}'.format(row['parentName'], row['name'])
        existing_id = common.get_object_id(
            sites_db, siteNameHierarchy=site_name_hierarchy)
        if existing_id is not None:
            logger.info('Floor: {}/{} already exists with id: {}'.format(
                row['parentName'], row[_table_key], existing_id))
            continue
        logger.info('Creating floor: {}/{}'.format(row['parentName'],
                                                   row[_table_key]))
        data = common.build_json_from_template(templates.floor_j2, row)
        logger.debug('Building payload: {}'.format(data))
        result = api.sites.create_site(payload=data)
        status = common.wait_for_task_completion(api, result)
        logger.debug(status)
def delete_aaa(api, workflow_dict):
    """
    Deletes DNA Center Global AAA Server based on input from workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows with AAA server settings required (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'aaa.schema.system_settings'
    logger.info('system_settings::delete_aaa')
    logger.debug('schema: {}'.format(_schema))
    if _schema not in workflow_dict.keys():
        logger.error('schema not found: {}'.format(_schema))
        return
    table_data = workflow_dict[_schema]
    # get Current AAA Servers and get a name list
    _aaa_db = api.custom_caller.call_api('GET', aaa_uri)['response']
    # TODO Check to make sure ISE not specified in network settings
    if not _aaa_db:
        logger.info('system_settings::No AAA Servers are currently Configured')
        return
    aaa_ips = [aaa['ipAddress'] for aaa in _aaa_db]
    for row in table_data:
        if row['presence'] != 'absent':
            continue
        if row['ipAddress'] not in aaa_ips:
            logger.info(
                'system_settings::AAA {} does not exist in system'.format(
                    row['ipAddress']))
            continue
        aaa_id = common.get_object_id(_aaa_db,
                                      ipAddress=row['ipAddress'],
                                      return_param="instanceUuid")
        logger.info('Deleting AAA: {}'.format(row['ipAddress']))
        result = api.custom_caller.call_api(
            'DELETE', "{}/{}".format(aaa_uri, aaa_id))
        status = common.wait_for_task_completion(api, result.response)
        logger.debug(status)
def _create_buildings(api, sites_db, table_data):
    """
    Creates building sites for rows marked 'present' with type 'building',
    skipping buildings already present in sites_db.

    The address defaults to "street, city, country"; when latitude and
    longitude are not supplied in the row, a geocoding lookup may refine
    both the address and the coordinates.
    """
    _table_key = 'name'
    # Cycle through the rows and create entries with 'present' set
    for row in table_data:
        if not ('present' in row['presence'] and 'building' in row['type']):
            continue
        site_name_hierarchy = '{}/{}'.format(row['parentName'], row['name'])
        existing_id = common.get_object_id(
            sites_db, siteNameHierarchy=site_name_hierarchy)
        if existing_id is not None:
            logger.info('Building: {}/{} already exists with id: {}'.format(
                row['parentName'], row[_table_key], existing_id))
            continue
        logger.info('Creating building: {}/{}'.format(row['parentName'],
                                                      row[_table_key]))
        data = common.build_json_from_template(templates.building_j2, row)
        building = data['site']['building']
        building['address'] = '{}, {}, {}'.format(row['street'], row['city'],
                                                  row['country'])
        # If lat and lon are defined, they are used instead of _address_lookup
        if row['latitude'] is not None and row['longitude'] is not None:
            building['longitude'] = float(row['longitude'])
            building['latitude'] = float(row['latitude'])
        else:
            location = _address_lookup(row['street'], row['city'],
                                       row['country'])
            if location is not None:
                logger.info('Address lookup: SUCCESS')
                building['address'] = location['address']
                building['longitude'] = float(location['lon'])
                building['latitude'] = float(location['lat'])
            else:
                logger.info('Address lookup: FAILURE')
        logger.debug('Building payload: {}'.format(data))
        result = api.sites.create_site(payload=data)
        status = common.wait_for_task_completion(api, result)
        logger.debug(status)
def delete_discovery(api, workflow_dict):
    """
    Deletes DNA Center discovery jobs marked 'absent' in the native
    discovery workflow table.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<name>?<key>';
    :returns: Nothing
    """
    logger.info('discovery::delete_discovery')
    for key, value in workflow_dict.items():
        if 'native' in key:
            _junk, _name, _key = key.split('?')
            # Cycle through the rows and delete entries with 'absent' set
            # (removed a leftover per-row debug log of _name)
            for row in value:
                if 'absent' in row['presence'] and _name == 'discovery':
                    # URI: GET the first page of discoveries to resolve the id
                    _discovery = api.custom_caller.call_api(
                        'GET', '/api/v1/discovery/1/10')
                    _id = common.get_object_id(_discovery['response'],
                                               name=row[_key])
                    if _id is not None:
                        logger.info(
                            'Deleting discovery with id: {}'.format(_id))
                        api.network_discovery.delete_discovery_by_id(_id)
                    else:
                        # BUGFIX: log the discovery name that was searched
                        # for; previously this formatted _id, which is None
                        # in this branch.
                        logger.info(
                            'Discovery with name "{}" does not exist'.format(
                                row[_key]))
def create_pools(api, workflow_dict):
    """
    Creates DNA Center global IP pools for rows marked 'present' in the
    native ip_pools workflow table, skipping pools that already exist.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<workflow>?<table>';
    :returns: Nothing
    """
    logger.info('ip_pool::create_pools')
    for table_key, rows in workflow_dict.items():
        if 'native' not in table_key:
            continue
        _junk, _workflow_name, _table_name = table_key.split('?')
        if _table_name != 'ip_pools':
            continue
        _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
        for row in rows:
            if 'present' not in row['presence']:
                continue
            existing_id = common.get_object_id(_ip_pool_db['response'],
                                               ipPoolName=row['ipPoolName'])
            if existing_id is not None:
                logger.info('IP Pool: {} already exists with id: {}'.format(
                    row['ipPoolName'], existing_id))
                continue
            logger.info('Creating IP Pool: {}'.format(row['ipPoolName']))
            # NOTE(review): this binds (and mutates) the shared
            # templates.ip_pool dict in place — every row reuses one payload
            # object; confirm that is intended.
            data = templates.ip_pool
            data['ipPoolName'] = row['ipPoolName']
            data['ipPoolCidr'] = row['ipPoolCidr']
            result = api.custom_caller.call_api('POST', pools_uri, json=data)
            logger.debug(result)
            if result.response.taskId:
                common.wait_for_task_completion(api, result.response)
def _resolve_profile_templates(api, csv_names, product_series):
    """
    Resolve a CSV string of template names to [{'name','id','version'}]
    dicts, picking the latest version of each matching
    'Switches and Hubs' template.
    """
    if product_series:
        template_db = api.template_programmer.gets_the_templates_available(
            product_family="Switches and Hubs",
            product_series=product_series)
    else:
        template_db = api.template_programmer.gets_the_templates_available(
            product_family="Switches and Hubs")
    resolved = []
    for template in common.csv_string_to_list(csv_names):
        for item in template_db:
            if item['name'] == template:
                # Get latest version
                version_list = [version['version']
                                for version in item['versionsInfo']]
                resolved.append({"name": item['name'],
                                 "id": item['templateId'],
                                 "version": max(version_list)})
    return resolved


def create_network_profile(api, workflow_dict):
    """
    Creates switching Network Profiles. Wireless profile creation is handled
    in wireless module.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of network profiles (see schema.py);
    :returns: Nothing
    """
    _schema = 'network_profiles.schema.network_profiles'
    logger.info('network_profiles::create_network_profiles')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # get profiles to be created
        new_profiles = [profile for profile in table_data
                        if profile['presence'] == 'present']
        if new_profiles:
            # check for dupe profile names
            result = api.custom_caller.call_api(
                'GET', network_profile_base_url,
                params={"namespace": "switching"})
            current_profiles = result['response']
            current_profile_names = [profile['name']
                                     for profile in current_profiles]
            # get site ids
            sites = api.sites.get_site()
            # now create it
            for profile in new_profiles:
                if profile['name'] in current_profile_names:
                    logger.info('network_profiles:: {} already exists'.format(
                        profile['name']))
                    continue
                logger.info('network profiles: Creating {}'.format(
                    profile['name']))
                data = common.build_json_from_template(
                    templates.add_profile_j2, profile)
                result = api.custom_caller.call_api(
                    'POST', network_profile_base_url, json=data)
                logger.debug(result)
                create_result = common.wait_for_task_completion(
                    api, result['response'])
                # if successful creation lets move on to adding sites
                if create_result['response']['isError'] == False:
                    data = api.custom_caller.call_api(
                        'GET', network_profile_base_url,
                        params={"name": profile['name']})
                    profile['id'] = data['response'][0]['siteProfileUuid']
                    # if location then assign it to profile
                    # split up sites if csv
                    if profile['sites']:
                        logger.info('network_profiles::update_site')
                        profile['sites'] = common.csv_string_to_list(
                            profile['sites'])
                        # get site id and add it to profile
                        for new_site in profile['sites']:
                            site_id = common.get_object_id(
                                sites['response'],
                                siteNameHierarchy=new_site)
                            site_add_url = "{}/{}/site/{}".format(
                                network_profile_base_url, profile['id'],
                                site_id)
                            result = api.custom_caller.call_api(
                                'POST', site_add_url)
                            logger.debug(result)
                            common.wait_for_task_completion(
                                api, result['response'])
                    # Assign templates to profile; day0 and day1 (CLI)
                    # template name lists are resolved to id/version objects
                    # by the shared helper above.
                    # BUGFIX: the day0 no-series branch previously called
                    # gets_the_templates_available(productFamily=...) — the
                    # SDK method takes product_family=, so that call raised
                    # TypeError.
                    if profile['day0Template'] or profile['cliTemplate']:
                        if profile['day0Template']:
                            profile['day0Template'] = _resolve_profile_templates(
                                api, profile['day0Template'],
                                profile['product_series'])
                        if profile['cliTemplate']:
                            # update profile with additional info needed
                            # (id, version #)
                            profile['cliTemplate'] = _resolve_profile_templates(
                                api, profile['cliTemplate'],
                                profile['product_series'])
                        logger.info('network profiles::adding templates')
                        data = common.build_json_from_template(
                            templates.add_templates_to_profile_j2, profile)
                        add_template_profile_url = "{}/{}".format(
                            network_profile_base_url, profile['id'])
                        result = api.custom_caller.call_api(
                            'PUT', add_template_profile_url, json=data)
                        logger.debug(result)
                        common.wait_for_task_completion(api,
                                                        result['response'])
                    else:
                        logger.info('network profiles:: No Templates to add')
    else:
        logger.error('schema not found: {}'.format(_schema))
def create_reservations(api, workflow_dict):
    """
    Reserves IP sub-pools at sites for rows marked 'present' in the native
    ip_reservations workflow table, skipping reservations that already exist.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of native workflow tables keyed as
        'native?<workflow>?<table>';
    :returns: Nothing
    """
    logger.info('ip_pool::create_reservations')
    for key, value in workflow_dict.items():
        if 'native' in key:
            _junk, _workflow_name, _table_name = key.split('?')
            # Cycle through the rows and create entries with 'present' set
            if _table_name == 'ip_reservations':
                _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
                _sites_db = api.sites.get_site()
                for row in value:
                    if 'present' in row['presence']:
                        _site_id = common.get_object_id(
                            _sites_db['response'],
                            siteNameHierarchy=row['siteName'])
                        _pool_parent_id = common.get_object_id(
                            _ip_pool_db['response'],
                            ipPoolName=row['ipPoolsParent'])
                        groups_uri = '{}?siteId={}'.format(
                            groups_uri_base, _site_id)
                        _ip_groups_db = api.custom_caller.call_api(
                            'GET', groups_uri)
                        _id = common.get_object_id(_ip_groups_db['response'],
                                                   groupName=row['groupName'])
                        # NOTE(review): mutates the shared
                        # templates.ip_reservation dict in place.
                        data = templates.ip_reservation
                        data['siteId'] = _site_id
                        data['ipPools'][0]['parentUuid'] = _pool_parent_id
                        data['groupName'] = row['groupName']
                        data['type'] = row['type']
                        data['ipPools'][0]['ipPoolCidr'] = row['ipReservation']
                        data['ipPools'][0]['parent'] = row['ipPoolsParent']
                        data['ipPools'][0][
                            'dhcpServerIps'] = row['dhcpServerIps'].split(
                                ',') if row['dhcpServerIps'] else []
                        if len(data['ipPools'][0]['dhcpServerIps']) > 0:
                            # BUGFIX: this was written as a no-op
                            # annotation-style statement
                            # ("...['configureExternalDhcp']: True"), so the
                            # flag was never actually set.
                            data['ipPools'][0]['configureExternalDhcp'] = True
                        data['ipPools'][0][
                            'dnsServerIps'] = row['dnsServerIps'].split(
                                ',') if row['dnsServerIps'] else []
                        data['ipPools'][0]['gateways'] = row['gateways'].split(
                            ',') if row['gateways'] else []
                        if _id is not None:
                            logger.info(
                                'Reservation: {} already exists with id: {}'.
                                format(row['groupName'], _id))
                        else:
                            logger.info('Creating IP Reservation: {}'.format(
                                row['groupName']))
                            result = api.custom_caller.call_api(
                                'POST', groups_uri_base, json=data)
                            logger.debug(result)
                            if result.response.taskId:
                                common.wait_for_task_completion(
                                    api, result.response)
def delete_global_credentials(api, workflow_dict):
    """
    Deletes DNA Center Global Credentials based on input from workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of credential definitions (cli, snmp-ro/rw) (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'snmpWrite.schema.network_settings'
    logger.info('network_settings::delete_global_credentials::snmpWrite')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # Cycle through the rows and delete entries with 'absent' set
        for row in table_data:
            if 'absent' in row['presence'] and 'writeCommunity' in row.keys():
                _creds = api.network_discovery.get_global_credentials(
                    'SNMPV2_WRITE_COMMUNITY')
                _id = common.get_object_id(_creds['response'],
                                           description=row['description'])
                if _id is not None:
                    logger.info(
                        'Deleting SNMPV2_WRITE_COMMUNITY with id: {}'.format(
                            _id))
                    api.network_discovery.delete_global_credentials_by_id(_id)
                else:
                    # BUGFIX: log the searched description, not the (None) id.
                    logger.info(
                        'SNMPV2_WRITE_COMMUNITY with description "{}" does not exist'
                        .format(row['description']))
    else:
        logger.error('schema not found: {}'.format(_schema))
    _schema = 'snmpRead.schema.network_settings'
    logger.info('network_settings::delete_global_credentials::snmpRead')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        for row in table_data:
            if 'absent' in row['presence'] and 'readCommunity' in row.keys():
                _creds = api.network_discovery.get_global_credentials(
                    'SNMPV2_READ_COMMUNITY')
                _id = common.get_object_id(_creds['response'],
                                           description=row['description'])
                if _id is not None:
                    logger.info(
                        'Deleting SNMPV2_READ_COMMUNITY with id: {}'.format(
                            _id))
                    api.network_discovery.delete_global_credentials_by_id(_id)
                else:
                    # BUGFIX: log the searched description, not the (None) id.
                    logger.info(
                        'SNMPV2_READ_COMMUNITY with description "{}" does not exist'
                        .format(row['description']))
    else:
        logger.error('schema not found: {}'.format(_schema))
    _schema = 'cli.schema.network_settings'
    logger.info('network_settings::delete_global_credentials::cli')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        for row in table_data:
            if 'absent' in row['presence'] and 'username' in row.keys():
                _creds = api.network_discovery.get_global_credentials('CLI')
                _id = common.get_object_id(_creds['response'],
                                           description=row['description'])
                if _id is not None:
                    logger.info('Deleting CLI with id: {}'.format(_id))
                    api.network_discovery.delete_global_credentials_by_id(_id)
                else:
                    # BUGFIX: log the searched description, not the (None) id.
                    logger.info(
                        'CLI with description "{}" does not exist'.format(
                            row['description']))
    else:
        logger.error('schema not found: {}'.format(_schema))
def create_global_credentials(api, workflow_dict):
    """
    Creates DNA Center Global Credentials.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows credential
            definitions (cli, snmp-ro/rw) (see schema.yaml);

    :returns: Nothing
    """
    # --- SNMPv2 write community strings -----------------------------------
    _schema = 'snmpWrite.schema.network_settings'
    logger.info('network_settings::create_global_credentials::snmpWrite')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        for entry in workflow_dict[_schema]:
            # Only act on rows explicitly flagged 'present'
            if not (entry['presence'] and 'present' in entry['presence']):
                continue
            existing = api.network_discovery.get_global_credentials(
                'SNMPV2_WRITE_COMMUNITY')
            match_id = common.get_object_id(
                existing['response'], description=entry['description'])
            if match_id is not None:
                logger.info(
                    'SNMPV2_WRITE_COMMUNITY exists with id: {}'.format(
                        match_id))
                continue
            logger.info('Creating SNMPV2_WRITE_COMMUNITY')
            result = api.network_discovery.create_snmp_write_community(
                payload=[common.dot_to_json(entry)])
            logger.debug(result)
    else:
        logger.error('schema not found: {}'.format(_schema))

    # --- SNMPv2 read community strings ------------------------------------
    _schema = 'snmpRead.schema.network_settings'
    logger.info('network_settings::create_global_credentials::snmpRead')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        for entry in workflow_dict[_schema]:
            if not (entry['presence'] and 'present' in entry['presence']):
                continue
            existing = api.network_discovery.get_global_credentials(
                'SNMPV2_READ_COMMUNITY')
            match_id = common.get_object_id(
                existing['response'], description=entry['description'])
            if match_id is not None:
                logger.info(
                    'SNMPV2_READ_COMMUNITY exists with id: {}'.format(
                        match_id))
                continue
            logger.info('Creating SNMPV2_READ_COMMUNITY')
            result = api.network_discovery.create_snmp_read_community(
                payload=[common.dot_to_json(entry)])
            logger.debug(result)
    else:
        logger.error('schema not found: {}'.format(_schema))

    # --- CLI username/password credentials --------------------------------
    _schema = 'cli.schema.network_settings'
    logger.info('network_settings::create_global_credentials::cli')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        for entry in workflow_dict[_schema]:
            if not (entry['presence'] and 'present' in entry['presence']):
                continue
            existing = api.network_discovery.get_global_credentials('CLI')
            match_id = common.get_object_id(
                existing['response'], description=entry['description'])
            if match_id is not None:
                logger.info('CLI exists with id: {}'.format(match_id))
                continue
            logger.info(
                'Creating CLI credentials for username: {}'.format(
                    entry['username']))
            result = api.network_discovery.create_cli_credentials(
                payload=[common.dot_to_json(entry)])
            logger.debug(result)
    else:
        logger.error('schema not found: {}'.format(_schema))
def assign_to_site(api, workflow_dict):
    """
    Assigns discovered devices to their target sites based on workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of device
            definitions (see schema.yaml);

    :returns: Nothing
    """
    _schema = 'devices.schema.devices'
    logger.info('devices::assign_to_site')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        sites_db = api.sites.get_site()
        for device in table_data:
            if 'absent' == device['presence']:
                continue
            site_id = common.get_object_id(
                sites_db['response'], siteNameHierarchy=device['location'])
            # Resolve the device management IP from the topology database.
            topology_db = api.custom_caller.call_api('GET', topology_db_uri)
            device_ip = common.get_object_id(topology_db['response']['nodes'],
                                             return_param='ip',
                                             label=device['hostname'],
                                             strict=False)
            if device_ip is None:
                # Device may still be syncing into the topology DB; poll for
                # up to 60 seconds before retrying the lookup.
                logger.info('Device {} not found in topology database'.format(
                    device['hostname']))
                logger.info('Entering wait loop for maximum 60 seconds')
                topology_db = _wait_for_device_presence(api,
                                                        device['hostname'],
                                                        timeout=60)
                device_ip = common.get_object_id(
                    topology_db['response']['nodes'],
                    return_param='ip',
                    label=device['hostname'],
                    strict=False)
            if device_ip is None:
                # Bug fix: previously a device that never appeared fell
                # through and a payload with a null IP was sent to DNAC.
                logger.error(
                    'Device {} never appeared in topology database; '
                    'skipping site assignment'.format(device['hostname']))
                continue
            if site_id is None:
                # Bug fix: previously an unknown site was silently ignored
                # with no diagnostic at all.
                logger.error(
                    'Site {} not found; cannot assign device {}'.format(
                        device['location'], device['hostname']))
                continue
            # TODO Can this be converted to native SDK call?
            # URI: GET used to discover site membership
            site_member_uri = site_member_uri_pattern.format(site_id)
            site_membership = api.custom_caller.call_api(
                'GET', site_member_uri)
            device_provisioned = False
            for members_response in site_membership['device']:
                # Check if device already provisioned under this site.
                # This is suboptimal since the device could be provisioned
                # under a different site already.
                if len(members_response['response']):
                    device_id = common.get_object_id(
                        members_response['response'],
                        return_param='instanceUuid',
                        hostname=device['hostname'],
                        strict=False)
                    if device_id is not None:
                        logger.info(
                            'Device {} already allocated to site {}'.format(
                                device['hostname'], device['location']))
                        logger.debug(
                            'Hostname: {} instanceUuid: {}'.format(
                                device['hostname'], device_id))
                        device_provisioned = True
                    else:
                        logger.info('Adding device {} to site {}'.format(
                            device['hostname'], device['location']))
                        data = common.build_json_from_template(
                            templates.device_to_site_j2,
                            {'device_ip': device_ip})
                        result = api.sites.assign_device_to_site(
                            site_id, payload=data)
                        # NOTE(review): other callers in this module pass
                        # result['response'] to wait_for_task_completion;
                        # confirm which shape this SDK call returns.
                        status = common.wait_for_task_completion(api, result)
                        logger.debug(status)
                        device_provisioned = True
            if not device_provisioned:
                # If we didn't find a device under this site, try and assign
                # the device to site. Again, not optimal as the device may
                # exist under a different site.
                logger.info(
                    'Device {} not found under target site {}'.format(
                        device['hostname'], device['location']))
                logger.info('Trying to add device {} to site {}'.format(
                    device['hostname'], device['location']))
                data = common.build_json_from_template(
                    templates.device_to_site_j2, {'device_ip': device_ip})
                result = api.sites.assign_device_to_site(site_id,
                                                         payload=data)
                status = common.wait_for_task_completion(api, result)
                logger.debug(status)
    else:
        logger.error('schema not found: {}'.format(_schema))
def run_discovery(api, workflow_dict):
    """
    Initiates DNA Center Device Discovery based on input from workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of discovery job
            definitions (see schema.yaml);

    :returns: Nothing
    """
    _schema = 'discovery.schema.discovery'
    logger.info('discovery::run_discovery')
    logger.debug('schema: {}'.format(_schema))
    if _schema not in workflow_dict.keys():
        logger.error('schema not found: {}'.format(_schema))
        return

    get_discoveries_url = '{}/1/99'.format(discovery_url)
    # Cycle through the rows and run the jobs flagged 'present'
    for job in workflow_dict[_schema]:
        if not (job['presence'] and 'present' in job['presence']):
            continue
        # URI: GET to discover ID of existing discoveries so that we can
        # delete/update
        existing = api.custom_caller.call_api('GET', get_discoveries_url)
        job_id = common.get_object_id(existing['response'], name=job['name'])
        if job_id is not None:
            # URI: PUT to start/rerun an existing discovery
            restart_payload = {'id': job_id, 'discoveryStatus': 'Active'}
            result = api.custom_caller.call_api('PUT', discovery_url,
                                                json=restart_payload)
            logger.info('Waiting a few seconds for discovery to start ...')
            time.sleep(5)
            status = common.wait_for_task_completion(api,
                                                     result['response'],
                                                     timeout=30)
            logger.debug(status)
            _wait_for_discovery_to_complete(api, job_id)
        else:
            # Look up the three global credential IDs referenced by name
            # in the job row (CLI + SNMP read/write).
            credential_ids = []
            for cred_type, row_key in (
                    ('CLI', 'cli'),
                    ('SNMPV2_READ_COMMUNITY', 'snmp_ro'),
                    ('SNMPV2_WRITE_COMMUNITY', 'snmp_rw')):
                found = api.network_discovery.get_global_credentials(
                    cred_type)
                credential_ids.append(
                    common.get_object_id(found['response'],
                                         description=job[row_key]))
            job.update({'discovery_range': '{}-{}'.format(job['startIp'],
                                                          job['endIp'])})
            data = common.build_json_from_template(templates.discovery_j2,
                                                   job)
            data['globalCredentialIdList'] = credential_ids
            logger.info('Adding discovery ... ')
            result = api.network_discovery.start_discovery(payload=data)
            status = common.wait_for_task_completion(api, result['response'])
            logger.debug(status)
            # Re-read the discovery list to learn the ID of the job we
            # just created, then block until it finishes.
            existing = api.custom_caller.call_api('GET', get_discoveries_url)
            job_id = common.get_object_id(existing['response'],
                                          name=job['name'])
            _wait_for_discovery_to_complete(api, job_id)