def delete_pools(api, workflow_dict):
    """
    Deletes DNA Center global IP pools marked 'absent' in the workflow tables.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of 'native' workflow tables; rows from
        the 'ip_pools' table with presence 'absent' are deleted
    :returns: Nothing
    """
    logger.info('ip_pool::delete_pools')
    for key, value in workflow_dict.items():
        if 'native' in key:
            _junk, _workflow_name, _table_name = key.split('?')
            # Cycle through the rows and delete entries marked 'absent'
            if _table_name == 'ip_pools':
                _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
                logger.debug(_ip_pool_db)
                for row in value:
                    # Fix: dropped an unused per-row api.sites.get_site() call
                    # whose result was never read (one wasted GET per row),
                    # and the redundant `else: continue`.
                    if 'absent' in row['presence']:
                        _id = common.get_object_id(
                            _ip_pool_db['response'],
                            ipPoolName=row['ipPoolName'])
                        if _id is not None:
                            logger.info('Deleting: {} with id: {}'.format(
                                row['ipPoolName'], _id))
                            _delete_uri = '{}/{}'.format(pools_uri, _id)
                            result = api.custom_caller.call_api(
                                'DELETE', _delete_uri,
                                json=common.dot_to_json(row))
                            # Only wait when the DELETE actually spawned a task
                            if result.response.taskId:
                                common.wait_for_task_completion(
                                    api, result.response)
def export_samples(table_name: str) -> None:
    """Export every feature of the named table as an individual point sample."""
    # Pull the table client-side so its features can be iterated directly.
    table = ee.FeatureCollection(f"{base_asset_directory}/{table_name}").getInfo()
    tasks = [
        export_point(feature['id'], feature['geometry']['coordinates'], table_name)
        for feature in table['features']
    ]
    wait_for_task_completion(tasks)
def main():
    """Export selected features for every model year 2001-2015."""
    ee.Initialize()
    # One export task per year; block until all of them finish.
    tasks = [export_selected_features_for_year(str(year))
             for year in range(2001, 2016)]
    wait_for_task_completion(tasks)
def main():
    """Export selected features for the configured model years."""
    ee.Initialize()
    # Full run would be ['2000', '2003', '2006', '2009', '2012', '2015', '2018'];
    # currently restricted to the two endpoint years.
    model_years = ['2000', '2015']
    tasks = [export_selected_features_for_year(year) for year in model_years]
    wait_for_task_completion(tasks)
def main():
    """Classify every model year 2001-2015 with a single worldwide model."""
    ee.Initialize()
    classifier = build_worldwide_model()
    # One classification task per year; block until all complete.
    tasks = [classify_year(classifier, year) for year in range(2001, 2016)]
    wait_for_task_completion(tasks)
def main():
    """Combine per-year maps and export each combined image to Drive."""
    tasks = [
        export_image_to_drive(combine_maps(year),
                              f"{model_snapshot_version}_combined_{year}")
        for year in range(2000, 2016)
    ]
    wait_for_task_completion(tasks)
def delete_devices(api, workflow_dict):
    """
    Deletes devices marked 'absent' from the DNA Center inventory.

    Provisioned devices are removed via the DeviceInfo customer-facing-service
    API; unprovisioned devices via the network-device API. If configuration
    cleanup fails, the delete is retried with isForceDelete.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of devices (see schema)
    :returns: Nothing
    """
    _schema = 'devices.schema.devices'
    logger.info('devices::delete_devices')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        devices_db = api.devices.get_device_list()
        for device in table_data:
            if device['hostname']:
                # Resolve the inventory id from the hostname (partial match ok)
                device_id = common.get_object_id(devices_db['response'],
                                                 hostname=device['hostname'],
                                                 strict=False)
                if device['presence'] == 'absent' and device_id is not None:
                    logger.info('device {} with id: {} will be deleted'.format(
                        device['hostname'], device_id))
                    # URI: GET device-info is used to check if a device is provisioned or not.
                    device_info_url = 'api/v2/data/customer-facing-service/DeviceInfo'
                    device_info = api.custom_caller.call_api(
                        'GET', device_info_url,
                        params={'networkDeviceId': device_id})
                    # If device is already provisioned, we need to use a DeviceInfo API to delete it.
                    # Fix: match on the looked-up device_id, not device['id'] -
                    # workflow rows are keyed by hostname and carry no 'id'.
                    device_info_id = common.get_object_id(
                        device_info['response'], networkDeviceId=device_id)
                    if device_info_id is not None:
                        # URI: DELETE but only if the device is already provisioned.
                        # We check if the device is in "device-info"
                        delete_url = 'api/v2/data/customer-facing-service/DeviceInfo/{}'.format(
                            device_info_id)
                    else:
                        # URI: DELETE if the device is not 'in use' by provisioning
                        delete_url = 'api/v1/network-device/{}'.format(
                            device_id)
                    result = api.custom_caller.call_api(
                        'DELETE', delete_url, params={'cleanConfig': True})
                    status = common.wait_for_task_completion(
                        api, result['response'], timeout=30)
                    logger.debug(status)
                    # if device fails cleanup - force device delete
                    if status['response'][
                            'isError'] == True and "Configuration cleanup failed" in status[
                                'response']['progress']:
                        # Fix: use device_id here as well (was device['id'])
                        delete_url = 'api/v1/network-device/{}'.format(
                            device_id)
                        result = api.custom_caller.call_api(
                            'DELETE', delete_url,
                            params={'isForceDelete': True})
                        status = common.wait_for_task_completion(
                            api, result['response'], timeout=30)
                        logger.debug(status)
    else:
        logger.error('schema not found: {}'.format(_schema))
def main():
    """Classify the configured model years with a single worldwide model."""
    ee.Initialize()
    classifier = build_worldwide_model()
    # Full run would be ['2000', '2003', '2006', '2009', '2012', '2015', '2018'];
    # currently restricted to the two endpoint years.
    model_years = ['2000', '2015']
    tasks = [classify_year(classifier, year) for year in model_years]
    wait_for_task_completion(tasks)
def deploy_config_to_fusion(api, row):
    """
    Builds and deploys a border-handoff template to a fusion router.

    Syncs the device, then (if reachable) fabricates a template/deployment row
    from the device facts and drives template_flow to create and deploy it.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param row: A workflow row with at least 'fusionIp' and 'fusionName';
        NOTE: the row is mutated in place with template parameters
    :returns: Nothing
    """
    logger.info("border_handoff::Deploying config to {}".format(
        row['fusionIp']))
    response = api.devices.get_device_list(managementIpAddress=row['fusionIp'])
    if response['response']:
        fusion_details = response['response'][0]
        # Force a resync so the device facts used below are current
        response = api.devices.sync_devices_using_forcesync(
            payload=[fusion_details['id']])
        common.wait_for_task_completion(api, response['response'])
        # format input correctly for functions
        if fusion_details['reachabilityStatus'] == "Reachable":
            row['templateName'] = "{}_Fusion_Border_Handoff_Config".format(
                row['fusionIp'])
            row['templateDescription'] = "Fusion Border Config By DNA Workflows"
            row['project'] = "Fusion Border Automation"
            row['composite'] = False
            row['productFamily'] = fusion_details['family']
            row['productSeries'] = fusion_details['series']
            row['softwareType'] = fusion_details['softwareType']
            row['ipAddress'] = row['fusionIp']
            row['hostName'] = row['fusionName']
            row['presence'] = "present"
            # This is a hack... need to fix this to something more logical to feed into other modules
            # (template_flow expects the same 'templatesData?...' table shapes
            # the workflow loader produces, so fake them here)
            data = {}
            data['templatesData?deployment'] = []
            data['templatesData?templates'] = []
            data['templatesData?deployment'].append(row)
            data['templatesData?templates'].append(row)
            # check for project and create if needed
            project = api.template_programmer.get_projects(row['project'])
            if not project:
                # create project
                response = api.template_programmer.create_project(
                    payload={'name': row['project']})
            # create template for fusion config
            response = template_flow.create_templates(api, data)
            # deploy config if template successfully pushed
            if not response['response']["isError"]:
                response = template_flow.deploy_templates(api, data)
                print(response)
    else:
        logger.info(
            "border_handoff::{} does not exist in DNA Center. Templates not Pushed"
            .format(row['fusionName']))
    return
def create_network_settings(api, workflow_dict):
    """
    Creates DNA Center Global Network Settings based on input from workflow_dict.
    If adding Client/Network AAA server settings, AAA server will need to
    already be configured in DNA Center.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows Network Settings (DHCP, DNS, etc) (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'globalSettings.schema.network_settings'
    logger.info('network_settings::create_network_settings')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # get settings for config
        settings = {}
        # rearrange into 1 dict so it is easier to work with
        # (table rows are item/value pairs)
        for row in table_data:
            settings[row['item']] = row['value']
        csv_fields = ['dhcpServer', 'syslogServer', 'snmpServer', 'ntpServer']
        # convert csv strings to List - these settings accept multiple servers
        for field in csv_fields:
            if settings[field]:
                settings[field] = common.csv_string_to_list(settings[field])
        if settings:
            # proceed if site specified - if not exit
            if settings['site']:
                site_info = api.sites.get_site(name=settings['site'])
                site_id = site_info['response'][0]['id'] if site_info[
                    'response'] else None
                # build URI and create network settings
                create_network_settings_uri = "{}/{}".format(
                    network_settings_intent_uri, site_id)
                # using json builder from helper file
                data = common.build_json_from_template(
                    templates.network_settings_intent_j2, settings)
                result = api.custom_caller.call_api(
                    'POST', create_network_settings_uri, json=data)
                logger.debug(result)
                common.wait_for_task_completion(api, result)
            else:
                logger.error(
                    'network_settings::create_network_settings:: Site required to update network settings'
                )
                pass
    else:
        logger.error('schema not found: {}'.format(_schema))
def delete_network_profiles(api, workflow_dict):
    """
    Deletes switching Network Profiles configured based oo input from workflow_dict.
    Wireless profile deletion is handled by wireless delete function.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of network profiles (see schema.py);
    :returns: Nothing
    """
    _schema = 'network_profiles.schema.network_profiles'
    logger.info('network_profiles::delete_network_profiles')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # get profile list to delete marked as absent
        profiles = [profile for profile in table_data if profile['presence'] == 'absent' and profile['name']]
        # get site ids
        sites = api.sites.get_site()
        # remove Profiles
        if profiles:
            for profile in profiles:
                # TODO This is the second time for this to be used... maybe create a function
                # get current sites and site ids
                data = api.custom_caller.call_api('GET', network_profile_base_url,
                                                  params={"name": profile['name']})
                if data['response']:
                    profile['id'] = data['response'][0]['siteProfileUuid']
                    # if location then assign it to profile
                    # split up sites if csv
                    if profile['sites']:
                        logger.info('network_profiles::delete_site')
                        profile['sites'] = common.csv_string_to_list(profile['sites'])
                        # get site id and add it to profile
                        # (sites must be detached before the profile can be deleted)
                        for new_site in profile['sites']:
                            site_id = common.get_object_id(sites['response'],
                                                           siteNameHierarchy=new_site)
                            site_add_url = "{}/{}/site/{}".format(network_profile_base_url,
                                                                  profile['id'], site_id)
                            result = api.custom_caller.call_api('DELETE', site_add_url)
                            logger.debug(result)
                            common.wait_for_task_completion(api, result['response'])
                    else:
                        logger.info('network_profiles: No Sites to delete')
                    # Now delete the profile
                    logger.info('network profiles: Deleting {}'.format(profile['name']))
                    profile_url = "{}/{}".format(network_profile_base_url, profile['id'])
                    result = api.custom_caller.call_api('DELETE', profile_url)
                    logger.debug(result)
                    common.wait_for_task_completion(api, result['response'])
    else:
        logger.error('schema not found: {}'.format(_schema))
def main(model_years):
    """For each year, export its features, then classify it, waiting on each step."""
    ee.Initialize()
    classifier = clf.build_worldwide_model()
    for year in model_years:
        # The feature export must finish before classifying the same year.
        export_task = features_exporter.export_selected_features_for_year(year)
        common.wait_for_task_completion([export_task])
        classify_task = clf.classify_year(classifier, year)
        common.wait_for_task_completion([classify_task])
def get_worldwide_sample_points(num_samples):
    """
    Return a FeatureCollection of roughly num_samples points spread across all
    world regions, each region sampled in proportion to its area.

    The sample is exported to (and cached as) an EE asset; if the asset already
    exists it is reused instead of resampling.
    """
    asset_name = f'{base_asset_directory}/samples_{num_samples}'
    sample_fc = read_sample(asset_name)
    if sample_fc:
        # Cached asset exists - reuse it.
        return sample_fc
    print(f"creating sample {asset_name}")
    total_area = get_total_area()

    def sample_region(region_fc):
        # Draw 'sampleSize' random pixels (as point geometries) within the region.
        total_sample_points = ee.Number(region_fc.aggregate_sum('sampleSize'))
        sample_image = ee.Image(1)\
            .clipToCollection(region_fc)
        sampled_region = sample_image\
            .sample(
                region=region_fc,
                numPixels=total_sample_points,
                projection=model_projection,
                scale=model_scale,
                geometries=True,
                seed=10
            )
        return sampled_region

    def set_num_samples_to_region(region_name):
        def set_num_samples_to_take(feature):
            # Area-proportional share of the requested sample count.
            region_sample_size = ee.Number(feature.geometry().area()).divide(
                total_area).multiply(num_samples).floor()
            return feature.set('sampleSize', region_sample_size)

        region_fc = region_boundaries(region_name)
        region_fc_with_sample_size = region_fc.map(
            lambda feature: set_num_samples_to_take(feature))
        return region_fc_with_sample_size

    # find how many samples to take for each region, based on its area
    regions_with_sample_sizes = list(
        map(set_num_samples_to_region, world_regions))
    # take that many samples for each region
    sample_points = list(map(sample_region, regions_with_sample_sizes))
    # create a single region to export
    sample_fc = ee.FeatureCollection(sample_points).flatten()
    task = ee.batch.Export.table.toAsset(collection=sample_fc,
                                         assetId=asset_name,
                                         description=asset_name.replace(
                                             '/', '_'))
    task.start()
    # Block until the export lands; abort the whole run on failure.
    wait_for_task_completion([task], exit_if_failures=True)
    return read_sample(asset_name)
def create_wireless_profile(api, workflow_dict):
    """
    Creates DNA Center Wireless Profiles.  Use for creation of new wireless
    profiles.  If updating profile see "update_wireless_profile"

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of Wireless Profiles with associated parameters. (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'wireless_profiles.schema.wireless'
    logger.info('wireless::create_wireless_profile')
    logger.debug('schema: {}'.format(_schema))
    if _schema not in workflow_dict.keys():
        logger.error('schema not found: {}'.format(_schema))
        return
    table_data = workflow_dict[_schema]
    # Intent api broken when GET for wireless profile and no current values
    # _ent_profile_db = api.non_fabric_wireless.get_wireless_profile()
    # Work around Hack: hit the raw endpoint instead
    result = api.custom_caller.call_api('GET', temp_get_wireless_profile_url)
    _ent_profile_db = result['response']
    # Names of profiles that already exist
    profile_list = [profile['name'] for profile in _ent_profile_db] if _ent_profile_db else []
    for row in table_data:
        if 'present' not in row['presence']:
            continue
        # Normalise the sites cell into a list (comma-separated string or single value)
        if "," in row['sites']:
            row['sites'] = [item.lstrip() for item in row['sites'].split(",")]
        else:
            row['sites'] = [row['sites']]
        if row['profileName'] in profile_list:
            logger.info('Profile: {} already exists'.format(row['profileName']))
        else:
            logger.info('Creating Profile: {}'.format(row['profileName']))
            data = common.build_json_from_template(templates.wireless_profile_j2, row)
            result = api.non_fabric_wireless.create_wireless_profile(payload=data)
            logger.debug(result)
            common.wait_for_task_completion(api, result)
def delete_all(api, workflow_dict):
    """
    Deletes all switching Network Profiles regardless of input from workflow_dict.
    Wireless profile deletion is handled by wireless delete_all function.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: Not used;
    :returns: Nothing
    """
    _schema = None
    logger.info('network_profiles::delete_network_profiles')
    logger.debug('schema: {}'.format(_schema))
    # get profile list (switching namespace only)
    response = api.custom_caller.call_api('GET', network_profile_base_url,
                                          params={"namespace": "switching"})
    if response['response']:
        profiles = response['response']
        # get site ids
        # NOTE(review): 'sites' is fetched but never used below - confirm
        # before removing.
        sites = api.sites.get_site()
        # remove Profiles
        for profile in profiles:
            # get sites for profiles - why can't this be an easy one liner?
            profile_site_parameters = {
                "includeSites": True,
                "excludeSettings": True,
                "populated": False
            }
            response = api.custom_caller.call_api(
                'GET',
                "{}/{}".format(network_profile_base_url, profile['siteProfileUuid']),
                params=profile_site_parameters)
            # if profile associated to site, delete site first from profile
            if response['response']['sites']:
                site_in_profile = response['response']['sites']
                for site in site_in_profile:
                    site_remove_url = "{}/{}/site/{}".format(network_profile_base_url,
                                                             profile['siteProfileUuid'],
                                                             site['uuid'])
                    result = api.custom_caller.call_api('DELETE', site_remove_url)
                    logger.debug(result)
                    common.wait_for_task_completion(api, result['response'])
            # Now delete the profile
            logger.info('network profiles: Deleting {}'.format(profile['name']))
            profile_url = "{}/{}".format(network_profile_base_url,
                                         profile['siteProfileUuid'])
            result = api.custom_caller.call_api('DELETE', profile_url)
            logger.debug(result)
            common.wait_for_task_completion(api, result['response'])
def update_wireless_profile(api, workflow_dict):
    """
    Updates DNA Center Wireless Profiles.  Use for updating of existing
    wireless profiles.  If creating profile see "create_wireless_profile"

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of Wireless Profiles with associated parameters. (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'wireless_profiles.schema.wireless'
    logger.info('wireless::update_wireless_profile')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # API does not seem to like setting SSID info during create process so have to use update to add SSID details
        for row in table_data:
            if 'present' in row['presence']:
                # Get current profile names (re-read each row; earlier updates may change it)
                _ent_profile_db = api.non_fabric_wireless.get_wireless_profile()
                # Get Current Profiles
                profile_list = [profile['profileDetails']['name'] for profile in _ent_profile_db]
                # If sites comma separated, split them up into a list
                if "," in row['sites']:
                    row['sites'] = row['sites'].split(",")
                    row['sites'] = [item.lstrip() for item in row['sites']]
                else:
                    row['sites'] = [row['sites']]
                # filter out mapping for specific profile in row
                # (SSID-to-profile mappings live in a separate workflow table)
                filtered_mappings = [mappings for mappings in workflow_dict['ssid_to_profile_mapping.schema.wireless']
                                     if mappings['profileName'] == row['profileName'] and mappings['presence'] == "present"]
                row['ssidDetails'] = filtered_mappings
                if row['profileName'] in profile_list:
                    logger.info('Updating Profile: {}'.format(row['profileName']))
                    data = common.build_json_from_template(templates.wireless_profile_j2, row)
                    result = api.non_fabric_wireless.update_wireless_profile(payload=data)
                    logger.debug(result)
                    common.wait_for_task_completion(api, result)
                else:
                    logger.info('Profile: {} does not exist'.format(row['profileName']))
                    pass
    else:
        logger.error('schema not found: {}'.format(_schema))
def run_discovery(api, workflow_dict):
    """Start a DNA Center discovery job for each 'present' discovery row."""
    logger.info('discovery::run_discovery')
    for key, value in workflow_dict.items():
        if 'native' not in key:
            continue
        _junk, _name, _key = key.split('?')
        # Cycle through the rows and start a discovery for each 'present' entry
        for row in value:
            if 'present' in row['presence'] and _name == 'discovery':
                # Resolve global credential ids (CLI + SNMP ro/rw) by description
                _creds = []
                for cred_type, column in (('CLI', 'cli'),
                                          ('SNMPV2_READ_COMMUNITY', 'snmp_ro'),
                                          ('SNMPV2_WRITE_COMMUNITY', 'snmp_rw')):
                    cred_db = api.network_discovery.get_global_credentials(cred_type)
                    _creds.append(common.get_object_id(cred_db['response'],
                                                       description=row[column]))
                _discovery = {
                    "discoveryType": row['discoveryType'],
                    "preferredMgmtIPMethod": row['preferredMgmtIPMethod'],
                    "ipAddressList": '{}-{}'.format(row['startIp'], row['endIp']),
                    "protocolOrder": "ssh",
                    "timeout": 5,
                    "retry": 3,
                    "globalCredentialIdList": _creds,
                    "name": row['name']
                }
                logger.info('Adding discovery ... ')
                result = api.network_discovery.start_discovery(payload=_discovery)
                common.wait_for_task_completion(api, result['response'])
                logger.debug(result)
def delete_all(api, workflow_dict):
    """
    Deletes all Wireless Design Information regardless of input from workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: Not used;
    :returns: Nothing
    """
    _schema = None
    logger.info('wireless::delete all')
    logger.debug('schema: {}'.format(_schema))
    # SSID's to remove
    ssids = api.non_fabric_wireless.get_enterprise_ssid()
    # delete SSIDs
    if ssids:
        for ssid in ssids:
            for detail in ssid['ssidDetails']:
                logger.info('Delete Wireless::delete SSID {}'.format(detail['name']))
                result = api.non_fabric_wireless.delete_enterprise_ssid(detail['name'])
                logger.debug(result)
                common.wait_for_task_completion(api, result)
    else:
        logger.info('Delete Wireless: No SSIDs to delete')
    # delete profiles
    try:
        profiles = api.non_fabric_wireless.get_wireless_profile()
        if profiles:
            for profile in profiles:
                no_site_profile = {'profileName': profile['profileDetails']['name'],
                                   'sites': ""}
                # remove sites from profile (sites must be detached before delete)
                data = common.build_json_from_template(templates.wireless_profile_j2,
                                                       no_site_profile)
                result = api.non_fabric_wireless.update_wireless_profile(payload=data)
                logger.debug(result)
                common.wait_for_task_completion(api, result)
                # now good to delete profile
                logger.info('Delete Wireless::delete Profile {}'.format(
                    profile['profileDetails']['name']))
                wireless_profile_delete_url = "/dna/intent/api/v1/wireless-profile/{}".format(
                    profile['profileDetails']['name'])
                result = api.custom_caller.call_api('DELETE', wireless_profile_delete_url)
                logger.debug(result)
                common.wait_for_task_completion(api, result)
    # Fix: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; narrow to Exception for the same best-effort intent.
    except Exception:
        logger.info('delete_all::no wireless profiles found')
    # delete wireless interfaces.
    # Can't delete management so we have to make sure it is added
    interface = json.dumps([{"interfaceName": "management", "vlanId": 0}])
    data = common.build_json_from_template(templates.wireless_interface_j2, interface)
    result = api.custom_caller.call_api('POST', wireless_int_url, json=data)
    logger.debug(result)
    common.wait_for_task_completion(api, result['response'])
def delete(api, workflow_dict):
    """
    Deletes DNA Center areas, sites and floors.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of sites (see schema.py);
    :returns: Nothing
    """
    _schema = 'sites.schema.sites'
    logger.info('sites::delete')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        for row in table_data:
            # Re-read the site tree for each row; earlier deletions change it
            _sites_db = api.sites.get_site()
            _deleted_sites = []
            if 'absent' in row['presence']:
                site_name_hierarchy = '{}/{}'.format(row['parentName'], row['name'])
                _id = common.get_object_id(_sites_db['response'],
                                           siteNameHierarchy=site_name_hierarchy)
                if _id is not None:
                    # When deleting a site we need to figure out the children and delete in reverse
                    _child_list_sorted = _get_sorted_child_list(_sites_db['response'], _id)
                    for _child in _child_list_sorted:
                        logger.info('Deleting site: {}'.format(_child[0]))
                        logger.debug('Deleting: {} with id: {}'.format(_child[0], _child[1]))
                        # _deleted_sites guards against deleting the same node twice
                        if _child[1] not in _deleted_sites:
                            result = api.sites.delete_site(site_id=_child[1])
                            status = common.wait_for_task_completion(api, result)
                            logger.debug(status)
                            _deleted_sites.append(_child[1])
    else:
        logger.error('schema not found: {}'.format(_schema))
def delete_all(api, workflow_dict):
    """
    Deletes ALL DNA Center areas, sites and floors.

    Computes the removal order for floors/sites/areas but does not attempt to
    remove dependencies such as IP reservations or assigned devices.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: Not used;
    :returns: Nothing
    """
    _schema = None
    logger.info('sites::delete_all')
    logger.debug('schema: {}'.format(_schema))
    _sites_db = api.sites.get_site()
    _deleted_sites = []
    for _site in _sites_db['response']:
        # Children must go before parents, so walk the sorted child list
        for _child in _get_sorted_child_list(_sites_db['response'], _site['id']):
            child_name, child_id = _child[0], _child[1]
            # Never delete the Global root, and never delete a node twice
            if child_id in _deleted_sites or child_name == "Global":
                continue
            logger.info('Deleting site: {}'.format(child_name))
            logger.debug('Deleting: {} with id: {}'.format(child_name, child_id))
            result = api.sites.delete_site(site_id=child_id)
            status = common.wait_for_task_completion(api, result)
            logger.debug(status)
            _deleted_sites.append(child_id)
def delete_all_aaa(api, workflow_dict):
    """
    Deletes all DNA Center Global AAA Servers.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: Not Used;
    :returns: Nothing
    """
    _schema = None
    logger.info('system_settings::delete_all_aaa')
    logger.debug('schema: {}'.format(_schema))
    # get Current AAA Servers and get a id list
    _aaa_db = api.custom_caller.call_api('GET', aaa_uri)['response']
    if not _aaa_db:
        logger.info('No AAA servers to Delete')
        return
    # Delete each server by instance uuid; keep the ip around for logging
    for server_uuid, server_ip in [(aaa['instanceUuid'], aaa['ipAddress'])
                                   for aaa in _aaa_db]:
        logger.info('Deleting AAA: {}'.format(server_ip))
        result = api.custom_caller.call_api(
            'DELETE', "{}/{}".format(aaa_uri, server_uuid))
        status = common.wait_for_task_completion(api, result.response)
        logger.debug(status)
def delete_all_global_credentials(api, workflow_dict):
    """
    Deletes All DNA Center Global Credentials.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: Not Used;
    :returns: Nothing
    """
    _schema = None
    # Fix: the entry log line was emitted twice; log it once.
    logger.info('network_settings::delete_all_global_credentials')
    logger.debug('schema: {}'.format(_schema))
    # Every credential type DNA Center stores globally
    _credential_types = [
        'SNMPV2_WRITE_COMMUNITY', 'SNMPV2_READ_COMMUNITY', 'CLI', 'SNMPV3',
        'HTTP_WRITE', 'HTTP_READ', 'NETCONF'
    ]
    for _type in _credential_types:
        _creds = api.network_discovery.get_global_credentials(_type)
        for _cred in _creds['response']:
            logger.info('Deleting {} with name: {} and id: {}'.format(
                _type, _cred['description'], _cred['id']))
            result = api.network_discovery.delete_global_credentials_by_id(
                _cred['id'])
            status = common.wait_for_task_completion(api, result.response)
            logger.debug(status)
def provision_wireless_device(api, workflow_dict):
    """
    Provisions Wireless LAN Controllers (WLCs) in Cisco DNA Center.
    Flexconnect and OTT interface parameters (if in use) are required input or
    provision will fail

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of Wireless WLCs with associated parameters. (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'wireless_provisioning.schema.wireless'
    logger.info('wireless::provision_wireless_device')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # get devices to be provisioned.
        new_provision_devices = [device for device in table_data
                                 if device['presence'] == 'present']
        if new_provision_devices:
            for device in new_provision_devices:
                # break up managed locations if csv
                if "," in device['managedAPLocations']:
                    device['managedAPLocations'] = device['managedAPLocations'].split(",")
                else:
                    device['managedAPLocations'] = [device['managedAPLocations']]
                # #get IP interface configs for specific WLC
                # Needed for centrail SSID's but not yet implemented
                # (interface rows live in a separate workflow table, joined by deviceName)
                wireless_int_detail = [detail for detail in workflow_dict['wireless_profisioning_interface.schema.wireless']
                                       if detail['deviceName'] == device['deviceName'] and detail['presence'] == 'present']
                if wireless_int_detail:
                    device['dynamicInterfaces'] = wireless_int_detail
                logger.info('Provisioning WLC: {}'.format(device['deviceName']))
                data = common.build_json_from_template(templates.provision_j2, device)
                result = api.non_fabric_wireless.provision(payload=data)
                logger.debug(result)
                common.wait_for_task_completion(api, result, timeout=30)
        else:
            logger.info('Provisioning WLC: No WLCs to Provision')
            pass
    else:
        logger.error('schema not found: {}'.format(_schema))
def delete_reservations(api, delete_workflow_dict):
    """
    Releases IP pool reservations marked 'absent' in the workflow tables.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param delete_workflow_dict: A dictionary of 'native' workflow tables;
        rows from the 'ip_reservations' table with presence 'absent' are released
    :returns: Nothing
    """
    logger.info('ip_pool::delete_reservations')
    for key, value in delete_workflow_dict.items():
        if 'native' in key:
            _junk, _workflow_name, _table_name = key.split('?')
            # Cycle through the rows and release entries marked 'absent'
            if _table_name == 'ip_reservations':
                _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
                _sites_db = api.sites.get_site()
                logger.debug('******** _ip_pool_db *********')
                logger.debug(_ip_pool_db)
                logger.debug('******** _sites_db *********')
                logger.debug(_sites_db)
                for row in value:
                    if 'absent' in row['presence']:
                        # Reservations are scoped per site, so resolve the site first
                        _site_id = common.get_object_id(
                            _sites_db['response'],
                            siteNameHierarchy=row['siteName'])
                        groups_uri = '{}?siteId={}'.format(
                            groups_uri_base, _site_id)
                        _ip_groups_db = api.custom_caller.call_api(
                            'GET', groups_uri)
                        _id = common.get_object_id(_ip_groups_db['response'],
                                                   groupName=row['groupName'])
                        if _id is not None:
                            logger.info(
                                'Releasing reservation: {} with id: {}'.format(
                                    row['groupName'], _id))
                            delete_uri = '{}/{}'.format(groups_uri_base, _id)
                            result = api.custom_caller.call_api(
                                'DELETE', delete_uri)
                            logger.debug(result)
                            # Only wait when the DELETE actually spawned a task
                            if result.response.taskId:
                                common.wait_for_task_completion(
                                    api, result.response)
def create_enterprise_ssid(api, workflow_dict):
    """
    Creates DNA Center Enterprise SSIDs.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of Enterprise SSID definitions (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'ssids.schema.wireless'
    logger.info('wireless::create_enterprise_ssid')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        _ent_ssid_db = api.non_fabric_wireless.get_enterprise_ssid()
        # Get existing SSID List.
        # Fix: the original iterated the accidental 1-tuple `_ent_ssid_db,`
        # (note trailing comma), which only worked by coincidence; this is the
        # equivalent straightforward walk of the profile list.
        ssid_list = []
        if _ent_ssid_db:
            for wlan_profile in _ent_ssid_db:
                for ssid in wlan_profile['ssidDetails']:
                    ssid_list.append(ssid['name'])
        for row in table_data:
            if 'present' in row['presence']:
                if row['name'] in ssid_list:
                    logger.info('SSID: {} already exists'.format(row['name']))
                else:
                    logger.info('Creating Enterprise SSID: {}'.format(row['name']))
                    data = common.build_json_from_template(templates.ssid_j2, row)
                    result = api.non_fabric_wireless.create_enterprise_ssid(payload=data)
                    logger.debug(result)
                    common.wait_for_task_completion(api, result)
    else:
        logger.error('schema not found: {}'.format(_schema))
def main(num_samples): model_year = '2000' # Siebert labels # Step 1/3: create or fetch sample points asset_description = f'training_sample{num_samples}_all_features_labels' image_asset_id = f'{model_snapshot_path_prefix}_{asset_description}_image' table_asset_id = f'{model_snapshot_path_prefix}_{asset_description}_table' ee.Initialize() sample_points_fc = get_worldwide_sample_points(num_samples) # Step 2/3: sample all features into an image features_labels_image = create_all_features_labels_image( sample_points_fc, model_year) task = ee.batch.Export.image.toAsset(crs=model_projection, image=features_labels_image, scale=model_scale, assetId=image_asset_id, description=asset_description) task.start() wait_for_task_completion([task], exit_if_failures=True) # Step 3/3: convert image into a table features_labels_image = ee.Image(image_asset_id) # For training sample, it is more efficient to export a table than a raster with (mostly) 0's training_fc = features_labels_image.sampleRegions( collection=sample_points_fc, projection=model_projection, scale=model_scale, geometries=True) task = ee.batch.Export.table.toAsset(collection=training_fc, assetId=table_asset_id, description=asset_description.replace( '/', '_')) task.start() wait_for_task_completion([task], exit_if_failures=True) # Step 3a: export to drive for offline model development export_asset_table_to_drive(table_asset_id)
def create_pools(api, workflow_dict):
    """
    Creates DNA Center global IP pools for rows marked 'present'.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary of 'native' workflow tables; rows from
        the 'ip_pools' table with presence 'present' are created
    :returns: Nothing
    """
    logger.info('ip_pool::create_pools')
    for key, value in workflow_dict.items():
        if 'native' in key:
            _junk, _workflow_name, _table_name = key.split('?')
            # Cycle through the rows and create entries with 'present' set
            if _table_name == 'ip_pools':
                _ip_pool_db = api.custom_caller.call_api('GET', pools_uri)
                for row in value:
                    if 'present' in row['presence']:
                        _id = common.get_object_id(
                            _ip_pool_db['response'],
                            ipPoolName=row['ipPoolName'])
                        if _id is not None:
                            logger.info(
                                'IP Pool: {} already exists with id: {}'.
                                format(row['ipPoolName'], _id))
                        else:
                            logger.info('Creating IP Pool: {}'.format(
                                row['ipPoolName']))
                            # Fix: copy the shared template dict instead of
                            # mutating templates.ip_pool in place, which let
                            # one row's values leak into later calls.
                            data = dict(templates.ip_pool)
                            data['ipPoolName'] = row['ipPoolName']
                            data['ipPoolCidr'] = row['ipPoolCidr']
                            result = api.custom_caller.call_api('POST',
                                                                pools_uri,
                                                                json=data)
                            logger.debug(result)
                            # Only wait when the POST actually spawned a task
                            if result.response.taskId:
                                common.wait_for_task_completion(
                                    api, result.response)
def delete_network_settings(api, workflow_dict):
    """
    Deletes DNA Center Global Credentials.  A dummy DNS server and DNs domain
    are pushed to DNAC due to current limitations

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: Not Used;
    :returns: Nothing
    """
    # NOTE: the intent API would not delete certain settings, so this posts the
    # old settings endpoint with blank values populated to clear them.
    logger.info('network_settings::delete_network_credentials')
    payload = json.loads(templates.network_settings_j2)
    result = api.custom_caller.call_api('POST', network_settings_uri, json=payload)
    status = common.wait_for_task_completion(api, result['response'])
    logger.debug(status)
def provision_devices(api, workflow_dict):
    """
    Provisions devices to their assigned sites based on the devices workflow table.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows of devices (see schema);
    :returns: Nothing
    """
    _schema = 'devices.schema.devices'
    logger.info('devices::provision_devices')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        sites_db = api.sites.get_site()
        devices_db = api.devices.get_device_list()
        for device in table_data:
            # Skip rows removed from the design or not flagged for provisioning
            if device['presence'] == 'absent' or device[
                    'provisionDevice'] == False:
                continue
            site_id = common.get_object_id(
                sites_db['response'], siteNameHierarchy=device['location'])
            # strict=False: hostname may be a partial match of the inventory name
            device_id = common.get_object_id(devices_db['response'],
                                             hostname=device['hostname'],
                                             strict=False)
            device_name = common.get_object_id(devices_db['response'],
                                               return_param='hostname',
                                               hostname=device['hostname'],
                                               strict=False)
            logger.info('Provisioning for device: {}, at site: {}'.format(
                device['hostname'], device['location']))
            payload_data = {}
            payload_data.update({'name': device_name})
            payload_data.update({'networkDeviceId': device_id})
            payload_data.update({'siteId': site_id})
            data = common.build_json_from_template(
                templates.provision_device_j2, payload_data)
            result = api.custom_caller.call_api('POST',
                                                provision_device_url,
                                                json=data)
            # Provisioning is slow; tree=True presumably follows child tasks
            # as well - confirm in common.wait_for_task_completion
            status = common.wait_for_task_completion(api,
                                                     result['response'],
                                                     timeout=45,
                                                     tree=True)
            logger.debug(status)
    else:
        logger.error('schema not found: {}'.format(_schema))
def add_aaa(api, workflow_dict):
    """
    Creates DNA Center Global AAA Server based on input from workflow_dict.

    :param api: An instance of the dnacentersdk.DNACenterAPI class
    :param workflow_dict: A dictionary containing rows with AAA server settings required (see schema.yaml);
    :returns: Nothing
    """
    _schema = 'aaa.schema.system_settings'
    logger.info('system_settings::add_aaa')
    logger.debug('schema: {}'.format(_schema))
    if _schema in workflow_dict.keys():
        table_data = workflow_dict[_schema]
        # get Current AAA Servers and get a name list
        _aaa_db = api.custom_caller.call_api('GET', aaa_uri)['response']
        if _aaa_db:
            aaa_ips = [aaa['ipAddress'] for aaa in _aaa_db]
        else:
            aaa_ips = []
        # process each row
        for row in table_data:
            # if present marked and ise not already added to dnac
            if row['presence'] == 'present':
                if row['ipAddress'] not in aaa_ips:
                    logger.info('Adding AAA: {}. This may take some time...'.format(row['ipAddress']))
                    data = common.build_json_from_template(templates.aaa_j2, row)
                    result = api.custom_caller.call_api('POST', aaa_uri, json=data)
                    status = common.wait_for_task_completion(api, result.response, timeout=60)
                    logger.debug(status)
                    # if added successfully, wait until ise process is completed
                    if not status['response']['isError']:
                        logger.info('Waiting on AAA to move from INPROGRESS to ACTIVE')
                        _wait_on_ise_completion(api, row)
                    else:
                        logger.error(status['response']['failureReason'])
    else:
        logger.error('schema not found: {}'.format(_schema))