def generate_data_stores_inventory(
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    config_dir=default_config_dir
):
    user_config = get_config('userconfig', config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No environments specified!')
    for env_name in env_names:
        env = user_config['environments'][env_name]
        ags_instances = superfilter(env['ags_instances'].keys(), included_instances, excluded_instances)
        log.info('Listing data stores on ArcGIS Server instances {}'.format(', '.join(ags_instances)))
        for ags_instance in ags_instances:
            ags_instance_props = env['ags_instances'][ags_instance]
            server_url = ags_instance_props['url']
            token = ags_instance_props['token']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                data_stores = list_data_stores(server_url, token, session=session)
                for data_store in data_stores:
                    yield dict(env_name=env_name, ags_instance=ags_instance, **data_store)
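
# Usage sketch (not part of the original module; assumes userconfig.yml defines
# a 'dev' environment whose instances already have valid tokens):
#
#     for data_store in generate_data_stores_inventory(included_envs=('dev',)):
#         # Each record combines env_name/ags_instance with whatever fields
#         # list_data_stores returns for a single data store item.
#         print(data_store)
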
def test_services(
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    included_service_folders=asterisk_tuple, excluded_service_folders=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    warn_on_errors=False,
    config_dir=default_config_dir
):
    user_config = get_config('userconfig', config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No environments specified!')
    for env_name in env_names:
        env = user_config['environments'][env_name]
        ags_instances = superfilter(env['ags_instances'].keys(), included_instances, excluded_instances)
        log.info('Testing services on ArcGIS Server instances {}'.format(', '.join(ags_instances)))
        for ags_instance in ags_instances:
            ags_instance_props = env['ags_instances'][ags_instance]
            server_url = ags_instance_props['url']
            token = ags_instance_props['token']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                service_folders = list_service_folders(server_url, token, session=session)
                for service_folder in superfilter(service_folders, included_service_folders, excluded_service_folders):
                    for service in list_services(server_url, token, service_folder, session=session):
                        service_name = service['serviceName']
                        service_type = service['type']
                        if superfilter((service_name,), included_services, excluded_services):
                            test_data = test_service(
                                server_url, token, service_name, service_folder, service_type,
                                warn_on_errors, session=session
                            )
                            yield dict(
                                env_name=env_name,
                                ags_instance=ags_instance,
                                service_folder=service_folder,
                                service_name=service_name,
                                service_type=service_type,
                                **test_data
                            )
def generate_tokens(
    self,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    username=None, password=None, reuse_credentials=False, expiration=15
):
    user_config = get_config('userconfig', self.config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No environments specified!')
    if reuse_credentials:
        username, password = prompt_for_credentials(username, password)
    needs_save = False
    for env_name in env_names:
        env = user_config['environments'][env_name]
        ags_instances = superfilter(env['ags_instances'].keys(), included_instances, excluded_instances)
        log.info('Refreshing tokens for ArcGIS Server instances: {}'.format(', '.join(ags_instances)))
        for ags_instance in ags_instances:
            ags_instance_props = env['ags_instances'][ags_instance]
            server_url = ags_instance_props['url']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                new_token = generate_token(server_url, username, password, expiration, ags_instance, session=session)
                if new_token:
                    ags_instance_props['token'] = new_token
                    needs_save = True
    if needs_save:
        set_config(user_config, 'userconfig', self.config_dir)
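
# Usage sketch (hypothetical; generate_tokens is a method, so it is called on
# whatever runner object owns config_dir -- the 'runner' name below is assumed):
#
#     runner.generate_tokens(included_envs=('dev',), username='siteadmin',
#                            reuse_credentials=True, expiration=60)
#
# Tokens are written back to userconfig.yml only if at least one instance
# returned a new token (needs_save guards the set_config call).
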
def publish_config(
    config, config_dir=default_config_dir,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    copy_source_files_from_staging_folder=True,
    cleanup_services=False,
    service_prefix='',
    service_suffix='',
    warn_on_publishing_errors=False,
    warn_on_validation_errors=False,
    create_backups=True,
    update_timestamps=True
):
    env_names = superfilter(config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No publishable environments specified!')
    log.info('Publishing environments: {}'.format(', '.join(env_names)))
    user_config = get_config('userconfig', config_dir)
    for env_name in env_names:
        env = config['environments'][env_name]
        ags_instances = superfilter(env['ags_instances'], included_instances, excluded_instances)
        if len(ags_instances) > 0:
            for result in publish_env(
                config, env_name, user_config,
                included_instances, excluded_instances,
                included_services, excluded_services,
                copy_source_files_from_staging_folder,
                cleanup_services,
                service_prefix,
                service_suffix,
                warn_on_publishing_errors,
                warn_on_validation_errors,
                create_backups,
                update_timestamps
            ):
                yield result
        else:
            log.warn('No publishable instances specified for environment {}'.format(env_name))
def get_configs(included_configs=asterisk_tuple, excluded_configs=empty_tuple, config_dir=default_config_dir):
    if len(included_configs) == 1 and included_configs[0] == '*':
        log.debug('No config names specified, reading all configs in directory: {}'.format(config_dir))
        config_names = [
            os.path.splitext(os.path.basename(config_file))[0]
            for config_file in superfilter(
                os.listdir(config_dir),
                inclusion_patterns=('*.yml',),
                exclusion_patterns=('userconfig.yml',)
            )
        ]
    else:
        config_names = included_configs
    config_names = superfilter(config_names, included_configs, excluded_configs)
    log.debug('Getting configs \'{}\' in directory: {}'.format(', '.join(config_names), config_dir))
    return OrderedDict(
        (config_name, get_config(config_name, config_dir))
        for config_name in config_names
    )
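
# Usage sketch (hypothetical config/environment names; not part of the original
# module): read every *.yml config except userconfig.yml, then publish each one
# to the 'dev' environment, collecting the per-service results that
# publish_config yields.
#
#     results = []
#     for config_name, config in get_configs(excluded_configs=('template',)).iteritems():
#         results.extend(publish_config(config, included_envs=('dev',)))
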
def cleanup_env(
    config, env_name,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    config_dir=default_config_dir
):
    env = config['environments'][env_name]
    ags_instances = superfilter(env['ags_instances'], included_instances, excluded_instances)
    if len(ags_instances) == 0:
        raise RuntimeError('No cleanable instances specified!')
    user_config = get_config('userconfig', config_dir)
    for ags_instance in ags_instances:
        cleanup_instance(ags_instance, env_name, config, user_config)
def cleanup_config(
    config,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    config_dir=default_config_dir
):
    env_names = superfilter(config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No cleanable environments specified!')
    log.info('Cleaning environments: {}'.format(', '.join(env_names)))
    for env_name in env_names:
        cleanup_env(config, env_name, included_instances, excluded_instances, config_dir)
def batch_import_sde_connection_files(
    self,
    included_connection_files=asterisk_tuple, excluded_connection_files=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple
):
    log.info('Batch importing SDE connection files')
    user_config = get_config('userconfig', self.config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No environments specified!')
    for env_name in env_names:
        env = user_config['environments'][env_name]
        sde_connections_dir = env['sde_connections_dir']
        sde_connection_files = superfilter(
            [
                os.path.splitext(os.path.basename(sde_connection_file))[0]
                for sde_connection_file in list_sde_connection_files_in_folder(sde_connections_dir)
            ],
            included_connection_files, excluded_connection_files
        )
        ags_instances = superfilter(env['ags_instances'].keys(), included_instances, excluded_instances)
        log.info('Importing SDE connection files for ArcGIS Server instances: {}'.format(', '.join(ags_instances)))
        for ags_instance in ags_instances:
            ags_instance_props = env['ags_instances'][ags_instance]
            ags_connection = ags_instance_props['ags_connection']
            for sde_connection_file in sde_connection_files:
                import_sde_connection_file(
                    ags_connection,
                    os.path.join(sde_connections_dir, sde_connection_file + '.sde')
                )
def restart_services(
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    included_service_folders=asterisk_tuple, excluded_service_folders=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    include_running_services=True,
    delay=30,
    max_retries=3,
    test_after_restart=True,
    config_dir=default_config_dir
):
    user_config = get_config('userconfig', config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No environments specified!')
    for env_name in env_names:
        env = user_config['environments'][env_name]
        ags_instances = superfilter(env['ags_instances'].keys(), included_instances, excluded_instances)
        log.info('Restarting services on ArcGIS Server instances {}'.format(', '.join(ags_instances)))
        for ags_instance in ags_instances:
            ags_instance_props = env['ags_instances'][ags_instance]
            server_url = ags_instance_props['url']
            token = ags_instance_props['token']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                service_folders = list_service_folders(server_url, token, session=session)
                for service_folder in superfilter(service_folders, included_service_folders, excluded_service_folders):
                    for service in list_services(server_url, token, service_folder, session=session):
                        service_name = service['serviceName']
                        service_type = service['type']
                        if superfilter((service_name,), included_services, excluded_services):
                            if not include_running_services:
                                status = get_service_status(server_url, token, service_name, service_folder, service_type, session=session)
                                configured_state = status.get('configuredState')
                                if configured_state == 'STARTED':
                                    log.debug(
                                        'Skipping restart of service {}/{} ({}) because its configured state is {} '
                                        'and include_running_services is {}'
                                        .format(service_folder, service_name, service_type, configured_state, include_running_services)
                                    )
                                    continue
                            restart_service(
                                server_url, token, service_name, service_folder, service_type,
                                delay, max_retries, test_after_restart,
                                session=session
                            )
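
# Usage sketch (hypothetical folder/instance names; not part of the original
# module): restart services in one folder on one instance, skipping any service
# whose configured state is STARTED and skipping the post-restart test.
#
#     restart_services(
#         included_service_folders=('Utilities',),
#         included_instances=('coolgisserver',),
#         include_running_services=False,
#         test_after_restart=False,
#     )
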
def find_service_dataset_usages(
    included_datasets=asterisk_tuple, excluded_datasets=empty_tuple,
    included_users=asterisk_tuple, excluded_users=empty_tuple,
    included_databases=asterisk_tuple, excluded_databases=empty_tuple,
    included_versions=asterisk_tuple, excluded_versions=empty_tuple,
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    included_service_folders=asterisk_tuple, excluded_service_folders=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    config_dir=default_config_dir
):
    user_config = get_config('userconfig', config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    if len(env_names) == 0:
        raise RuntimeError('No environments specified!')
    for env_name in env_names:
        env = user_config['environments'][env_name]
        ags_instances = superfilter(env['ags_instances'].keys(), included_instances, excluded_instances)
        log.info('Finding service dataset usages on ArcGIS Server instances {}'.format(', '.join(ags_instances)))
        for ags_instance in ags_instances:
            ags_instance_props = env['ags_instances'][ags_instance]
            server_url = ags_instance_props['url']
            token = ags_instance_props['token']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                service_folders = list_service_folders(server_url, token, session=session)
                for service_folder in superfilter(service_folders, included_service_folders, excluded_service_folders):
                    for service in list_services(server_url, token, service_folder, session=session):
                        service_name = service['serviceName']
                        service_type = service['type']
                        service_props = dict(
                            env_name=env_name,
                            ags_instance=ags_instance,
                            service_folder=service_folder,
                            service_name=service_name,
                            service_type=service_type
                        )
                        if superfilter((service_name,), included_services, excluded_services):
                            for dataset_props in list_service_workspaces(
                                server_url, token, service_name, service_folder, service_type, session=session
                            ):
                                if (
                                    superfilter((dataset_props['dataset_name'],), included_datasets, excluded_datasets) and
                                    superfilter((dataset_props['user'],), included_users, excluded_users) and
                                    superfilter((dataset_props['database'],), included_databases, excluded_databases) and
                                    superfilter((dataset_props['version'],), included_versions, excluded_versions)
                                ):
                                    yield dict(chain(service_props.iteritems(), dataset_props.iteritems()))
def list_service_layer_fields(
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    included_service_folders=asterisk_tuple, excluded_service_folders=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    warn_on_errors=False,
    config_dir=default_config_dir
):
    import arcpy
    arcpy.env.overwriteOutput = True

    user_config = get_config('userconfig', config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    for env_name in env_names:
        log.debug('Listing service layers and fields for environment {}'.format(env_name))
        env = user_config['environments'][env_name]
        for ags_instance in superfilter(env['ags_instances'], included_instances, excluded_instances):
            ags_instance_props = user_config['environments'][env_name]['ags_instances'][ags_instance]
            ags_connection = ags_instance_props['ags_connection']
            server_url = ags_instance_props['url']
            token = ags_instance_props['token']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                service_folders = list_service_folders(server_url, token, session=session)
                for service_folder in superfilter(service_folders, included_service_folders, excluded_service_folders):
                    for service in list_services(server_url, token, service_folder, session=session):
                        service_name = service['serviceName']
                        service_type = service['type']
                        if (
                            service_type == 'MapServer' and
                            superfilter((service_name,), included_services, excluded_services)
                        ):
                            service_props = dict(
                                env_name=env_name,
                                ags_instance=ags_instance,
                                service_folder=service_folder,
                                service_name=service_name,
                                service_type=service_type,
                                ags_connection=ags_connection
                            )
                            try:
                                service_manifest = get_service_manifest(server_url, token, service_name, service_folder, service_type, session=session)
                                service_props['mxd_path'] = mxd_path = service_manifest['resources'][0]['onPremisePath']
                                log.info(
                                    'Listing layers and fields for {service_type} service {service_folder}/{service_name} '
                                    'on ArcGIS Server instance {ags_instance} '
                                    '(Connection File: {ags_connection}, MXD Path: {mxd_path})'
                                    .format(**service_props)
                                )
                                if not arcpy.Exists(mxd_path):
                                    raise RuntimeError('MXD {} does not exist!'.format(mxd_path))
                                mxd = open_mxd(mxd_path)
                                for layer in list_layers_in_mxd(mxd):
                                    if not (
                                        (hasattr(layer, 'isGroupLayer') and layer.isGroupLayer) or
                                        (hasattr(layer, 'isRasterLayer') and layer.isRasterLayer)
                                    ):
                                        layer_name = getattr(layer, 'longName', layer.name)
                                        try:
                                            layer_props = get_layer_properties(layer)
                                        except StandardError as e:
                                            log.exception(
                                                'An error occurred while retrieving properties for layer {} in MXD {}'
                                                .format(layer_name, mxd_path)
                                            )
                                            if not warn_on_errors:
                                                raise
                                            else:
                                                yield dict(
                                                    error='Error retrieving layer properties: {}'.format(e.message),
                                                    layer_name=layer_name,
                                                    **service_props
                                                )
                                                continue
                                        try:
                                            if layer_props['is_broken']:
                                                raise RuntimeError(
                                                    'Layer\'s data source is broken (Layer: {}, Data Source: {})'
                                                    .format(layer_name, getattr(layer, 'dataSource', 'n/a'))
                                                )
                                            for field_props in get_layer_fields(layer):
                                                field_props['needs_index'] = not field_props['has_index'] and (
                                                    field_props['in_definition_query'] or
                                                    field_props['in_label_class_expression'] or
                                                    field_props['in_label_class_sql_query'] or
                                                    field_props['field_name'] == layer_props['symbology_field'] or
                                                    field_props['field_type'] == 'Geometry'
                                                )
                                                yield dict(chain(service_props.iteritems(), layer_props.iteritems(), field_props.iteritems()))
                                        except StandardError as e:
                                            log.exception(
                                                'An error occurred while listing fields for layer {} in MXD {}'
                                                .format(layer_name, mxd_path)
                                            )
                                            if not warn_on_errors:
                                                raise
                                            else:
                                                yield dict(
                                                    chain(service_props.iteritems(), layer_props.iteritems()),
                                                    error='Error retrieving layer fields: {}'.format(e.message)
                                                )
                            except StandardError as e:
                                log.exception(
                                    'An error occurred while listing layers and fields for '
                                    '{service_type} service {service_folder}/{service_name} on '
                                    'ArcGIS Server instance {ags_instance} (Connection File: {ags_connection})'
                                    .format(**service_props)
                                )
                                if not warn_on_errors:
                                    raise
                                else:
                                    yield dict(error=e.message, **service_props)
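
# Descriptive note (not in the original source): the needs_index flag computed
# above is a heuristic -- a field is reported as needing an attribute index
# when it has no index but is used in a definition query, a label class
# expression or label class SQL query, is the layer's symbology field, or is
# the geometry field.
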
def analyze_services(
    included_envs=asterisk_tuple, excluded_envs=empty_tuple,
    included_service_folders=asterisk_tuple, excluded_service_folders=empty_tuple,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    warn_on_errors=True,
    config_dir=default_config_dir
):
    import arcpy
    arcpy.env.overwriteOutput = True

    user_config = get_config('userconfig', config_dir)
    env_names = superfilter(user_config['environments'].keys(), included_envs, excluded_envs)
    for env_name in env_names:
        log.debug('Analyzing services for environment {}'.format(env_name))
        env = user_config['environments'][env_name]
        for ags_instance in superfilter(env['ags_instances'], included_instances, excluded_instances):
            ags_instance_props = user_config['environments'][env_name]['ags_instances'][ags_instance]
            ags_connection = ags_instance_props['ags_connection']
            server_url = ags_instance_props['url']
            token = ags_instance_props['token']
            proxies = ags_instance_props.get('proxies') or user_config.get('proxies')
            with create_session(server_url, proxies=proxies) as session:
                service_folders = list_service_folders(server_url, token, session=session)
                for service_folder in superfilter(service_folders, included_service_folders, excluded_service_folders):
                    for service in list_services(server_url, token, service_folder, session=session):
                        service_name = service['serviceName']
                        service_type = service['type']
                        if (
                            service_type in ('MapServer', 'GeocodeServer') and
                            superfilter((service_name,), included_services, excluded_services)
                        ):
                            service_props = dict(
                                env_name=env_name,
                                ags_instance=ags_instance,
                                service_folder=service_folder,
                                service_name=service_name,
                                service_type=service_type
                            )
                            try:
                                service_manifest = get_service_manifest(server_url, token, service_name, service_folder, service_type, session=session)
                                service_props['file_path'] = file_path = service_manifest['resources'][0]['onPremisePath']
                                file_type = {
                                    'MapServer': 'MXD',
                                    'GeocodeServer': 'Locator'
                                }[service_type]
                                log.info(
                                    'Analyzing {} service {}/{} on ArcGIS Server instance {} (Connection File: {}, {} Path: {})'
                                    .format(service_type, service_folder, service_name, ags_instance, ags_connection, file_type, file_path)
                                )
                                if not arcpy.Exists(file_path):
                                    raise RuntimeError('{} {} does not exist!'.format(file_type, file_path))
                                try:
                                    tempdir = tempfile.mkdtemp()
                                    log.debug('Temporary directory created: {}'.format(tempdir))
                                    sddraft = os.path.join(tempdir, service_name + '.sddraft')
                                    log.debug('Creating SDDraft file: {}'.format(sddraft))
                                    if service_type == 'MapServer':
                                        mxd = open_mxd(file_path)
                                        analysis = arcpy.mapping.CreateMapSDDraft(
                                            mxd, sddraft, service_name, 'FROM_CONNECTION_FILE',
                                            ags_connection, False, service_folder
                                        )
                                    elif service_type == 'GeocodeServer':
                                        locator_path = file_path
                                        analysis = arcpy.CreateGeocodeSDDraft(
                                            locator_path, sddraft, service_name, 'FROM_CONNECTION_FILE',
                                            ags_connection, False, service_folder
                                        )
                                    else:
                                        raise RuntimeError('Unsupported service type {}!'.format(service_type))
                                    for key, log_method in (
                                        ('messages', log.info),
                                        ('warnings', log.warn),
                                        ('errors', log.error)
                                    ):
                                        items = analysis[key]
                                        severity = key[:-1].title()
                                        if items:
                                            log.info('----' + key.upper() + '---')
                                            for ((message, code), layerlist) in items.iteritems():
                                                code = '{:05d}'.format(code)
                                                log_method(' {} (CODE {})'.format(message, code))
                                                code = '="{}"'.format(code)
                                                issue_props = dict(severity=severity, code=code, message=message)
                                                if not layerlist:
                                                    yield dict(chain(service_props.iteritems(), issue_props.iteritems()))
                                                else:
                                                    log_method(' applies to:')
                                                    for layer in layerlist:
                                                        layer_name = layer.longName if hasattr(layer, 'longName') else layer.name
                                                        layer_props = dict(
                                                            dataset_name=layer.datasetName,
                                                            workspace_path=layer.workspacePath,
                                                            layer_name=layer_name
                                                        )
                                                        log_method(' {}'.format(layer_name))
                                                        yield dict(chain(service_props.iteritems(), issue_props.iteritems(), layer_props.iteritems()))
                                                    log_method('')
                                    if analysis['errors']:
                                        error_message = 'Analysis failed for service {}/{} at {:%#m/%#d/%y %#I:%M:%S %p}' \
                                            .format(service_folder, service_name, datetime.datetime.now())
                                        log.error(error_message)
                                        raise RuntimeError(error_message, analysis['errors'])
                                finally:
                                    log.debug('Cleaning up temporary directory: {}'.format(tempdir))
                                    rmtree(tempdir, ignore_errors=True)
                            except StandardError as e:
                                log.exception(
                                    'An error occurred while analyzing {} service {}/{} on ArcGIS Server instance {}'
                                    .format(service_type, service_folder, service_name, ags_instance)
                                )
                                if not warn_on_errors:
                                    raise
                                else:
                                    yield dict(severity='Error', message=e.message, **service_props)
def publish_env(
    config, env_name, user_config,
    included_instances=asterisk_tuple, excluded_instances=empty_tuple,
    included_services=asterisk_tuple, excluded_services=empty_tuple,
    copy_source_files_from_staging_folder=True,
    cleanup_services=False,
    service_prefix='',
    service_suffix='',
    warn_on_publishing_errors=False,
    warn_on_validation_errors=False,
    create_backups=True,
    update_timestamps=True
):
    env = config['environments'][env_name]
    source_dir = env['source_dir']
    ags_instances = superfilter(env['ags_instances'], included_instances, excluded_instances)
    services = superfilter(config['services'], included_services, excluded_services)
    service_folder = config.get('service_folder', os.path.basename(source_dir))
    default_service_properties = config.get('default_service_properties')
    env_service_properties = env.get('service_properties', {})
    data_source_mappings = env.get('data_source_mappings', {})
    staging_dir = env.get('staging_dir')

    if not default_service_properties:
        log.debug('No default service properties specified')

    if len(ags_instances) == 0:
        raise RuntimeError('No publishable instances specified!')

    if len(services) == 0:
        raise RuntimeError('No publishable services specified!')

    log.info(
        'Publishing environment: {}, service folder: {}, ArcGIS Server instances: {}'
        .format(env_name, service_folder, ', '.join(ags_instances))
    )

    source_info, errors = get_source_info(
        services, source_dir, staging_dir, default_service_properties, env_service_properties
    )
    if len(errors) > 0:
        message = 'One or more errors occurred while validating the {} environment for service folder {}:\n{}' \
            .format(env_name, service_folder, '\n'.join(errors))
        if warn_on_validation_errors:
            log.warn(message)
        else:
            raise RuntimeError(message)

    initial_site_modes = get_site_modes(ags_instances, env_name, user_config)
    make_sites_editable(ags_instances, env_name, user_config, initial_site_modes)

    try:
        for result in publish_services(
            services, user_config, ags_instances, env_name,
            default_service_properties, env_service_properties,
            source_info, source_dir, staging_dir, data_source_mappings, service_folder,
            copy_source_files_from_staging_folder,
            service_prefix, service_suffix,
            warn_on_publishing_errors,
            create_backups,
            update_timestamps
        ):
            yield result
    finally:
        restore_site_modes(ags_instances, env_name, user_config, initial_site_modes)

    if cleanup_services:
        for ags_instance in ags_instances:
            cleanup_instance(ags_instance, env_name, config, user_config)
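
# Note on the try/finally above (descriptive comment, not in the original
# source): make_sites_editable takes each target site out of read-only mode
# before publishing, and restore_site_modes puts the captured initial mode back
# even if publish_services raises. Because publish_env is a generator, the
# finally block also runs when the caller stops iterating early and the
# generator is closed.
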