def main():
    """
    Main entry point.

    Gathers Lambda facts according to the ``query`` parameter and exits
    via ``module.exit_json``.

    :return dict: ansible facts
    """
    argument_spec = dict(
        function_name=dict(required=False, default=None, aliases=['function', 'name']),
        query=dict(required=False,
                   choices=['aliases', 'all', 'config', 'mappings', 'policy', 'versions', 'tags'],
                   default='all'),
        event_source_arn=dict(required=False, default=None),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=[],
        required_together=[]
    )

    # validate function_name if present
    function_name = module.params['function_name']
    if function_name:
        # The pattern accepts word characters, hyphens and colons (colons
        # allow alias/version-qualified names); the message below matches it.
        if not re.search(r"^[\w\-:]+$", function_name):
            module.fail_json(
                msg='Function name {0} is invalid. Names must contain only alphanumeric characters, '
                    'underscores, hyphens and colons.'.format(function_name)
            )
        # AWS limits unqualified function names to 64 characters.
        if len(function_name) > 64:
            module.fail_json(msg='Function name "{0}" exceeds 64 character limit'.format(function_name))

    client = module.client('lambda', retry_decorator=AWSRetry.jittered_backoff())

    # Map each query choice onto the module-level function that gathers it.
    invocations = dict(
        aliases='alias_details',
        all='all_details',
        config='config_details',
        mappings='mapping_details',
        policy='policy_details',
        versions='version_details',
        tags='tags_details',
    )

    this_module_function = globals()[invocations[module.params['query']]]
    all_facts = fix_return(this_module_function(client, module))

    results = dict(function=all_facts, changed=False)

    if module.check_mode:
        results['msg'] = 'Check mode set but ignored for fact gathering only.'

    module.exit_json(**results)
def main():
    """
    Route 53 Resolver info module entry point.

    Exactly one of the mutually exclusive ``list_*`` options selects which
    resolver resources to enumerate; results are paginated and returned
    snake_cased via ``module.exit_json``.
    """
    argument_spec = dict(
        id=dict(required=False, aliases=['firewall_rule_group_id', 'resolver_endpoint_id']),
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_firewall_configs=dict(required=False, type='bool'),
        list_firewall_domain_lists=dict(required=False, type='bool'),
        list_firewall_rule_groups=dict(required=False, type='bool'),
        list_firewall_rules=dict(required=False, type='bool'),
        list_resolver_dnssec_configs=dict(required=False, type='bool'),
        list_resolver_endpoint_ip_addresses=dict(required=False, type='bool'),
        list_resolver_endpoints=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            # these operations are scoped to a single resource id
            ('list_firewall_rules', True, ['id']),
            ('list_resolver_endpoint_ip_addresses', True, ['id']),
        ),
        mutually_exclusive=[
            (
                'list_firewall_configs',
                'list_firewall_domain_lists',
                'list_firewall_rule_groups',
                'list_firewall_rules',
                'list_resolver_dnssec_configs',
                'list_resolver_endpoint_ip_addresses',
                'list_resolver_endpoints',
            )
        ],
    )

    client = module.client('route53resolver', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _route53resolver(client, module)
    if module.params['list_firewall_configs']:
        module.exit_json(firewall_configs=aws_response_list_parser(paginate, it, 'FirewallConfigs'))
    elif module.params['list_firewall_domain_lists']:
        module.exit_json(firewall_domain_lists=aws_response_list_parser(paginate, it, 'FirewallDomainLists'))
    elif module.params['list_firewall_rule_groups']:
        module.exit_json(firewall_rule_groups=aws_response_list_parser(paginate, it, 'FirewallRuleGroups'))
    elif module.params['list_firewall_rules']:
        module.exit_json(firewall_rules=aws_response_list_parser(paginate, it, 'FirewallRules'))
    elif module.params['list_resolver_dnssec_configs']:
        module.exit_json(resolver_dnssec_configs=aws_response_list_parser(paginate, it, 'ResolverDnssecConfigs'))
    elif module.params['list_resolver_endpoint_ip_addresses']:
        module.exit_json(resolver_endpoint_ip_addresses=aws_response_list_parser(paginate, it, 'IpAddresses'))
    elif module.params['list_resolver_endpoints']:
        module.exit_json(resolver_endpoints=aws_response_list_parser(paginate, it, 'ResolverEndpoints'))
    else:
        module.fail_json("unknown options are passed")
def main():
    """Create or delete a Route 53 hosted zone and report the outcome."""
    global module
    global client

    argument_spec = dict(
        zone=dict(required=True),
        state=dict(default='present', choices=['present', 'absent']),
        vpc_id=dict(default=None),
        vpc_region=dict(default=None),
        comment=dict(default=''),
        hosted_zone_id=dict(),
        delegation_set_id=dict(),
        tags=dict(type='dict'),
        purge_tags=dict(type='bool', default=False),
    )

    # A reusable delegation set cannot be combined with a private
    # (VPC-scoped) zone, so the two option groups are mutually exclusive.
    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        mutually_exclusive=[
            ['delegation_set_id', 'vpc_id'],
            ['delegation_set_id', 'vpc_region'],
        ],
        supports_check_mode=True,
    )

    zone_name = module.params.get('zone').lower()
    desired_state = module.params.get('state').lower()

    # Route 53 zone names are fully qualified; ensure the trailing dot.
    if not zone_name.endswith('.'):
        zone_name += "."

    # Presence of both a VPC id and a region marks the zone as private.
    is_private = bool(module.params.get('vpc_id') and module.params.get('vpc_region'))

    client = module.client('route53', retry_decorator=AWSRetry.jittered_backoff())

    matching = find_zones(zone_name, is_private)
    if desired_state == 'present':
        changed, result = create(matching_zones=matching)
    elif desired_state == 'absent':
        changed, result = delete(matching_zones=matching)

    # Dict results are flattened into the module output for convenience.
    if isinstance(result, dict):
        module.exit_json(changed=changed, result=result, **result)
    else:
        module.exit_json(changed=changed, result=result)
def existing_templates(module):
    """
    Look up the launch template selected by ``template_id``/``template_name``.

    :param module: the AnsibleAWSModule (supplies params and failure helpers)
    :return: tuple ``(template, versions)`` — ``(None, [])`` when a named
        template does not exist; otherwise the matching LaunchTemplate dict
        and its list of LaunchTemplateVersions.
    Calls ``module.fail_json_aws`` on malformed ids, missing ids, or other
    API errors.
    """
    ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
    matches = None
    try:
        if module.params.get('template_id'):
            matches = ec2.describe_launch_templates(
                LaunchTemplateIds=[module.params.get('template_id')], aws_retry=True)
        elif module.params.get('template_name'):
            matches = ec2.describe_launch_templates(
                LaunchTemplateNames=[module.params.get('template_name')], aws_retry=True)
    except is_boto3_error_code('InvalidLaunchTemplateName.NotFoundException') as e:
        # no named template was found, return nothing/empty versions
        return None, []
    except is_boto3_error_code('InvalidLaunchTemplateId.Malformed') as e:  # pylint: disable=duplicate-except
        # BUGFIX: the module parameter is 'template_id' (there is no
        # 'launch_template_id' key, so the old lookup always printed None).
        module.fail_json_aws(
            e,
            msg='Launch template with ID {0} is not a valid ID. It should start with `lt-....`'.format(
                module.params.get('template_id')))
    except is_boto3_error_code('InvalidLaunchTemplateId.NotFoundException') as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(
            e,
            msg='Launch template with ID {0} could not be found, please supply a name '
                'instead so that a new template can be created'.format(module.params.get('template_id')))
    except (ClientError, BotoCoreError, WaiterError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(
            e, msg='Could not check existing launch templates. This may be an IAM permission problem.')
    else:
        template = matches['LaunchTemplates'][0]
        template_id = template['LaunchTemplateId']
        try:
            return template, ec2.describe_launch_template_versions(
                LaunchTemplateId=template_id, aws_retry=True)['LaunchTemplateVersions']
        except (ClientError, BotoCoreError, WaiterError) as e:
            module.fail_json_aws(
                e,
                msg='Could not find launch template versions for {0} (ID: {1}).'.format(
                    template['LaunchTemplateName'], template_id))
def get_account_info(module):
    """Return the account information (account id and partition) we are currently working on.

    get_account_info tries to find out the account that we are working on.
    It's not guaranteed that this will be easy so we try in several
    different ways.  Giving either IAM or STS privileges to the account
    should be enough to permit this.
    """
    account_id = None
    partition = None
    try:
        # Preferred path: STS GetCallerIdentity needs no extra permissions.
        sts_client = module.client('sts', retry_decorator=AWSRetry.jittered_backoff())
        caller_id = sts_client.get_caller_identity(aws_retry=True)
        account_id = caller_id.get('Account')
        # ARN format is arn:<partition>:<service>:... — field 1 is the partition.
        partition = caller_id.get('Arn').split(':')[1]
    except (BotoCoreError, ClientError):
        # Fall back to IAM GetUser and parse the user's ARN instead.
        try:
            iam_client = module.client('iam', retry_decorator=AWSRetry.jittered_backoff())
            arn, partition, service, reg, account_id, resource = iam_client.get_user(aws_retry=True)['User']['Arn'].split(':')
        except is_boto3_error_code('AccessDenied') as e:
            # Even an AccessDenied error usually embeds the caller's ARN in
            # its message text, so try to extract account/partition from it.
            try:
                except_msg = to_native(e.message)
            except AttributeError:
                # botocore exceptions may not expose .message; use str(e).
                except_msg = to_native(e)
            m = re.search(r"arn:(aws(-([a-z\-]+))?):iam::([0-9]{12,32}):\w+/", except_msg)
            if m is None:
                module.fail_json_aws(e, msg="getting account information")
            account_id = m.group(4)
            partition = m.group(1)
        except (BotoCoreError, ClientError) as e:  # pylint: disable=duplicate-except
            module.fail_json_aws(e, msg="getting account information")

    return account_id, partition
def main():
    """
    SES info module entry point.

    Exactly one of the mutually exclusive ``list_*`` options selects which
    SES resources to enumerate; results are paginated and snake_cased.
    """
    argument_spec = dict(
        identity_type=dict(required=False, choices=['EmailAddress', 'Domain'], default='EmailAddress'),
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_configuration_sets=dict(required=False, type='bool'),
        list_custom_verification_email_templates=dict(required=False, type='bool'),
        list_identities=dict(required=False, type='bool'),
        list_receipt_rule_sets=dict(required=False, type='bool'),
        list_templates=dict(required=False, type='bool'),
        list_resolver_endpoint_ip_addresses=dict(required=False, type='bool'),
        list_resolver_endpoints=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(('list_identities', True, ['identity_type']), ),
        mutually_exclusive=[(
            'list_configuration_sets',
            'list_custom_verification_email_templates',
            'list_identities',
            'list_receipt_rule_sets',
            'list_templates',
        )],
    )

    client = module.client('ses', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _ses(client, module)
    if module.params['list_configuration_sets']:
        module.exit_json(configuration_sets=aws_response_list_parser(
            paginate, it, 'ConfigurationSets'))
    elif module.params['list_custom_verification_email_templates']:
        module.exit_json(
            custom_verification_email_templates=aws_response_list_parser(
                paginate, it, 'CustomVerificationEmailTemplates'))
    elif module.params['list_identities']:
        module.exit_json(
            identities=aws_response_list_parser(paginate, it, 'Identities'))
    elif module.params['list_receipt_rule_sets']:
        module.exit_json(receipt_rule_sets=aws_response_list_parser(
            paginate, it, 'RuleSets'))
    elif module.params['list_templates']:
        module.exit_json(templates=aws_response_list_parser(
            paginate, it, 'TemplatesMetadata'))
    else:
        module.fail_json("unknown options are passed")
def main():
    """
    Forecast info module entry point.

    Dispatches on the mutually exclusive ``list_*`` options; with no option
    set it falls through to listing forecasts.
    """
    argument_spec = dict(
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_dataset_groups=dict(required=False, type='bool'),
        list_dataset_import_jobs=dict(required=False, type='bool'),
        list_datasets=dict(required=False, type='bool'),
        list_forecast_export_jobs=dict(required=False, type='bool'),
        list_predictor_backtest_export_jobs=dict(required=False, type='bool'),
        list_predictors=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(),
        mutually_exclusive=[(
            'list_dataset_groups',
            'list_dataset_import_jobs',
            'list_datasets',
            'list_forecast_export_jobs',
            'list_predictor_backtest_export_jobs',
            'list_predictors',
        )],
    )

    client = module.client('forecast', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _forecast(client, module)
    if module.params['list_dataset_groups']:
        module.exit_json(dataset_groups=aws_response_list_parser(
            paginate, it, 'DatasetGroups'))
    elif module.params['list_dataset_import_jobs']:
        module.exit_json(dataset_import_jobs=aws_response_list_parser(
            paginate, it, 'DatasetImportJobs'))
    elif module.params['list_datasets']:
        module.exit_json(
            datasets=aws_response_list_parser(paginate, it, 'Datasets'))
    elif module.params['list_forecast_export_jobs']:
        module.exit_json(forecast_export_jobs=aws_response_list_parser(
            paginate, it, 'ForecastExportJobs'))
    elif module.params['list_predictor_backtest_export_jobs']:
        module.exit_json(
            predictor_backtest_export_jobs=aws_response_list_parser(
                paginate, it, 'PredictorBacktestExportJobs'))
    elif module.params['list_predictors']:
        module.exit_json(
            predictors=aws_response_list_parser(paginate, it, 'Predictors'))
    else:
        # default action when no list_* option was requested
        module.exit_json(
            forecasts=aws_response_list_parser(paginate, it, 'Forecasts'))
def main():
    """
    CloudWatch info module entry point.

    Dispatches on describe_alarms / get_dashboard /
    describe_anomaly_detectors; with no option set it lists dashboards.
    """
    argument_spec = dict(
        # Ansible type names are strings: the Python builtins would
        # mis-convert input (list("abc") -> ['a', 'b', 'c'];
        # bool("false") -> True).
        alarm_names=dict(required=False, type='list', elements='str', default=[]),
        dashboard_name=dict(required=False),
        alarm_types=dict(required=False, type='list', elements='str', default=[]),
        alarm_state=dict(required=False, choices=['OK', 'ALARM', 'INSUFFICIENT_DATA'], default='OK'),
        name_space=dict(required=False),
        metric_name=dict(required=False),
        describe_alarms=dict(required=False, type='bool'),
        get_dashboard=dict(required=False, type='bool'),
        describe_anomaly_detectors=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            ('describe_alarms', True, ['alarm_names', 'alarm_types', 'alarm_state']),
            ('get_dashboard', True, ['dashboard_name']),
            ('describe_anomaly_detectors', True, ['name_space', 'metric_name']),
        ),
        mutually_exclusive=[('describe_alarms', 'get_dashboard', 'describe_anomaly_detectors'),
                            ('alarm_names', 'dashboard_name')],
    )

    client = module.client('cloudwatch', retry_decorator=AWSRetry.exponential_backoff())

    _it, paginate = _cloudtrail(client, module)
    if module.params['describe_alarms']:
        composite_alarms = aws_response_list_parser(paginate, _it, 'CompositeAlarms')
        metric_alarms = aws_response_list_parser(paginate, _it, 'MetricAlarms')
        module.exit_json(alarms={
            "composite_alarms": composite_alarms,
            "metric_alarms": metric_alarms
        })
    elif module.params['get_dashboard']:
        module.exit_json(dashboard=camel_dict_to_snake_dict(_it))
    elif module.params['describe_anomaly_detectors']:
        module.exit_json(anomaly_detectors=aws_response_list_parser(
            paginate, _it, 'AnomalyDetectors'))
    else:
        # default action when no option was requested
        module.exit_json(dashboards=aws_response_list_parser(
            paginate, _it, 'DashboardEntries'))
def delete_template(module):
    """
    Delete the selected launch template and its non-default versions.

    :param module: the AnsibleAWSModule (supplies params and failure helpers)
    :return: dict with 'changed' plus, when a template existed, the deleted
        template ('deleted_template') and removed versions
        ('deleted_versions'), all snake_cased.
    """
    ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
    template, template_versions = existing_templates(module)
    deleted_versions = []
    if template or template_versions:
        # Only non-default versions can be deleted individually; the default
        # version is removed together with the template itself.
        non_default_versions = [
            to_text(t['VersionNumber'])
            for t in template_versions if not t['DefaultVersion']
        ]
        if non_default_versions:
            try:
                # BUGFIX: pass aws_retry=True so the jittered_backoff retry
                # decorator is actually engaged (matches existing_templates).
                v_resp = ec2.delete_launch_template_versions(
                    LaunchTemplateId=template['LaunchTemplateId'],
                    Versions=non_default_versions,
                    aws_retry=True,
                )
                if v_resp['UnsuccessfullyDeletedLaunchTemplateVersions']:
                    module.warn(
                        'Failed to delete template versions {0} on launch template {1}'.format(
                            v_resp['UnsuccessfullyDeletedLaunchTemplateVersions'],
                            template['LaunchTemplateId'],
                        ))
                deleted_versions = [
                    camel_dict_to_snake_dict(v)
                    for v in v_resp['SuccessfullyDeletedLaunchTemplateVersions']
                ]
            except (ClientError, BotoCoreError) as e:
                module.fail_json_aws(
                    e,
                    msg="Could not delete existing versions of the launch template {0}".format(
                        template['LaunchTemplateId']))
        try:
            resp = ec2.delete_launch_template(
                LaunchTemplateId=template['LaunchTemplateId'],
                aws_retry=True,
            )
        except (ClientError, BotoCoreError) as e:
            module.fail_json_aws(
                e, msg="Could not delete launch template {0}".format(template['LaunchTemplateId']))
        return {
            'deleted_versions': deleted_versions,
            'deleted_template': camel_dict_to_snake_dict(resp['LaunchTemplate']),
            'changed': True,
        }
    else:
        # nothing matched: no API calls were made, nothing changed
        return {'changed': False}
def main():
    """
    DataBrew info module entry point.

    Dispatches on the mutually exclusive ``list_*`` options; with no option
    set it falls through to listing projects.
    """
    argument_spec = dict(
        project_name=dict(required=False),
        dataset_name=dict(required=False),
        job_name=dict(required=False),
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_datasets=dict(required=False, type='bool'),
        list_jobs=dict(required=False, type='bool'),
        list_job_runs=dict(required=False, type='bool'),
        list_recipes=dict(required=False, type='bool'),
        list_schedules=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            ('list_jobs', True, ['project_name', 'dataset_name']),
            ('list_job_runs', True, ['job_name']),
            ('list_schedules', True, ['job_name']),
        ),
        mutually_exclusive=[(
            'list_datasets',
            'list_jobs',
            'list_job_runs',
            'list_recipes',
            'list_schedules',
        )],
    )

    client = module.client('databrew', retry_decorator=AWSRetry.exponential_backoff())

    _it, paginate = _databrew(client, module)
    if module.params['list_datasets']:
        module.exit_json(
            datasets=aws_response_list_parser(paginate, _it, 'Datasets'))
    elif module.params['list_jobs']:
        module.exit_json(jobs=aws_response_list_parser(paginate, _it, 'Jobs'))
    elif module.params['list_job_runs']:
        module.exit_json(
            job_runs=aws_response_list_parser(paginate, _it, 'JobRuns'))
    elif module.params['list_recipes']:
        module.exit_json(
            recipes=aws_response_list_parser(paginate, _it, 'Recipes'))
    elif module.params['list_schedules']:
        module.exit_json(
            schedules=aws_response_list_parser(paginate, _it, 'Schedules'))
    else:
        # default action when no list_* option was requested
        module.exit_json(
            projects=aws_response_list_parser(paginate, _it, 'Projects'))
def main():
    """List all Cost & Usage Report definitions for the account."""
    module = AnsibleAWSModule(
        argument_spec=dict(),
        required_if=(),
        mutually_exclusive=[],
    )

    client = module.client('cur', retry_decorator=AWSRetry.exponential_backoff())
    _it, paginate = _cur(client, module)

    definitions = aws_response_list_parser(paginate, _it, 'ReportDefinitions')
    module.exit_json(report_definitions=definitions)
def get_eips_details(module):
    """Describe Elastic IPs matching the module's filters.

    Returns the snake_cased address list with boto3 tag lists converted
    into plain key/value dicts. Fails the module on API errors.
    """
    ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
    try:
        raw = ec2.describe_addresses(
            aws_retry=True,
            Filters=ansible_dict_to_boto3_filter_list(module.params.get("filters")))
    except (BotoCoreError, ClientError) as e:
        module.fail_json_aws(e, msg="Error retrieving EIPs")

    addresses = camel_dict_to_snake_dict(raw)['addresses']
    # Flatten boto3's [{'key': ..., 'value': ...}] tag lists into dicts.
    for entry in addresses:
        if 'tags' in entry:
            entry['tags'] = boto3_tag_list_to_ansible_dict(entry['tags'])
    return addresses
def main():
    """Gather information about VPC route tables."""
    module = AnsibleAWSModule(
        argument_spec=dict(filters=dict(default=None, type='dict')),
        supports_check_mode=True)

    # Warn users still calling the module by its deprecated *_facts name.
    if module._name == 'ec2_vpc_route_table_facts':
        module.deprecate(
            "The 'ec2_vpc_route_table_facts' module has been renamed to 'ec2_vpc_route_table_info'",
            date='2021-12-01', collection_name='community.aws')

    connection = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff(retries=10))
    list_ec2_vpc_route_tables(connection, module)
def main():
    """Gather information about VPC network ACLs."""
    argument_spec = dict(
        nacl_ids=dict(default=[], type='list', aliases=['nacl_id'], elements='str'),
        filters=dict(default={}, type='dict'),
    )

    module = AnsibleAWSModule(argument_spec=argument_spec,
                              supports_check_mode=True)

    ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
    list_ec2_vpc_nacls(ec2, module)
def get_elb_listener_rules(connection, module, listener_arn):
    """
    Get rules for a particular ELB listener using the listener ARN.

    :param connection: AWS boto3 elbv2 connection
    :param module: Ansible module
    :param listener_arn: ARN of the ELB listener
    :return: boto3 ELB rules list
    """
    # Wrap the API call in the standard jittered retry decorator.
    describe_with_retry = AWSRetry.jittered_backoff()(connection.describe_rules)
    try:
        return describe_with_retry(ListenerArn=listener_arn)['Rules']
    except (BotoCoreError, ClientError) as e:
        module.fail_json_aws(e)
def main():
    """
    EMR info module entry point.

    Dispatches on the mutually exclusive ``list_*`` options; with no option
    set it falls through to listing clusters.
    """
    argument_spec = dict(
        id=dict(required=False),
        # Ansible type names are strings: the Python builtins would
        # mis-convert input (list("abc") -> ['a', 'b', 'c'];
        # bool("false") -> True).
        cluster_states=dict(required=False, type='list', elements='str', default=[]),
        list_bootstrap_actions=dict(required=False, type='bool'),
        list_instance_fleets=dict(required=False, type='bool'),
        list_instance_groups=dict(required=False, type='bool'),
        list_steps=dict(required=False, type='bool'),
        list_studios=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            # these operations are scoped to a single cluster id
            ('list_bootstrap_actions', True, ['id']),
            ('list_instance_fleets', True, ['id']),
            ('list_steps', True, ['id']),
        ),
        mutually_exclusive=[(
            'list_bootstrap_actions',
            'list_instance_fleets',
            'list_instance_groups',
            'list_steps',
            'list_studios',
        )],
    )

    client = module.client('emr', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _emr(client, module)
    if module.params['list_bootstrap_actions']:
        module.exit_json(bootstrap_actions=aws_response_list_parser(
            paginate, it, 'BootstrapActions'))
    elif module.params['list_instance_fleets']:
        module.exit_json(instance_fleets=aws_response_list_parser(
            paginate, it, 'InstanceFleets'))
    elif module.params['list_instance_groups']:
        module.exit_json(instance_groups=aws_response_list_parser(
            paginate, it, 'InstanceGroups'))
    elif module.params['list_steps']:
        module.exit_json(steps=aws_response_list_parser(paginate, it, 'Steps'))
    elif module.params['list_studios']:
        module.exit_json(
            studios=aws_response_list_parser(paginate, it, 'Studios'))
    else:
        # default action when no list_* option was requested
        module.exit_json(
            clusters=aws_response_list_parser(paginate, it, 'Clusters'))
def main():
    """Create, update or delete an ELBv2 target group."""
    # Protocol choices are accepted in either case; build the upper-case
    # variants from the canonical lower-case list.
    base_protocols = ['http', 'https', 'tcp', 'tls', 'udp', 'tcp_udp']
    protocols_list = base_protocols + [p.upper() for p in base_protocols]

    argument_spec = dict(
        deregistration_delay_timeout=dict(type='int'),
        health_check_protocol=dict(choices=protocols_list),
        health_check_port=dict(),
        health_check_path=dict(),
        health_check_interval=dict(type='int'),
        health_check_timeout=dict(type='int'),
        healthy_threshold_count=dict(type='int'),
        modify_targets=dict(default=True, type='bool'),
        name=dict(required=True),
        port=dict(type='int'),
        protocol=dict(choices=protocols_list),
        purge_tags=dict(default=True, type='bool'),
        stickiness_enabled=dict(type='bool'),
        stickiness_type=dict(),
        stickiness_lb_cookie_duration=dict(type='int'),
        state=dict(required=True, choices=['present', 'absent']),
        successful_response_codes=dict(),
        tags=dict(default={}, type='dict'),
        target_type=dict(choices=['instance', 'ip', 'lambda']),
        targets=dict(type='list', elements='dict'),
        unhealthy_threshold_count=dict(type='int'),
        vpc_id=dict(),
        wait_timeout=dict(type='int', default=200),
        wait=dict(type='bool', default=False),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=[
            # instance/ip target groups need these to register targets
            ['target_type', 'instance', ['protocol', 'port', 'vpc_id']],
            ['target_type', 'ip', ['protocol', 'port', 'vpc_id']],
        ])

    # Default the target type after validation so the required_if rules
    # above only fire for an explicitly supplied value.
    if module.params.get('target_type') is None:
        module.params['target_type'] = 'instance'

    connection = module.client('elbv2', retry_decorator=AWSRetry.jittered_backoff(retries=10))

    if module.params.get('state') == 'present':
        create_or_update_target_group(connection, module)
    else:
        delete_target_group(connection, module)
def main():
    """
    MSK (Kafka) info module entry point.

    Exactly one of the mutually exclusive ``list_*`` options selects which
    MSK resources to enumerate; results are paginated and snake_cased.
    """
    argument_spec = dict(
        arn=dict(required=False, aliases=['config_arn', 'cluster_arn']),
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_cluster_operations=dict(required=False, type='bool'),
        list_clusters=dict(required=False, type='bool'),
        list_configuration_revisions=dict(required=False, type='bool'),
        list_configurations=dict(required=False, type='bool'),
        list_nodes=dict(required=False, type='bool'),
        list_scram_secrets=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            # these operations are scoped to a single cluster/config ARN
            ('list_cluster_operations', True, ['arn']),
            ('list_configuration_revisions', True, ['arn']),
            ('list_nodes', True, ['arn']),
            ('list_scram_secrets', True, ['arn']),
        ),
        mutually_exclusive=[
            (
                'list_cluster_operations',
                'list_clusters',
                'list_configuration_revisions',
                'list_configurations',
                'list_nodes',
                'list_scram_secrets',
            )
        ],
    )

    client = module.client('kafka', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _kafka(client, module)
    if module.params['list_cluster_operations']:
        module.exit_json(cluster_operations=aws_response_list_parser(paginate, it, 'ClusterOperationInfoList'))
    elif module.params['list_clusters']:
        module.exit_json(clusters=aws_response_list_parser(paginate, it, 'ClusterInfoList'))
    elif module.params['list_configuration_revisions']:
        module.exit_json(configuration_revisions=aws_response_list_parser(paginate, it, 'Revisions'))
    elif module.params['list_configurations']:
        module.exit_json(configurations=aws_response_list_parser(paginate, it, 'Configurations'))
    elif module.params['list_nodes']:
        module.exit_json(nodes=aws_response_list_parser(paginate, it, 'NodeInfoList'))
    elif module.params['list_scram_secrets']:
        module.exit_json(scram_secrets=aws_response_list_parser(paginate, it, 'SecretArnList'))
    else:
        module.fail_json("unknown options are passed")
def get_target_group(connection, module, retry_missing=False):
    """
    Fetch the target group named in the module parameters.

    Returns the first matching TargetGroup dict, or None when the group
    does not exist. With retry_missing=True the 'TargetGroupNotFound'
    error is retried as well (useful immediately after creation).
    """
    extra_codes = ['TargetGroupNotFound'] if retry_missing else []
    retry_wrapper = AWSRetry.jittered_backoff(retries=10, catch_extra_error_codes=extra_codes)
    try:
        paginator = connection.get_paginator('describe_target_groups').paginate(
            Names=[module.params.get("name")])
        result = retry_wrapper(paginator.build_full_result)()
    except is_boto3_error_code('TargetGroupNotFound'):
        # a missing group is an expected outcome, not an error
        return None
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, msg="Couldn't get target group")

    return result['TargetGroups'][0]
def main():
    """Gather information about EC2 spot instance requests."""
    module = AnsibleAWSModule(
        argument_spec=dict(
            filters=dict(default={}, type='dict'),
            spot_instance_request_ids=dict(default=[], type='list', elements='str'),
        ),
        supports_check_mode=True,
    )

    try:
        ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg='Failed to connect to AWS')

    describe_spot_instance_requests(ec2, module)
def main():
    """Gather information about RDS instances."""
    module = AnsibleAWSModule(
        argument_spec=dict(
            db_instance_identifier=dict(aliases=['id']),
            filters=dict(type='dict'),
        ),
        supports_check_mode=True,
    )

    # Warn users still calling the module by its deprecated *_facts name.
    if module._name == 'rds_instance_facts':
        module.deprecate("The 'rds_instance_facts' module has been renamed to 'rds_instance_info'",
                         date='2021-12-01', collection_name='community.aws')

    rds = module.client('rds', retry_decorator=AWSRetry.jittered_backoff(retries=10))
    module.exit_json(**instance_info(module, rds))
def main():
    """
    RoboMaker info module entry point.

    Exactly one of the mutually exclusive ``list_*`` options selects which
    RoboMaker resources to enumerate; results are paginated and snake_cased.
    """
    argument_spec = dict(
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_deployment_jobs=dict(required=False, type='bool'),
        list_fleets=dict(required=False, type='bool'),
        list_robot_applications=dict(required=False, type='bool'),
        list_robots=dict(required=False, type='bool'),
        list_simulation_jobs=dict(required=False, type='bool'),
        list_worlds=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(),
        mutually_exclusive=[(
            'list_deployment_jobs',
            'list_fleets',
            'list_robot_applications',
            'list_robots',
            'list_simulation_jobs',
            'list_worlds',
        )],
    )

    client = module.client('robomaker', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _robomaker(client, module)
    if module.params['list_deployment_jobs']:
        module.exit_json(deployment_jobs=aws_response_list_parser(
            paginate, it, 'deploymentJobs'))
    elif module.params['list_fleets']:
        module.exit_json(
            fleets=aws_response_list_parser(paginate, it, 'fleetDetails'))
    elif module.params['list_robot_applications']:
        module.exit_json(robot_applications=aws_response_list_parser(
            paginate, it, 'robotApplicationSummaries'))
    elif module.params['list_robots']:
        module.exit_json(
            robots=aws_response_list_parser(paginate, it, 'robots'))
    elif module.params['list_simulation_jobs']:
        module.exit_json(simulation_jobs=aws_response_list_parser(
            paginate, it, 'simulationJobSummaries'))
    elif module.params['list_worlds']:
        module.exit_json(
            worlds=aws_response_list_parser(paginate, it, 'worldSummaries'))
    else:
        module.fail_json("unknown options are passed")
def main():
    """Gather information about VPC NAT gateways."""
    module = AnsibleAWSModule(
        argument_spec=dict(
            filters=dict(default={}, type='dict'),
            nat_gateway_ids=dict(default=[], type='list', elements='str'),
        ),
        supports_check_mode=True,
    )

    try:
        ec2 = module.client('ec2', retry_decorator=AWSRetry.jittered_backoff())
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg='Failed to connect to AWS')

    module.exit_json(result=get_nat_gateways(ec2, module))
def main():
    """
    MediaStore data info module entry point.

    Lists items in a MediaStore container when ``list_items`` is set.
    """
    argument_spec = dict(
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_items=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(),
        mutually_exclusive=[('list_items', )],
    )

    client = module.client('mediastore-data', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _mediastore_data(client, module)
    if module.params['list_items']:
        module.exit_json(items=aws_response_list_parser(paginate, it, 'Items'))
    else:
        module.fail_json("unknown options are passed")
def main():
    """
    HealthLake info module entry point.

    Exactly one of the mutually exclusive options selects whether to list
    FHIR datastores or describe a datastore / export job / import job.
    """
    argument_spec = dict(
        id=dict(required=False, aliases=['datastore_id']),
        job_id=dict(required=False),
        datastore_status=dict(
            required=False,
            choices=['CREATING', 'ACTIVE', 'DELETING', 'DELETED'],
            default='ACTIVE'),
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        list_fhir_datastores=dict(required=False, type='bool'),
        describe_fhir_datastore=dict(required=False, type='bool'),
        describe_fhir_export_job=dict(required=False, type='bool'),
        describe_fhir_import_job=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            # describe operations are scoped to a datastore (and job) id
            ('describe_fhir_datastore', True, ['id']),
            ('describe_fhir_export_job', True, ['id', 'job_id']),
            ('describe_fhir_import_job', True, ['id', 'job_id']),
        ),
        mutually_exclusive=[(
            'list_fhir_datastores',
            'describe_fhir_datastore',
            'describe_fhir_export_job',
            'describe_fhir_import_job',
        )],
    )

    client = module.client('healthlake', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _healthlake(client, module)
    if module.params['list_fhir_datastores']:
        module.exit_json(fhir_datastores=aws_response_list_parser(
            paginate, it, 'DatastorePropertiesList'))
    elif module.params['describe_fhir_datastore']:
        module.exit_json(fhir_datastore=camel_dict_to_snake_dict(it))
    elif module.params['describe_fhir_export_job']:
        module.exit_json(fhir_export_job=camel_dict_to_snake_dict(
            it['ExportJobProperties']))
    elif module.params['describe_fhir_import_job']:
        module.exit_json(fhir_import_job=camel_dict_to_snake_dict(
            it['ImportJobProperties']))
    else:
        module.fail_json("unknown options are passed")
def main():
    """Gather information about classic ELBs."""
    module = AnsibleAWSModule(
        argument_spec=dict(names=dict(default=[], type='list', elements='str')),
        supports_check_mode=True)

    elb_client = module.client(
        'elb',
        retry_decorator=AWSRetry.jittered_backoff(retries=MAX_AWS_RETRIES, delay=MAX_AWS_DELAY))

    try:
        elbs = list_elbs(elb_client, module.params.get('names'))
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Failed to get load balancer information.")

    module.exit_json(elbs=elbs)
def main():
    """Describe instance refreshes for an Auto Scaling group."""
    argument_spec = dict(
        name=dict(required=True, type='str'),
        ids=dict(required=False, default=[], elements='str', type='list'),
        # next_token is an opaque pagination token; keep it out of logs
        next_token=dict(required=False, default=None, type='str', no_log=True),
        max_records=dict(required=False, type='int'),
    )

    module = AnsibleAWSModule(argument_spec=argument_spec,
                              supports_check_mode=True)

    asg_client = module.client('autoscaling',
                               retry_decorator=AWSRetry.jittered_backoff(retries=10))
    find_asg_instance_refreshes(asg_client, module)
def main():
    """
    DevOps Guru info module entry point.

    Dispatches on the mutually exclusive describe_* options; with no option
    set it falls through to the resource-collection health result.
    """
    argument_spec = dict(
        id=dict(required=False),
        from_time=dict(required=False),
        resource_collection_type=dict(required=False, default='AWS_CLOUD_FORMATION'),
        # Ansible type names must be the string 'bool', not the Python
        # builtin: bool("false") is True, so the builtin mis-parses input.
        describe_account_overview=dict(required=False, type='bool'),
        describe_anomaly=dict(required=False, type='bool'),
        describe_insight=dict(required=False, type='bool'),
        describe_resource_collection_health=dict(required=False, type='bool'),
        describe_service_integration=dict(required=False, type='bool'),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        required_if=(
            ('describe_account_overview', True, ['from_time']),
            ('describe_anomaly', True, ['id']),
            ('describe_insight', True, ['id']),
        ),
        mutually_exclusive=[
            (
                'describe_account_overview',
                'describe_anomaly',
                'describe_insight',
                'describe_resource_collection_health',
                'describe_service_integration',
            )
        ],
    )

    client = module.client('devops-guru', retry_decorator=AWSRetry.exponential_backoff())

    it, paginate = _devops_guru(client, module)
    if module.params['describe_account_overview']:
        module.exit_json(overview=camel_dict_to_snake_dict(it))
    elif module.params['describe_anomaly']:
        module.exit_json(anomaly=camel_dict_to_snake_dict(it))
    elif module.params['describe_insight']:
        module.exit_json(insight=camel_dict_to_snake_dict(it))
    elif module.params['describe_resource_collection_health']:
        module.exit_json(cloud_formation=aws_response_list_parser(paginate, it, 'CloudFormation'))
    elif module.params['describe_service_integration']:
        module.exit_json(service_integration=camel_dict_to_snake_dict(it['ServiceIntegration']))
    else:
        # default action when no describe_* option was requested
        module.exit_json(health=camel_dict_to_snake_dict(it))
def main():
    """Gather information about classic ELBs (deprecated facts alias)."""
    module = AnsibleAWSModule(
        argument_spec=dict(names=dict(default=[], type='list', elements='str')),
        supports_check_mode=True)

    # Warn users still calling the module by its deprecated *_facts name.
    if module._name == 'elb_classic_lb_facts':
        module.deprecate("The 'elb_classic_lb_facts' module has been renamed to 'elb_classic_lb_info'",
                         date='2021-12-01', collection_name='community.aws')

    elb_client = module.client(
        'elb',
        retry_decorator=AWSRetry.jittered_backoff(retries=MAX_AWS_RETRIES, delay=MAX_AWS_DELAY))

    try:
        elbs = list_elbs(elb_client, module.params.get('names'))
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Failed to get load balancer information.")

    module.exit_json(elbs=elbs)
def main():
    """Gather information about IAM roles (read-only; never changes state)."""
    module = AnsibleAWSModule(
        argument_spec=dict(
            name=dict(aliases=['role_name']),
            path_prefix=dict(),
        ),
        supports_check_mode=True,
        # a single role name and a path prefix cannot be combined
        mutually_exclusive=[['name', 'path_prefix']])

    iam = module.client('iam', retry_decorator=AWSRetry.jittered_backoff())

    roles = describe_iam_roles(module, iam)
    module.exit_json(changed=False, iam_roles=roles)