def __init__(self):
    self.url = None
    self.url_args = ModuleStub(url_argument_spec())
    self.url_args.fail_json = self._error
    self.enable = None
    self.default_output = 'json'
    self._connected = False
def __init__(self):
    self.url = None
    self.url_args = ModuleStub(url_argument_spec(), self._error)
    self.headers = self.DEFAULT_HEADERS
    self._connected = False
    self.default_output = 'json'
def __init__(self):
    self.url = None
    self.url_args = ModuleStub(url_argument_spec(), self._error)
    self.token = None
    self.link = None
    self._connected = False
    self.default_output = 'text'
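# The transport constructors above hand url_argument_spec() to a small ModuleStub
# helper that is defined elsewhere in the shared module code and not shown in these
# snippets. The sketch below is only an illustration of what such a stub could look
# like (class body, constructor signature, and fallback fail_json are assumptions,
# not the library's actual implementation): it mimics the two things the urls
# helpers need, a .params dict seeded with defaults and a fail_json callback.
class ModuleStub(object):
    def __init__(self, argument_spec, fail_json=None):
        # Seed params with the declared defaults from the argument spec.
        self.params = dict()
        for key, value in argument_spec.items():
            self.params[key] = value.get('default')
        if fail_json is not None:
            # Callers usually pass their own error handler (see the snippets above);
            # the first snippet instead assigns it after construction.
            self.fail_json = fail_json

    def fail_json(self, **kwargs):
        # Fallback error handler for illustration only.
        raise RuntimeError(kwargs.get('msg', 'unknown failure'))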
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        dict(
            api_key=dict(required=True, no_log=True),
            api_secret=dict(required=True, no_log=True),
            src=dict(required=True, type='int'),
            dest=dict(required=True, type='list'),
            msg=dict(required=True),
        ),
    )

    module = AnsibleModule(
        argument_spec=argument_spec
    )

    send_msg(module)
def main():
    # use the predefined argument spec for url
    argument_spec = url_argument_spec()
    # add our own arguments
    argument_spec.update(
        url=dict(required=True),
        query=dict(type="str", required=False, default=""),
        resolved=dict(type="bool", default=False, choices=[True, False]),
    )

    # Define the main module
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    icinga_object = Icinga2APIObject(module=module, path="/services/applyrules", data=[])
    object_list = icinga_object.query(
        query=module.params["query"], resolved=module.params["resolved"])
    module.exit_json(objects=object_list["data"]["objects"])
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=(['checksum', 'sha256sum']),
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(item.split(':', 1) for item in module.params['headers'].split(','))
            module.deprecate('Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`', version='2.10')
        except Exception:
            module.fail_json(msg="The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.")
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>")

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed)
            module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.
    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # If the remote URL exists, we're done with check mode
    if module.check_mode:
        os.remove(tmpsrc)
        res_args = dict(url=url, dest=dest, src=tmpsrc, changed=True, msg=info.get('msg', ''))
        module.exit_json(**res_args)

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc())
        changed = True
    else:
        changed = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum))

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only. We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(
        url=url, dest=dest, src=tmpsrc, md5sum=md5sum, checksum_src=checksum_src,
        checksum_dest=checksum_dest, changed=changed, msg=info.get('msg', ''),
        status_code=info.get('status', '')
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)
def __init__(self):
    self.url = None
    self.url_args = ModuleStub(url_argument_spec(), self._error)
    self._nxapi_auth = None
    self.default_output = 'json'
    self._connected = False
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool', default=False),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='dict'),
        tmp_dest=dict(type='path'),
        unredirected_headers=dict(type='list', elements='str', default=[]),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    headers = module.params['headers']
    tmp_dest = module.params['tmp_dest']
    unredirected_headers = module.params['unredirected_headers']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>", **result)

        if is_url(checksum):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(module, checksum_url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest,
                                                     unredirected_headers=unredirected_headers)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
            checksum_map = []
            filename = url_filename(url)
            if len(lines) == 1 and len(lines[0].split()) == 1:
                # Only a single line with a single string
                # treat it as a checksum only file
                checksum_map.append((lines[0], filename))
            else:
                # The assumption here is the file is in the format of
                # checksum filename
                for line in lines:
                    # Split by one whitespace to keep the leading type char ' ' (whitespace) for text and '*' for binary
                    parts = line.split(" ", 1)
                    if len(parts) == 2:
                        # Remove the leading type char, we expect
                        if parts[1].startswith((" ", "*",)):
                            parts[1] = parts[1][1:]

                        # Append checksum and path without potential leading './'
                        checksum_map.append((parts[0], parts[1].lstrip("./")))

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
            for cksum in (s for (s, f) in checksum_map if f == filename):
                checksum = cksum
                break
            else:
                checksum = None

            if checksum is None:
                module.fail_json(msg="Unable to find a checksum for file '%s' in '%s'" % (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and checksum and not checksum_mismatch:
            # Not forcing redownload, unless checksum does not match
            # allow file attribute changes
            file_args = module.load_file_common_arguments(module.params, path=dest)
            result['changed'] = module.set_fs_attributes_if_different(file_args, False)
            if result['changed']:
                module.exit_json(msg="file already exists but file attributes changed", **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    method = 'HEAD' if module.check_mode else 'GET'
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest, method,
                           unredirected_headers=unredirected_headers)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.
    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], **result)

    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest), **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest), **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest)), **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest)), **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest, unsafe_writes=module.params['unsafe_writes'])
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(), **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum), **result)

    # allow file attribute changes
    file_args = module.load_file_common_arguments(module.params, path=dest)
    result['changed'] = module.set_fs_attributes_if_different(file_args, result['changed'])

    # Backwards compat only. We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''), status_code=info.get('status', ''), **result)
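# The checksum-URL branch in the snippet above accepts either a bare digest or the
# common "<digest> <filename>" / "<digest> *<filename>" layout produced by tools
# such as sha256sum. The standalone helper below is only an illustration of that
# lookup, factored out for readability; the function name and signature are invented
# here and are not part of the module.
def find_checksum_for(lines, filename):
    # A single token on a single line: treat the file as a bare digest.
    if len(lines) == 1 and len(lines[0].split()) == 1:
        return lines[0]
    # Otherwise expect "<digest> <path>" or "<digest> *<path>" per line.
    for line in lines:
        parts = line.split(" ", 1)
        if len(parts) != 2:
            continue
        path = parts[1]
        if path.startswith((" ", "*")):
            path = path[1:]
        if path.lstrip("./") == filename:
            return parts[0]
    return None


# Example: the sha256sum-style layout resolves to the digest for 'foo.tar.gz'.
print(find_checksum_for(["0a1b2c...  foo.tar.gz", "3d4e5f...  bar.tar.gz"], "foo.tar.gz"))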
def main():
    # use the predefined argument spec for url
    argument_spec = url_argument_spec()
    # add our own arguments
    argument_spec.update(
        state=dict(default="present", choices=["absent", "present"]),
        url=dict(required=True),
        append=dict(type="bool", choices=[True, False]),
        object_name=dict(required=True, aliases=["name"]),
        disabled=dict(type="bool", default=False, choices=[True, False]),
        check_command=dict(required=False),
        check_interval=dict(required=False),
        check_period=dict(required=False),
        check_timeout=dict(required=False),
        enable_active_checks=dict(type="bool", required=False),
        enable_event_handler=dict(type="bool", required=False),
        enable_notifications=dict(type="bool", required=False),
        enable_passive_checks=dict(type="bool", required=False),
        enable_perfdata=dict(type="bool", required=False),
        event_command=dict(type="str", required=False),
        groups=dict(type="list", elements="str", default=[], required=False),
        imports=dict(type="list", elements="str", default=[], required=False),
        max_check_attempts=dict(required=False),
        notes=dict(type="str", required=False),
        notes_url=dict(type="str", required=False),
        retry_interval=dict(required=False),
        use_agent=dict(type="bool", required=False),
        vars=dict(type="dict", default={}, required=False),
        volatile=dict(type="bool", required=False),
    )

    # Define the main module
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    data_keys = [
        "object_name",
        "disabled",
        "check_command",
        "check_interval",
        "check_period",
        "check_timeout",
        "enable_active_checks",
        "enable_event_handler",
        "enable_notifications",
        "enable_passive_checks",
        "enable_perfdata",
        "event_command",
        "groups",
        "imports",
        "max_check_attempts",
        "notes",
        "notes_url",
        "retry_interval",
        "use_agent",
        "vars",
        "volatile",
    ]

    data = {}

    if module.params["append"]:
        for k in data_keys:
            if module.params[k]:
                data[k] = module.params[k]
    else:
        for k in data_keys:
            data[k] = module.params[k]

    data["object_type"] = "template"

    icinga_object = Icinga2APIObject(module=module, path="/service", data=data)

    changed, diff = icinga_object.update(module.params["state"])
    module.exit_json(
        changed=changed,
        diff=diff,
    )
def run_module():
    # module arguments
    module_args = url_argument_spec()
    module_args.update(dict(
        id=dict(type='str', required=True),
        label=dict(type='str', required=False),
        path=dict(type='path', required=False),
        devices=dict(type='list', required=False, default=False),
        fs_watcher=dict(type='bool', default=True),
        ignore_perms=dict(type='bool', required=False, default=False),
        type=dict(type='str', default='sendreceive',
                  choices=['sendreceive', 'sendonly', 'receiveonly']),
        host=dict(type='str', default='http://127.0.0.1:8384'),
        api_key=dict(type='str', required=False, no_log=True),
        config_file=dict(type='path', required=False),
        timeout=dict(type='int', default=30),
        state=dict(type='str', default='present',
                   choices=['absent', 'present', 'pause']),
    ))

    # seed the result dict in the object
    result = {
        "changed": False,
        "response": None,
    }

    # the AnsibleModule object will be our abstraction working with Ansible
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )

    if module.params['state'] != 'absent' and not module.params['path']:
        module.fail_json(msg='You must provide a path when creating', **result)

    if module.check_mode:
        return result

    # Auto-configuration: Try to fetch API key from filesystem
    if not module.params['api_key']:
        module.params['api_key'] = get_key_from_filesystem(module)

    config = get_config(module)
    self_id = get_status(module)['myID']
    devices_mapping = get_devices_mapping(config)
    if module.params['state'] == 'absent':
        # Remove folder from list, if found
        for idx, folder in enumerate(config['folders']):
            if folder['id'] == module.params['id']:
                config['folders'].pop(idx)
                result['changed'] = True
                break
    else:
        folder_config = get_folder_config(module.params['id'], config)
        folder_config_devices = (
            [d['deviceID'] for d in folder_config['devices']] if folder_config else []
        )
        folder_config_wanted = create_folder(
            module.params, self_id, folder_config_devices, devices_mapping
        )

        if folder_config is None:
            config['folders'].append(folder_config_wanted)
            result['changed'] = True
        elif folder_config != folder_config_wanted:
            # Update the folder configuration in-place
            folder_config.clear()
            folder_config.update(folder_config_wanted)
            result['changed'] = True

    if result['changed']:
        post_config(module, config, result)

    module.exit_json(**result)
def main():
    # use the predefined argument spec for url
    argument_spec = url_argument_spec()
    # remove unnecessary argument 'force'
    del argument_spec['force']
    # add our own arguments
    argument_spec.update(
        state=dict(default="present", choices=["absent", "present"]),
        name=dict(required=True, aliases=['host']),
        zone=dict(),
        template=dict(default=None),
        check_command=dict(default="hostalive"),
        display_name=dict(default=None),
        ip=dict(required=True),
        variables=dict(type='dict', default=None),
    )

    # Define the main module
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    state = module.params["state"]
    name = module.params["name"]
    zone = module.params["zone"]
    template = []
    template.append(name)
    if module.params["template"]:
        template.append(module.params["template"])
    check_command = module.params["check_command"]
    ip = module.params["ip"]
    display_name = module.params["display_name"]
    if not display_name:
        display_name = name
    variables = module.params["variables"]

    try:
        icinga = icinga2_api()
        icinga.module = module
        icinga.check_connection()
    except Exception as e:
        module.fail_json(msg="unable to connect to Icinga. Exception message: %s" % (e))

    data = {
        'attrs': {
            'address': ip,
            'display_name': display_name,
            'check_command': check_command,
            'zone': zone,
            'vars': {
                'made_by': "ansible",
            },
            'templates': template,
        }
    }

    if variables:
        data['attrs']['vars'].update(variables)

    changed = False
    if icinga.exists(name):
        if state == "absent":
            if module.check_mode:
                module.exit_json(changed=True, name=name, data=data)
            else:
                try:
                    ret = icinga.delete(name)
                    if ret['code'] == 200:
                        changed = True
                    else:
                        module.fail_json(msg="bad return code deleting host: %s" % (ret['data']))
                except Exception as e:
                    module.fail_json(msg="exception deleting host: " + str(e))

        elif icinga.diff(name, data):
            if module.check_mode:
                module.exit_json(changed=False, name=name, data=data)

            # Template attribute is not allowed in modification
            del data['attrs']['templates']

            ret = icinga.modify(name, data)

            if ret['code'] == 200:
                changed = True
            else:
                module.fail_json(msg="bad return code modifying host: %s" % (ret['data']))

    else:
        if state == "present":
            if module.check_mode:
                changed = True
            else:
                try:
                    ret = icinga.create(name, data)
                    if ret['code'] == 200:
                        changed = True
                    else:
                        module.fail_json(msg="bad return code creating host: %s" % (ret['data']))
                except Exception as e:
                    module.fail_json(msg="exception creating host: " + str(e))

    module.exit_json(changed=changed, name=name, data=data)
def main():
    argument_spec = a10_argument_spec()
    argument_spec.update(url_argument_spec())
    argument_spec.update(
        dict(
            state=dict(type='str', default='present', choices=['present', 'absent']),
            service_group=dict(type='str', aliases=['service', 'pool', 'group'], required=True),
            service_group_protocol=dict(type='str', default='tcp', aliases=['proto', 'protocol'], choices=['tcp', 'udp']),
            service_group_method=dict(type='str', default='round-robin',
                                      aliases=['method'],
                                      choices=['round-robin', 'weighted-rr', 'least-connection',
                                               'weighted-least-connection', 'service-least-connection',
                                               'service-weighted-least-connection', 'fastest-response',
                                               'least-request', 'round-robin-strict',
                                               'src-ip-only-hash', 'src-ip-hash']),
            servers=dict(type='list', aliases=['server', 'member'], default=[]),
            partition=dict(type='str', default=[]),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=False
    )

    host = module.params['host']
    username = module.params['username']
    password = module.params['password']
    partition = module.params['partition']
    state = module.params['state']
    write_config = module.params['write_config']
    slb_service_group = module.params['service_group']
    slb_service_group_proto = module.params['service_group_protocol']
    slb_service_group_method = module.params['service_group_method']
    slb_servers = module.params['servers']

    if slb_service_group is None:
        module.fail_json(msg='service_group is required')

    axapi_base_url = 'https://' + host + '/services/rest/V2.1/?format=json'
    load_balancing_methods = {'round-robin': 0,
                              'weighted-rr': 1,
                              'least-connection': 2,
                              'weighted-least-connection': 3,
                              'service-least-connection': 4,
                              'service-weighted-least-connection': 5,
                              'fastest-response': 6,
                              'least-request': 7,
                              'round-robin-strict': 8,
                              'src-ip-only-hash': 14,
                              'src-ip-hash': 15}

    if not slb_service_group_proto or slb_service_group_proto.lower() == 'tcp':
        protocol = 2
    else:
        protocol = 3

    # validate the server data list structure
    validate_servers(module, slb_servers)

    json_post = {
        'service_group': {
            'name': slb_service_group,
            'protocol': protocol,
            'lb_method': load_balancing_methods[slb_service_group_method],
        }
    }

    # first we authenticate to get a session id
    session_url = axapi_authenticate(module, axapi_base_url, username, password)

    # then we select the active-partition
    slb_server_partition = axapi_call(module, session_url + '&method=system.partition.active', json.dumps({'name': partition}))

    # then we check to see if the specified group exists
    slb_result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': slb_service_group}))
    slb_service_group_exist = not axapi_failure(slb_result)

    changed = False
    if state == 'present':
        # before creating/updating we need to validate that servers
        # defined in the servers list exist to prevent errors
        checked_servers = []
        for server in slb_servers:
            result = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': server['server']}))
            if axapi_failure(result):
                module.fail_json(msg="the server %s specified in the servers list does not exist" % server['server'])
            checked_servers.append(server['server'])

        if not slb_service_group_exist:
            result = axapi_call(module, session_url + '&method=slb.service_group.create', json.dumps(json_post))
            if axapi_failure(result):
                module.fail_json(msg=result['response']['err']['msg'])
            changed = True
        else:
            # check to see if the service group definition without the
            # server members is different, and update that individually
            # if it needs it
            do_update = False
            for field in VALID_SERVICE_GROUP_FIELDS:
                if json_post['service_group'][field] != slb_result['service_group'][field]:
                    do_update = True
                    break

            if do_update:
                result = axapi_call(module, session_url + '&method=slb.service_group.update', json.dumps(json_post))
                if axapi_failure(result):
                    module.fail_json(msg=result['response']['err']['msg'])
                changed = True

        # next we pull the defined list of servers out of the returned
        # results to make it a bit easier to iterate over
        defined_servers = slb_result.get('service_group', {}).get('member_list', [])

        # next we add/update new member servers from the user-specified
        # list if they're different or not on the target device
        for server in slb_servers:
            found = False
            different = False
            for def_server in defined_servers:
                if server['server'] == def_server['server']:
                    found = True
                    for valid_field in VALID_SERVER_FIELDS:
                        if server[valid_field] != def_server[valid_field]:
                            different = True
                            break
                if found or different:
                    break
            # add or update as required
            server_data = {
                "name": slb_service_group,
                "member": server,
            }
            if not found:
                result = axapi_call(module, session_url + '&method=slb.service_group.member.create', json.dumps(server_data))
                changed = True
            elif different:
                result = axapi_call(module, session_url + '&method=slb.service_group.member.update', json.dumps(server_data))
                changed = True

        # finally, remove any servers that are on the target
        # device but were not specified in the list given
        for server in defined_servers:
            found = False
            for slb_server in slb_servers:
                if server['server'] == slb_server['server']:
                    found = True
                    break
            # remove if not found
            server_data = {
                "name": slb_service_group,
                "member": server,
            }
            if not found:
                result = axapi_call(module, session_url + '&method=slb.service_group.member.delete', json.dumps(server_data))
                changed = True

        # if we changed things, get the full info regarding
        # the service group for the return data below
        if changed:
            result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': slb_service_group}))
        else:
            result = slb_result

    elif state == 'absent':
        if slb_service_group_exist:
            result = axapi_call(module, session_url + '&method=slb.service_group.delete', json.dumps({'name': slb_service_group}))
            changed = True
        else:
            result = dict(msg="the service group was not present")

    # if the config has changed, save the config unless otherwise requested
    if changed and write_config:
        write_result = axapi_call(module, session_url + '&method=system.action.write_memory')
        if axapi_failure(write_result):
            module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])

    # log out of the session nicely and exit
    axapi_call(module, session_url + '&method=session.close')
    module.exit_json(changed=changed, content=result)
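# The service-group snippet above compares fields against VALID_SERVICE_GROUP_FIELDS
# and VALID_SERVER_FIELDS, which are module-level constants defined outside this
# excerpt. A rough sketch of what such constants could look like is below; the exact
# field names are assumptions for illustration, not taken from this excerpt.
VALID_SERVICE_GROUP_FIELDS = ['name', 'protocol', 'lb_method']
VALID_SERVER_FIELDS = ['server', 'port', 'status']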
def main():
    # Module arguments
    argument_spec = url_argument_spec()
    argument_spec.update(
        group=dict(default='jenkins'),
        jenkins_home=dict(default='/var/lib/jenkins'),
        mode=dict(default='0644', type='raw'),
        name=dict(required=True),
        owner=dict(default='jenkins'),
        params=dict(type='dict'),
        state=dict(
            choices=[
                'present',
                'absent',
                'pinned',
                'unpinned',
                'enabled',
                'disabled',
                'latest'],
            default='present'),
        timeout=dict(default=30, type="int"),
        updates_expiration=dict(default=86400, type="int"),
        updates_url=dict(default='https://updates.jenkins.io'),
        url=dict(default='http://localhost:8080'),
        url_password=dict(no_log=True),
        version=dict(),
        with_dependencies=dict(default=True, type='bool'),
    )
    # Module settings
    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # Params was removed
    # https://meetbot.fedoraproject.org/ansible-meeting/2017-09-28/ansible_dev_meeting.2017-09-28-15.00.log.html
    if module.params['params']:
        module.fail_json(msg="The params option to jenkins_plugin was removed in Ansible 2.5 since it circumvents Ansible's option handling")

    # Force basic authentication
    module.params['force_basic_auth'] = True

    # Convert timeout to float
    try:
        module.params['timeout'] = float(module.params['timeout'])
    except ValueError as e:
        module.fail_json(
            msg='Cannot convert %s to float.' % module.params['timeout'],
            details=to_native(e))

    # Set version to latest if state is latest
    if module.params['state'] == 'latest':
        module.params['state'] = 'present'
        module.params['version'] = 'latest'

    # Create some shortcuts
    name = module.params['name']
    state = module.params['state']

    # Initial change state of the task
    changed = False

    # Instantiate the JenkinsPlugin object
    jp = JenkinsPlugin(module)

    # Perform action depending on the requested state
    if state == 'present':
        changed = jp.install()
    elif state == 'absent':
        changed = jp.uninstall()
    elif state == 'pinned':
        changed = jp.pin()
    elif state == 'unpinned':
        changed = jp.unpin()
    elif state == 'enabled':
        changed = jp.enable()
    elif state == 'disabled':
        changed = jp.disable()

    # Print status of the change
    module.exit_json(changed=changed, plugin=name, state=state)
def main():
    # Module arguments
    argument_spec = url_argument_spec()
    argument_spec.update(
        group=dict(default='jenkins'),
        jenkins_home=dict(default='/var/lib/jenkins'),
        mode=dict(default='0644', type='raw'),
        name=dict(required=True),
        owner=dict(default='jenkins'),
        params=dict(type='dict'),
        state=dict(
            choices=[
                'present',
                'absent',
                'pinned',
                'unpinned',
                'enabled',
                'disabled',
                'latest'],
            default='present'),
        timeout=dict(default=30, type="int"),
        updates_expiration=dict(default=86400, type="int"),
        updates_url=dict(default='https://updates.jenkins-ci.org'),
        url=dict(default='http://localhost:8080'),
        url_password=dict(no_log=True),
        version=dict(),
        with_dependencies=dict(default=True, type='bool'),
    )
    # Module settings
    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # Update module parameters by user's parameters if defined
    if 'params' in module.params and isinstance(module.params['params'], dict):
        module.params.update(module.params['params'])
        # Remove the params
        module.params.pop('params', None)

    # Force basic authentication
    module.params['force_basic_auth'] = True

    # Convert timeout to float
    try:
        module.params['timeout'] = float(module.params['timeout'])
    except ValueError:
        e = get_exception()
        module.fail_json(
            msg='Cannot convert %s to float.' % module.params['timeout'],
            details=str(e))

    # Set version to latest if state is latest
    if module.params['state'] == 'latest':
        module.params['state'] = 'present'
        module.params['version'] = 'latest'

    # Create some shortcuts
    name = module.params['name']
    state = module.params['state']

    # Initial change state of the task
    changed = False

    # Instantiate the JenkinsPlugin object
    jp = JenkinsPlugin(module)

    # Perform action depending on the requested state
    if state == 'present':
        changed = jp.install()
    elif state == 'absent':
        changed = jp.uninstall()
    elif state == 'pinned':
        changed = jp.pin()
    elif state == 'unpinned':
        changed = jp.unpin()
    elif state == 'enabled':
        changed = jp.enable()
    elif state == 'disabled':
        changed = jp.disable()

    # Print status of the change
    module.exit_json(changed=changed, plugin=name, state=state)
def main():
    argument_spec = a10_argument_spec()
    argument_spec.update(url_argument_spec())
    argument_spec.update(
        dict(
            state=dict(type='str', default='present', choices=['present', 'absent']),
            server_name=dict(type='str', aliases=['server'], required=True),
            server_ip=dict(type='str', aliases=['ip', 'address']),
            server_status=dict(type='str', default='enabled', aliases=['status'], choices=['enabled', 'disabled']),
            server_ports=dict(type='list', aliases=['port'], default=[]),
            partition=dict(type='str', default=[]),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=False
    )

    host = module.params['host']
    partition = module.params['partition']
    username = module.params['username']
    password = module.params['password']
    state = module.params['state']
    write_config = module.params['write_config']
    slb_server = module.params['server_name']
    slb_server_ip = module.params['server_ip']
    slb_server_status = module.params['server_status']
    slb_server_ports = module.params['server_ports']

    if slb_server is None:
        module.fail_json(msg='server_name is required')

    axapi_base_url = 'https://%s/services/rest/V2.1/?format=json' % host
    session_url = axapi_authenticate(module, axapi_base_url, username, password)

    # validate the ports data structure
    validate_ports(module, slb_server_ports)

    json_post = {
        'server': {
            'name': slb_server,
        }
    }

    # add optional module parameters
    if slb_server_ip:
        json_post['server']['host'] = slb_server_ip

    if slb_server_ports:
        json_post['server']['port_list'] = slb_server_ports

    if slb_server_status:
        json_post['server']['status'] = axapi_enabled_disabled(slb_server_status)

    axapi_call(module, session_url + '&method=system.partition.active', json.dumps({'name': partition}))

    slb_server_data = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': slb_server}))
    slb_server_exists = not axapi_failure(slb_server_data)

    changed = False
    if state == 'present':
        if not slb_server_exists:
            if not slb_server_ip:
                module.fail_json(msg='you must specify an IP address when creating a server')

            result = axapi_call(module, session_url + '&method=slb.server.create', json.dumps(json_post))
            if axapi_failure(result):
                module.fail_json(msg="failed to create the server: %s" % result['response']['err']['msg'])
            changed = True
        else:
            def port_needs_update(src_ports, dst_ports):
                '''
                Checks to determine if the port definitions of the src_ports
                array are in or different from those in dst_ports. If there is
                a difference, this function returns true, otherwise false.
                '''
                for src_port in src_ports:
                    found = False
                    different = False
                    for dst_port in dst_ports:
                        if src_port['port_num'] == dst_port['port_num']:
                            found = True
                            for valid_field in VALID_PORT_FIELDS:
                                if src_port[valid_field] != dst_port[valid_field]:
                                    different = True
                                    break
                        if found or different:
                            break
                    if not found or different:
                        return True

                # every port from the src exists in the dst, and none of them were different
                return False

            def status_needs_update(current_status, new_status):
                '''
                Check to determine if we want to change the status of a server.
                If there is a difference between the current status of the server
                and the desired status, return true, otherwise false.
                '''
                if current_status != new_status:
                    return True
                return False

            defined_ports = slb_server_data.get('server', {}).get('port_list', [])
            current_status = slb_server_data.get('server', {}).get('status')

            # we check for a needed update several ways
            # - in case ports are missing from the ones specified by the user
            # - in case ports are missing from those on the device
            # - in case we are changing the status of a server
            if (port_needs_update(defined_ports, slb_server_ports) or
                    port_needs_update(slb_server_ports, defined_ports) or
                    status_needs_update(current_status, axapi_enabled_disabled(slb_server_status))):
                result = axapi_call(module, session_url + '&method=slb.server.update', json.dumps(json_post))
                if axapi_failure(result):
                    module.fail_json(msg="failed to update the server: %s" % result['response']['err']['msg'])
                changed = True

        # if we changed things, get the full info regarding
        # the service group for the return data below
        if changed:
            result = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': slb_server}))
        else:
            result = slb_server_data
    elif state == 'absent':
        if slb_server_exists:
            result = axapi_call(module, session_url + '&method=slb.server.delete', json.dumps({'name': slb_server}))
            changed = True
        else:
            result = dict(msg="the server was not present")

    # if the config has changed, save the config unless otherwise requested
    if changed and write_config:
        write_result = axapi_call(module, session_url + '&method=system.action.write_memory')
        if axapi_failure(write_result):
            module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])

    # log out of the session nicely and exit
    axapi_call(module, session_url + '&method=session.close')
    module.exit_json(changed=changed, content=result)
def __init__(self):
    self.url = None
    self.url_args = ModuleStub(url_argument_spec(), self._error)
    self.headers = dict({'Content-Type': 'application/json', 'Accept': 'application/json'})
    self._connected = False
def run_module():
    # module arguments
    module_args = url_argument_spec()
    module_args.update(dict(
        id=dict(type='str', required=True),
        name=dict(type='str', required=False),
        host=dict(type='str', default='http://127.0.0.1:8384'),
        api_key=dict(type='str', required=False, no_log=True),
        timeout=dict(type='int', default=30),
        state=dict(type='str', default='present',
                   choices=['absent', 'present', 'pause']),
    ))

    # seed the result dict in the object
    result = {
        "changed": False,
        "response": None,
    }

    # the AnsibleModule object will be our abstraction working with Ansible
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )

    if module.params['state'] != 'absent' and not module.params['name']:
        module.fail_json(msg='You must provide a name when creating', **result)

    if module.check_mode:
        return result

    # Auto-configuration: Try to fetch API key from filesystem
    if not module.params['api_key']:
        module.params['api_key'] = get_key_from_filesystem(module)

    config = get_config(module)
    if module.params['state'] == 'absent':
        # Remove device from list, if found
        for idx, device in enumerate(config['devices']):
            if device['deviceID'] == module.params['id']:
                config['devices'].pop(idx)
                result['changed'] = True
                break
    else:
        # Bail-out if device is already added
        for device in config['devices']:
            if device['deviceID'] == module.params['id']:
                want_pause = module.params['state'] == 'pause'
                if (want_pause and device['paused']) or \
                        (not want_pause and not device['paused']):
                    module.exit_json(**result)
                else:
                    device['paused'] = want_pause
                    result['changed'] = True
                    break

        # Append the new device into configuration
        if not result['changed']:
            device = create_device(module.params)
            config['devices'].append(device)
            result['changed'] = True

    if result['changed']:
        post_config(module, config, result)

    module.exit_json(**result)
def main():
    # use the predefined argument spec for url
    argument_spec = url_argument_spec()
    # add our own arguments
    argument_spec.update(
        state=dict(default="present", choices=["absent", "present"]),
        url=dict(required=True),
        object_name=dict(required=True, aliases=["name"]),
        imports=dict(type="list", elements="str", required=False, default=[]),
        disabled=dict(type="bool", required=False, default=False, choices=[True, False]),
        vars=dict(type="dict", default={}),
        command=dict(required=False),
        command_type=dict(
            default="PluginCheck",
            choices=["PluginCheck", "PluginNotification", "PluginEvent"],
        ),
        timeout=dict(required=False, default=None),
        zone=dict(required=False, default=None),
        arguments=dict(type="dict", default=None),
    )

    # When deleting objects, only the name is necessary, so we cannot use
    # required=True in the argument_spec. Instead we define here what is
    # necessary when state is present
    required_if = [("state", "present", ["object_name"])]

    # Define the main module
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=required_if,
    )

    # 'arguments' is really a dict, so leaving it unset would mean {}.
    # The director API, however, returns an empty array when nothing is defined,
    # so override the value here to make the diff work better.
    if not module.params["arguments"]:
        module.params["arguments"] = []

    data = {
        "object_name": module.params["object_name"],
        "object_type": "object",
        "imports": module.params["imports"],
        "disabled": module.params["disabled"],
        "vars": module.params["vars"],
        "command": module.params["command"],
        "methods_execute": module.params["command_type"],
        "timeout": module.params["timeout"],
        "zone": module.params["zone"],
        "arguments": module.params["arguments"],
    }

    icinga_object = Icinga2APIObject(module=module, path="/command", data=data)

    changed, diff = icinga_object.update(module.params["state"])
    module.exit_json(
        changed=changed,
        diff=diff,
    )
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        add_export_distributor=dict(default=False, type='bool'),
        feed=dict(),
        importer_ssl_ca_cert=dict(),
        importer_ssl_client_cert=dict(),
        importer_ssl_client_key=dict(),
        name=dict(required=True, aliases=['repo']),
        proxy_host=dict(),
        proxy_port=dict(),
        publish_distributor=dict(),
        pulp_host=dict(default="https://127.0.0.1"),
        relative_url=dict(),
        repo_type=dict(default="rpm"),
        serve_http=dict(default=False, type='bool'),
        serve_https=dict(default=True, type='bool'),
        state=dict(
            default="present",
            choices=['absent', 'present', 'sync', 'publish']),
        wait_for_completion=dict(default=False, type="bool"))

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True)

    add_export_distributor = module.params['add_export_distributor']
    feed = module.params['feed']
    importer_ssl_ca_cert = module.params['importer_ssl_ca_cert']
    importer_ssl_client_cert = module.params['importer_ssl_client_cert']
    importer_ssl_client_key = module.params['importer_ssl_client_key']
    proxy_host = module.params['proxy_host']
    proxy_port = module.params['proxy_port']
    publish_distributor = module.params['publish_distributor']
    pulp_host = module.params['pulp_host']
    relative_url = module.params['relative_url']
    repo = module.params['name']
    repo_type = module.params['repo_type']
    serve_http = module.params['serve_http']
    serve_https = module.params['serve_https']
    state = module.params['state']
    wait_for_completion = module.params['wait_for_completion']

    if (state == 'present') and (not relative_url):
        module.fail_json(msg="When state is present, relative_url is required.")

    # Ensure that the importer_ssl_* is the content and not a file path
    if importer_ssl_ca_cert is not None:
        importer_ssl_ca_cert_file_path = os.path.abspath(importer_ssl_ca_cert)

        if os.path.isfile(importer_ssl_ca_cert_file_path):
            importer_ssl_ca_cert_file_object = open(importer_ssl_ca_cert_file_path, 'r')
            try:
                importer_ssl_ca_cert = importer_ssl_ca_cert_file_object.read()
            finally:
                importer_ssl_ca_cert_file_object.close()

    if importer_ssl_client_cert is not None:
        importer_ssl_client_cert_file_path = os.path.abspath(importer_ssl_client_cert)

        if os.path.isfile(importer_ssl_client_cert_file_path):
            importer_ssl_client_cert_file_object = open(importer_ssl_client_cert_file_path, 'r')
            try:
                importer_ssl_client_cert = importer_ssl_client_cert_file_object.read()
            finally:
                importer_ssl_client_cert_file_object.close()

    if importer_ssl_client_key is not None:
        importer_ssl_client_key_file_path = os.path.abspath(importer_ssl_client_key)

        if os.path.isfile(importer_ssl_client_key_file_path):
            importer_ssl_client_key_file_object = open(importer_ssl_client_key_file_path, 'r')
            try:
                importer_ssl_client_key = importer_ssl_client_key_file_object.read()
            finally:
                importer_ssl_client_key_file_object.close()

    server = pulp_server(module, pulp_host, repo_type, wait_for_completion=wait_for_completion)
    server.set_repo_list()
    repo_exists = server.check_repo_exists(repo)

    changed = False

    if state == 'absent' and repo_exists:
        if not module.check_mode:
            server.delete_repo(repo)

        changed = True

    if state == 'sync':
        if not repo_exists:
            module.fail_json(msg="Repository was not found. The repository can not be synced.")

        if not module.check_mode:
            server.sync_repo(repo)

        changed = True

    if state == 'publish':
        if not repo_exists:
            module.fail_json(msg="Repository was not found. The repository can not be published.")

        if not module.check_mode:
            server.publish_repo(repo, publish_distributor)

        changed = True

    if state == 'present':
        if not repo_exists:
            if not module.check_mode:
                server.create_repo(
                    repo_id=repo,
                    relative_url=relative_url,
                    feed=feed,
                    serve_http=serve_http,
                    serve_https=serve_https,
                    proxy_host=proxy_host,
                    proxy_port=proxy_port,
                    ssl_ca_cert=importer_ssl_ca_cert,
                    ssl_client_cert=importer_ssl_client_cert,
                    ssl_client_key=importer_ssl_client_key,
                    add_export_distributor=add_export_distributor)

            changed = True

        else:
            # Check to make sure all the settings are correct
            # The importer config gets overwritten on set and not updated, so
            # we set the whole config at the same time.
            if not server.compare_repo_importer_config(
                repo,
                feed=feed,
                proxy_host=proxy_host,
                proxy_port=proxy_port,
                ssl_ca_cert=importer_ssl_ca_cert,
                ssl_client_cert=importer_ssl_client_cert,
                ssl_client_key=importer_ssl_client_key
            ):
                if not module.check_mode:
                    server.update_repo_importer_config(
                        repo,
                        feed=feed,
                        proxy_host=proxy_host,
                        proxy_port=proxy_port,
                        ssl_ca_cert=importer_ssl_ca_cert,
                        ssl_client_cert=importer_ssl_client_cert,
                        ssl_client_key=importer_ssl_client_key)

                changed = True

            if relative_url is not None:
                if not server.compare_repo_distributor_config(
                    repo,
                    relative_url=relative_url
                ):
                    if not module.check_mode:
                        server.update_repo_distributor_config(
                            repo,
                            relative_url=relative_url)

                    changed = True

            if not server.compare_repo_distributor_config(repo, http=serve_http):
                if not module.check_mode:
                    server.update_repo_distributor_config(repo, http=serve_http)

                changed = True

            if not server.compare_repo_distributor_config(repo, https=serve_https):
                if not module.check_mode:
                    server.update_repo_distributor_config(repo, https=serve_https)

                changed = True

    module.exit_json(changed=changed, repo=repo)
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( state=dict(type='str', default='present', choices=['present', 'absent']), virtual_server=dict(type='str', aliases=['vip', 'virtual'], required=True), virtual_server_ip=dict(type='str', aliases=['ip', 'address'], required=True), virtual_server_status=dict(type='str', default='enabled', aliases=['status'], choices=['enabled', 'disabled']), virtual_server_ports=dict(type='list', required=True), partition=dict(type='str', default=[]), )) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) host = module.params['host'] username = module.params['username'] password = module.params['password'] partition = module.params['partition'] state = module.params['state'] write_config = module.params['write_config'] slb_virtual = module.params['virtual_server'] slb_virtual_ip = module.params['virtual_server_ip'] slb_virtual_status = module.params['virtual_server_status'] slb_virtual_ports = module.params['virtual_server_ports'] if slb_virtual is None: module.fail_json(msg='virtual_server is required') validate_ports(module, slb_virtual_ports) axapi_base_url = 'https://%s/services/rest/V2.1/?format=json' % host session_url = axapi_authenticate(module, axapi_base_url, username, password) axapi_call(module, session_url + '&method=system.partition.active', json.dumps({'name': partition})) slb_virtual_data = axapi_call( module, session_url + '&method=slb.virtual_server.search', json.dumps({'name': slb_virtual})) slb_virtual_exists = not axapi_failure(slb_virtual_data) changed = False if state == 'present': json_post = { 'virtual_server': { 'name': slb_virtual, 'address': slb_virtual_ip, 'status': axapi_enabled_disabled(slb_virtual_status), 'vport_list': slb_virtual_ports, } } # before creating/updating we need to validate that any # service groups defined in the ports list exist since # since the API will still create port definitions for # them while indicating a failure occurred checked_service_groups = [] for port in slb_virtual_ports: if 'service_group' in port and port[ 'service_group'] not in checked_service_groups: # skip blank service group entries if port['service_group'] == '': continue result = axapi_call( module, session_url + '&method=slb.service_group.search', json.dumps({'name': port['service_group']})) if axapi_failure(result): module.fail_json( msg= "the service group %s specified in the ports list does not exist" % port['service_group']) checked_service_groups.append(port['service_group']) if not slb_virtual_exists: result = axapi_call( module, session_url + '&method=slb.virtual_server.create', json.dumps(json_post)) if axapi_failure(result): module.fail_json( msg="failed to create the virtual server: %s" % result['response']['err']['msg']) changed = True else: def needs_update(src_ports, dst_ports): ''' Checks to determine if the port definitions of the src_ports array are in or different from those in dst_ports. If there is a difference, this function returns true, otherwise false. 
''' for src_port in src_ports: found = False different = False for dst_port in dst_ports: if src_port['port'] == dst_port['port']: found = True for valid_field in VALID_PORT_FIELDS: if src_port[valid_field] != dst_port[ valid_field]: different = True break if found or different: break if not found or different: return True # every port from the src exists in the dst, and none of them were different return False defined_ports = slb_virtual_data.get('virtual_server', {}).get('vport_list', []) # we check for a needed update both ways, in case ports # are missing from either the ones specified by the user # or from those on the device if needs_update(defined_ports, slb_virtual_ports) or needs_update( slb_virtual_ports, defined_ports): result = axapi_call( module, session_url + '&method=slb.virtual_server.update', json.dumps(json_post)) if axapi_failure(result): module.fail_json( msg="failed to update the virtual server: %s" % result['response']['err']['msg']) changed = True # if we changed things, get the full info regarding # the virtual server for the return data below if changed: result = axapi_call( module, session_url + '&method=slb.virtual_server.search', json.dumps({'name': slb_virtual})) else: result = slb_virtual_data elif state == 'absent': if slb_virtual_exists: result = axapi_call( module, session_url + '&method=slb.virtual_server.delete', json.dumps({'name': slb_virtual})) changed = True else: result = dict(msg="the virtual server was not present") # if the config has changed, save the config unless otherwise requested if changed and write_config: write_result = axapi_call( module, session_url + '&method=system.action.write_memory') if axapi_failure(write_result): module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg']) # log out of the session nicely and exit axapi_call(module, session_url + '&method=session.close') module.exit_json(changed=changed, content=result)
def main(): # Module arguments argument_spec = url_argument_spec() argument_spec.update( group=dict(default='jenkins'), jenkins_home=dict(default='/var/lib/jenkins'), mode=dict(default='0644', type='raw'), name=dict(required=True), owner=dict(default='jenkins'), params=dict(type='dict'), state=dict( choices=[ 'present', 'absent', 'pinned', 'unpinned', 'enabled', 'disabled', 'latest'], default='present'), timeout=dict(default=30, type="int"), updates_expiration=dict(default=86400, type="int"), updates_url=dict(default='https://updates.jenkins-ci.org'), url=dict(default='http://localhost:8080'), url_password=dict(no_log=True), version=dict(), with_dependencies=dict(default=True, type='bool'), ) # Module settings module = AnsibleModule( argument_spec=argument_spec, add_file_common_args=True, supports_check_mode=True, ) # Params was removed # https://meetbot.fedoraproject.org/ansible-meeting/2017-09-28/ansible_dev_meeting.2017-09-28-15.00.log.html if module.params['params']: module.fail_json(msg="The params option to jenkins_plugin was removed in Ansible 2.5 " "since it circumvents Ansible's option handling") # Force basic authentication module.params['force_basic_auth'] = True # Convert timeout to float try: module.params['timeout'] = float(module.params['timeout']) except ValueError as e: module.fail_json( msg='Cannot convert %s to float.' % module.params['timeout'], details=to_native(e)) # Set version to latest if state is latest if module.params['state'] == 'latest': module.params['state'] = 'present' module.params['version'] = 'latest' # Create some shortcuts name = module.params['name'] state = module.params['state'] # Initial change state of the task changed = False # Instantiate the JenkinsPlugin object jp = JenkinsPlugin(module) # Perform action depending on the requested state if state == 'present': changed = jp.install() elif state == 'absent': changed = jp.uninstall() elif state == 'pinned': changed = jp.pin() elif state == 'unpinned': changed = jp.unpin() elif state == 'enabled': changed = jp.enable() elif state == 'disabled': changed = jp.disable() # Print status of the change module.exit_json(changed=changed, plugin=name, state=state)
def main(): # use the predefined argument spec for url argument_spec = url_argument_spec() # remove unnecessary arguments del argument_spec['force'] del argument_spec['force_basic_auth'] del argument_spec['http_agent'] argument_spec.update( state=dict(choices=['present', 'absent', 'export'], default='present'), url=dict(aliases=['grafana_url'], required=True), url_username=dict(aliases=['grafana_user'], default='admin'), url_password=dict(aliases=['grafana_password'], default='admin', no_log=True), grafana_api_key=dict(type='str', no_log=True), org_id=dict(default=1, type='int'), uid=dict(type='str'), slug=dict(type='str'), path=dict(type='str'), overwrite=dict(type='bool', default=False), message=dict(type='str'), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=False, required_together=[['url_username', 'url_password', 'org_id']], mutually_exclusive=[['grafana_user', 'grafana_api_key'], ['uid', 'slug']], ) try: if module.params['state'] == 'present': result = grafana_create_dashboard(module, module.params) elif module.params['state'] == 'absent': result = grafana_delete_dashboard(module, module.params) else: result = grafana_export_dashboard(module, module.params) except GrafanaAPIException as e: module.fail_json( failed=True, msg="error : %s" % to_native(e) ) return except GrafanaMalformedJson as e: module.fail_json( failed=True, msg="error : json file does not contain a meta section with a slug parameter, or you did not specify the slug parameter" ) return except GrafanaDeleteException as e: module.fail_json( failed=True, msg="error : Can't delete dashboard : %s" % to_native(e) ) return except GrafanaExportException as e: module.fail_json( failed=True, msg="error : Can't export dashboard : %s" % to_native(e) ) return module.exit_json( failed=False, **result ) return
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( state=dict(type="str", default="present", choices=["present", "absent"]), server_name=dict(type="str", aliases=["server"], required=True), server_ip=dict(type="str", aliases=["ip", "address"]), server_status=dict(type="str", default="enabled", aliases=["status"], choices=["enabled", "disabled"]), server_ports=dict(type="list", aliases=["port"], default=[]), partition=dict(type="str", default=[]), ) ) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) host = module.params["host"] partition = module.params["partition"] username = module.params["username"] password = module.params["password"] state = module.params["state"] write_config = module.params["write_config"] slb_server = module.params["server_name"] slb_server_ip = module.params["server_ip"] slb_server_status = module.params["server_status"] slb_server_ports = module.params["server_ports"] if slb_server is None: module.fail_json(msg="server_name is required") axapi_base_url = "https://%s/services/rest/V2.1/?format=json" % host session_url = axapi_authenticate(module, axapi_base_url, username, password) # validate the ports data structure validate_ports(module, slb_server_ports) json_post = {"server": {"name": slb_server}} # add optional module parameters if slb_server_ip: json_post["server"]["host"] = slb_server_ip if slb_server_ports: json_post["server"]["port_list"] = slb_server_ports if slb_server_status: json_post["server"]["status"] = axapi_enabled_disabled(slb_server_status) slb_server_partition = axapi_call( module, session_url + "&method=system.partition.active", json.dumps({"name": partition}) ) slb_server_data = axapi_call(module, session_url + "&method=slb.server.search", json.dumps({"name": slb_server})) slb_server_exists = not axapi_failure(slb_server_data) changed = False if state == "present": if not slb_server_exists: if not slb_server_ip: module.fail_json(msg="you must specify an IP address when creating a server") result = axapi_call(module, session_url + "&method=slb.server.create", json.dumps(json_post)) if axapi_failure(result): module.fail_json(msg="failed to create the server: %s" % result["response"]["err"]["msg"]) changed = True else: def port_needs_update(src_ports, dst_ports): """ Checks to determine if the port definitions of the src_ports array are in or different from those in dst_ports. If there is a difference, this function returns true, otherwise false. """ for src_port in src_ports: found = False different = False for dst_port in dst_ports: if src_port["port_num"] == dst_port["port_num"]: found = True for valid_field in VALID_PORT_FIELDS: if src_port[valid_field] != dst_port[valid_field]: different = True break if found or different: break if not found or different: return True # every port from the src exists in the dst, and none of them were different return False def status_needs_update(current_status, new_status): """ Check to determine if we want to change the status of a server. If there is a difference between the current status of the server and the desired status, return true, otherwise false. 
""" if current_status != new_status: return True return False defined_ports = slb_server_data.get("server", {}).get("port_list", []) current_status = slb_server_data.get("server", {}).get("status") # we check for a needed update several ways # - in case ports are missing from the ones specified by the user # - in case ports are missing from those on the device # - in case we are change the status of a server if ( port_needs_update(defined_ports, slb_server_ports) or port_needs_update(slb_server_ports, defined_ports) or status_needs_update(current_status, axapi_enabled_disabled(slb_server_status)) ): result = axapi_call(module, session_url + "&method=slb.server.update", json.dumps(json_post)) if axapi_failure(result): module.fail_json(msg="failed to update the server: %s" % result["response"]["err"]["msg"]) changed = True # if we changed things, get the full info regarding # the service group for the return data below if changed: result = axapi_call(module, session_url + "&method=slb.server.search", json.dumps({"name": slb_server})) else: result = slb_server_data elif state == "absent": if slb_server_exists: result = axapi_call(module, session_url + "&method=slb.server.delete", json.dumps({"name": slb_server})) changed = True else: result = dict(msg="the server was not present") # if the config has changed, save the config unless otherwise requested if changed and write_config: write_result = axapi_call(module, session_url + "&method=system.action.write_memory") if axapi_failure(write_result): module.fail_json(msg="failed to save the configuration: %s" % write_result["response"]["err"]["msg"]) # log out of the session nicely and exit axapi_call(module, session_url + "&method=session.close") module.exit_json(changed=changed, content=result)
def main(): argument_spec = url_argument_spec() argument_spec.update( dest=dict(type='path'), url_username=dict(type='str', aliases=['user']), url_password=dict(type='str', aliases=['password'], no_log=True), body=dict(type='raw'), body_format=dict(type='str', default='raw', choices=['form-urlencoded', 'json', 'raw']), src=dict(type='path'), method=dict(type='str', default='GET'), return_content=dict(type='bool', default=False), follow_redirects=dict( type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']), creates=dict(type='path'), removes=dict(type='path'), status_code=dict(type='list', default=[200]), timeout=dict(type='int', default=30), headers=dict(type='dict', default={}), unix_socket=dict(type='path'), ) module = AnsibleModule( argument_spec=argument_spec, # TODO: Remove check_invalid_arguments in 2.9 check_invalid_arguments=False, add_file_common_args=True, mutually_exclusive=[['body', 'src']], ) url = module.params['url'] body = module.params['body'] body_format = module.params['body_format'].lower() method = module.params['method'].upper() dest = module.params['dest'] return_content = module.params['return_content'] creates = module.params['creates'] removes = module.params['removes'] status_code = [int(x) for x in list(module.params['status_code'])] socket_timeout = module.params['timeout'] dict_headers = module.params['headers'] if not re.match('^[A-Z]+$', method): module.fail_json( msg= "Parameter 'method' needs to be a single word in uppercase, like GET or POST." ) if body_format == 'json': # Encode the body unless its a string, then assume it is pre-formatted JSON if not isinstance(body, string_types): body = json.dumps(body) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/json' elif body_format == 'form-urlencoded': if not isinstance(body, string_types): try: body = form_urlencoded(body) except ValueError as e: module.fail_json( msg='failed to parse body as form_urlencoded: %s' % to_native(e), elapsed=0) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/x-www-form-urlencoded' # TODO: Deprecated section. Remove in Ansible 2.9 # Grab all the http headers. Need this hack since passing multi-values is # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}') for key, value in iteritems(module.params): if key.startswith("HEADER_"): module.deprecate( 'Supplying headers via HEADER_* is deprecated. Please use `headers` to' ' supply headers for the request', version='2.9') skey = key.replace("HEADER_", "") dict_headers[skey] = value # End deprecated section if creates is not None: # do not run the command if the line contains creates=filename # and the filename already exists. This allows idempotence # of uri executions. if os.path.exists(creates): module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False) if removes is not None: # do not run the command if the line contains removes=filename # and the filename does not exist. This allows idempotence # of uri executions. 
if not os.path.exists(removes): module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False) # Make the request start = datetime.datetime.utcnow() resp, content, dest = uri(module, url, dest, body, body_format, method, dict_headers, socket_timeout) resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds resp['status'] = int(resp['status']) # Write the file out if requested if dest is not None: if resp['status'] == 304: resp['changed'] = False else: write_file(module, url, dest, content, resp) # allow file attribute changes resp['changed'] = True module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest resp['changed'] = module.set_fs_attributes_if_different( file_args, resp['changed']) resp['path'] = dest else: resp['changed'] = False # Transmogrify the headers, replacing '-' with '_', since variables don't # work with dashes. # In python3, the headers are title cased. Lowercase them to be # compatible with the python2 behaviour. uresp = {} for key, value in iteritems(resp): ukey = key.replace("-", "_").lower() uresp[ukey] = value if 'location' in uresp: uresp['location'] = absolute_location(url, uresp['location']) # Default content_encoding to try content_encoding = 'utf-8' if 'content_type' in uresp: content_type, params = cgi.parse_header(uresp['content_type']) if 'charset' in params: content_encoding = params['charset'] u_content = to_text(content, encoding=content_encoding) if any(candidate in content_type for candidate in JSON_CANDIDATES): try: js = json.loads(u_content) uresp['json'] = js except Exception: if PY2: sys.exc_clear( ) # Avoid false positive traceback in fail_json() on Python 2 else: u_content = to_text(content, encoding=content_encoding) if resp['status'] not in status_code: uresp['msg'] = 'Status code was %s and not %s: %s' % ( resp['status'], status_code, uresp.get('msg', '')) module.fail_json(content=u_content, **uresp) elif return_content: module.exit_json(content=u_content, **uresp) else: module.exit_json(**uresp)
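# --- illustrative sketch, not part of the module above ---
# The uri module lowercases response header names and replaces '-' with '_'
# so they can be referenced as Ansible variables. The standalone helper below
# (transmogrify_headers is a hypothetical name, not module API) mirrors that
# loop under those assumptions.
def transmogrify_headers(resp):
    """Return a copy of resp whose keys are safe to use as variable names."""
    uresp = {}
    for key, value in resp.items():
        uresp[key.replace("-", "_").lower()] = value
    return uresp


if __name__ == "__main__":
    sample = {"Content-Type": "application/json", "X-Request-Id": "abc123"}
    # expected: {'content_type': 'application/json', 'x_request_id': 'abc123'}
    print(transmogrify_headers(sample))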
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( state=dict(type='str', default='present', choices=['present', 'absent']), partition=dict(type='str', aliases=['partition', 'part'], required=False), file_name=dict(type='str', aliases=['filename'], required=False), method=dict(type='str', choices=['upload', 'download'], required=False), overwrite=dict(type='bool', default=False, required=False), )) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) host = module.params['host'] username = module.params['username'] password = module.params['password'] part = module.params['partition'] state = module.params['state'] file_name = module.params['file_name'] method = module.params['method'] overwrite = module.params['overwrite'] if method and method != 'upload' and method != 'download': module.fail_json(msg="method must be one of 'upload' or 'download'") # authenticate axapi_base_url = 'https://%s/axapi/v3/' % host signature = axapi_authenticate_v3(module, axapi_base_url + 'auth', username, password) # change partitions if we need to if part: part_change_result = axapi_call_v3(module, axapi_base_url + 'active-partition/' + part, method="POST", signature=signature, body="") if (part_change_result['response']['status'] == 'fail'): # log out of the session nicely and exit with an error result = axapi_call_v3(module, axapi_base_url + 'logoff', method="POST", signature=signature, body="") module.fail_json(msg=part_change_result['response']['err']['msg']) # look for the aflex script on the device aflex_data = axapi_call_v3(module, axapi_base_url + 'file/aflex/' + file_name, method="GET", signature=signature) aflex_content = "" if ('response' in aflex_data and aflex_data['response']['status'] == 'fail'): if (aflex_data['response']['code'] == 404): aflex_exists = False else: logoff_result = axapi_call_v3(module, axapi_base_url + 'logoff', method="POST", signature=signature, body="") module.fail_json(msg=aflex_data['response']['err']['msg']) else: aflex_content = aflex_data['response']['data'] aflex_exists = True changed = False if state == 'present': if (method == "upload" and aflex_exists and overwrite) or (method == "upload" and not aflex_exists): if os.path.isfile(file_name) is False: # log out of the session nicely and exit with an error result = axapi_call_v3(module, axapi_base_url + 'logoff', method="POST", signature=signature, body="") module.fail_json(msg='File does not exist ' + file_name) else: try: result = uploadAflex(axapi_base_url + 'file/aflex', file_name, file_name, signature=signature) except Exception as e: # log out of the session nicely and exit with an error #err_result = e['changed'] result = axapi_call_v3(module, axapi_base_url + 'logoff', method="POST", signature=signature, body="") module.fail_json(msg=str(e)) if axapi_failure(result): # log out of the session nicely and exit with an error result = axapi_call_v3(module, axapi_base_url + 'logoff', method="POST", signature=signature, body="") module.fail_json(msg="failed to upload the aflex: %s" % result['response']['err']['msg']) changed = True elif method == "download" and aflex_exists: saveFile(file_name, aflex_content) elif method == "download" and not aflex_exists: result = axapi_call_v3(module, axapi_base_url + 'logoff', method="POST", signature=signature, body="") module.fail_json(msg="aflex cannot be found on the device")
def main(): argument_spec = url_argument_spec() argument_spec.update( dest=dict(type='path'), url_username=dict(type='str', aliases=['user']), url_password=dict(type='str', aliases=['password'], no_log=True), body=dict(type='raw'), body_format=dict(type='str', default='raw', choices=['form-urlencoded', 'json', 'raw']), src=dict(type='path'), method=dict(type='str', default='GET'), return_content=dict(type='bool', default=False), follow_redirects=dict( type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']), creates=dict(type='path'), removes=dict(type='path'), status_code=dict(type='list', default=[200]), timeout=dict(type='int', default=30), headers=dict(type='dict', default={}), unix_socket=dict(type='path'), remote_src=dict(type='bool', default=False), ) module = AnsibleModule( argument_spec=argument_spec, add_file_common_args=True, mutually_exclusive=[['body', 'src']], ) if module.params.get('thirsty'): module.deprecate( 'The alias "thirsty" has been deprecated and will be removed, use "force" instead', version='2.13') url = module.params['url'] body = module.params['body'] body_format = module.params['body_format'].lower() method = module.params['method'].upper() dest = module.params['dest'] return_content = module.params['return_content'] creates = module.params['creates'] removes = module.params['removes'] status_code = [int(x) for x in list(module.params['status_code'])] socket_timeout = module.params['timeout'] dict_headers = module.params['headers'] if not re.match('^[A-Z]+$', method): module.fail_json( msg= "Parameter 'method' needs to be a single word in uppercase, like GET or POST." ) if body_format == 'json': # Encode the body unless its a string, then assume it is pre-formatted JSON if not isinstance(body, string_types): body = json.dumps(body) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/json' elif body_format == 'form-urlencoded': if not isinstance(body, string_types): try: body = form_urlencoded(body) except ValueError as e: module.fail_json( msg='failed to parse body as form_urlencoded: %s' % to_native(e), elapsed=0) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/x-www-form-urlencoded' if creates is not None: # do not run the command if the line contains creates=filename # and the filename already exists. This allows idempotence # of uri executions. if os.path.exists(creates): module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False) if removes is not None: # do not run the command if the line contains removes=filename # and the filename does not exist. This allows idempotence # of uri executions. 
if not os.path.exists(removes): module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False) # Make the request start = datetime.datetime.utcnow() resp, content, dest = uri(module, url, dest, body, body_format, method, dict_headers, socket_timeout) resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds resp['status'] = int(resp['status']) resp['changed'] = False # Write the file out if requested if dest is not None: if resp['status'] in status_code and resp['status'] != 304: write_file(module, url, dest, content, resp) # allow file attribute changes resp['changed'] = True module.params['path'] = dest file_args = module.load_file_common_arguments(module.params, path=dest) resp['changed'] = module.set_fs_attributes_if_different( file_args, resp['changed']) resp['path'] = dest # Transmogrify the headers, replacing '-' with '_', since variables don't # work with dashes. # In python3, the headers are title cased. Lowercase them to be # compatible with the python2 behaviour. uresp = {} for key, value in iteritems(resp): ukey = key.replace("-", "_").lower() uresp[ukey] = value if 'location' in uresp: uresp['location'] = absolute_location(url, uresp['location']) # Default content_encoding to try content_encoding = 'utf-8' if 'content_type' in uresp: # Handle multiple Content-Type headers charsets = [] content_types = [] for value in uresp['content_type'].split(','): ct, params = cgi.parse_header(value) if ct not in content_types: content_types.append(ct) if 'charset' in params: if params['charset'] not in charsets: charsets.append(params['charset']) if content_types: content_type = content_types[0] if len(content_types) > 1: module.warn( 'Received multiple conflicting Content-Type values (%s), using %s' % (', '.join(content_types), content_type)) if charsets: content_encoding = charsets[0] if len(charsets) > 1: module.warn( 'Received multiple conflicting charset values (%s), using %s' % (', '.join(charsets), content_encoding)) u_content = to_text(content, encoding=content_encoding) if any(candidate in content_type for candidate in JSON_CANDIDATES): try: js = json.loads(u_content) uresp['json'] = js except Exception: if PY2: sys.exc_clear( ) # Avoid false positive traceback in fail_json() on Python 2 else: u_content = to_text(content, encoding=content_encoding) if resp['status'] not in status_code: uresp['msg'] = 'Status code was %s and not %s: %s' % ( resp['status'], status_code, uresp.get('msg', '')) module.fail_json(content=u_content, **uresp) elif return_content: module.exit_json(content=u_content, **uresp) else: module.exit_json(**uresp)
def main(): argument_spec = url_argument_spec() argument_spec.update(add_export_distributor=dict(default=False, type='bool'), feed=dict(), importer_ssl_ca_cert=dict(), importer_ssl_client_cert=dict(), importer_ssl_client_key=dict(), name=dict(required=True, aliases=['repo']), proxy_host=dict(), proxy_port=dict(), publish_distributor=dict(), pulp_host=dict(default="https://127.0.0.1"), relative_url=dict(), repo_type=dict(default="rpm"), serve_http=dict(default=False, type='bool'), serve_https=dict(default=True, type='bool'), state=dict( default="present", choices=['absent', 'present', 'sync', 'publish']), wait_for_completion=dict(default=False, type="bool")) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) add_export_distributor = module.params['add_export_distributor'] feed = module.params['feed'] importer_ssl_ca_cert = module.params['importer_ssl_ca_cert'] importer_ssl_client_cert = module.params['importer_ssl_client_cert'] importer_ssl_client_key = module.params['importer_ssl_client_key'] proxy_host = module.params['proxy_host'] proxy_port = module.params['proxy_port'] publish_distributor = module.params['publish_distributor'] pulp_host = module.params['pulp_host'] relative_url = module.params['relative_url'] repo = module.params['name'] repo_type = module.params['repo_type'] serve_http = module.params['serve_http'] serve_https = module.params['serve_https'] state = module.params['state'] wait_for_completion = module.params['wait_for_completion'] if (state == 'present') and (not relative_url): module.fail_json( msg="When state is present, relative_url is required.") # Ensure that the importer_ssl_* is the content and not a file path if importer_ssl_ca_cert is not None: importer_ssl_ca_cert_file_path = os.path.abspath(importer_ssl_ca_cert) if os.path.isfile(importer_ssl_ca_cert_file_path): importer_ssl_ca_cert_file_object = open( importer_ssl_ca_cert_file_path, 'r') try: importer_ssl_ca_cert = importer_ssl_ca_cert_file_object.read() finally: importer_ssl_ca_cert_file_object.close() if importer_ssl_client_cert is not None: importer_ssl_client_cert_file_path = os.path.abspath( importer_ssl_client_cert) if os.path.isfile(importer_ssl_client_cert_file_path): importer_ssl_client_cert_file_object = open( importer_ssl_client_cert_file_path, 'r') try: importer_ssl_client_cert = importer_ssl_client_cert_file_object.read( ) finally: importer_ssl_client_cert_file_object.close() if importer_ssl_client_key is not None: importer_ssl_client_key_file_path = os.path.abspath( importer_ssl_client_key) if os.path.isfile(importer_ssl_client_key_file_path): importer_ssl_client_key_file_object = open( importer_ssl_client_key_file_path, 'r') try: importer_ssl_client_key = importer_ssl_client_key_file_object.read( ) finally: importer_ssl_client_key_file_object.close() server = pulp_server(module, pulp_host, repo_type, wait_for_completion=wait_for_completion) server.set_repo_list() repo_exists = server.check_repo_exists(repo) changed = False if state == 'absent' and repo_exists: if not module.check_mode: server.delete_repo(repo) changed = True if state == 'sync': if not repo_exists: module.fail_json( msg= "Repository was not found. The repository can not be synced.") if not module.check_mode: server.sync_repo(repo) changed = True if state == 'publish': if not repo_exists: module.fail_json( msg= "Repository was not found. The repository can not be published." 
) if not module.check_mode: server.publish_repo(repo, publish_distributor) changed = True if state == 'present': if not repo_exists: if not module.check_mode: server.create_repo( repo_id=repo, relative_url=relative_url, feed=feed, serve_http=serve_http, serve_https=serve_https, proxy_host=proxy_host, proxy_port=proxy_port, ssl_ca_cert=importer_ssl_ca_cert, ssl_client_cert=importer_ssl_client_cert, ssl_client_key=importer_ssl_client_key, add_export_distributor=add_export_distributor) changed = True else: # Check to make sure all the settings are correct # The importer config gets overwritten on set and not updated, so # we set the whole config at the same time. if not server.compare_repo_importer_config( repo, feed=feed, proxy_host=proxy_host, proxy_port=proxy_port, ssl_ca_cert=importer_ssl_ca_cert, ssl_client_cert=importer_ssl_client_cert, ssl_client_key=importer_ssl_client_key): if not module.check_mode: server.update_repo_importer_config( repo, feed=feed, proxy_host=proxy_host, proxy_port=proxy_port, ssl_ca_cert=importer_ssl_ca_cert, ssl_client_cert=importer_ssl_client_cert, ssl_client_key=importer_ssl_client_key) changed = True if relative_url is not None: if not server.compare_repo_distributor_config( repo, relative_url=relative_url): if not module.check_mode: server.update_repo_distributor_config( repo, relative_url=relative_url) changed = True if not server.compare_repo_distributor_config(repo, http=serve_http): if not module.check_mode: server.update_repo_distributor_config(repo, http=serve_http) changed = True if not server.compare_repo_distributor_config(repo, https=serve_https): if not module.check_mode: server.update_repo_distributor_config(repo, https=serve_https) changed = True module.exit_json(changed=changed, repo=repo)
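# --- illustrative sketch, not part of the module above ---
# pulp_repo accepts the importer_ssl_* options either as certificate content
# or as a path to a file; when the value points at an existing file, the file
# contents are read and used instead. resolve_cert_material is a hypothetical
# helper that shows that decision in isolation.
import os


def resolve_cert_material(value):
    if value is None:
        return None
    path = os.path.abspath(value)
    if os.path.isfile(path):
        with open(path, 'r') as cert_file:
            return cert_file.read()
    return value


if __name__ == "__main__":
    # a value that is not a path to an existing file is returned unchanged
    print(resolve_cert_material("-----BEGIN CERTIFICATE-----\n..."))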
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( state=dict(type='str', default='present', choices=['present', 'absent']), virtual_server=dict(type='str', aliases=['vip', 'virtual'], required=True), virtual_server_ip=dict(type='str', aliases=['ip', 'address'], required=True), virtual_server_status=dict(type='str', default='enabled', aliases=['status'], choices=['enabled', 'disabled']), virtual_server_ports=dict(type='list', required=True), partition=dict(type='str', default=[]), ) ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=False ) host = module.params['host'] username = module.params['username'] password = module.params['password'] partition = module.params['partition'] state = module.params['state'] write_config = module.params['write_config'] slb_virtual = module.params['virtual_server'] slb_virtual_ip = module.params['virtual_server_ip'] slb_virtual_status = module.params['virtual_server_status'] slb_virtual_ports = module.params['virtual_server_ports'] if slb_virtual is None: module.fail_json(msg='virtual_server is required') validate_ports(module, slb_virtual_ports) axapi_base_url = 'https://%s/services/rest/V2.1/?format=json' % host session_url = axapi_authenticate(module, axapi_base_url, username, password) axapi_call(module, session_url + '&method=system.partition.active', json.dumps({'name': partition})) slb_virtual_data = axapi_call(module, session_url + '&method=slb.virtual_server.search', json.dumps({'name': slb_virtual})) slb_virtual_exists = not axapi_failure(slb_virtual_data) changed = False if state == 'present': json_post = { 'virtual_server': { 'name': slb_virtual, 'address': slb_virtual_ip, 'status': axapi_enabled_disabled(slb_virtual_status), 'vport_list': slb_virtual_ports, } } # before creating/updating we need to validate that any # service groups defined in the ports list exist since # since the API will still create port definitions for # them while indicating a failure occurred checked_service_groups = [] for port in slb_virtual_ports: if 'service_group' in port and port['service_group'] not in checked_service_groups: # skip blank service group entries if port['service_group'] == '': continue result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': port['service_group']})) if axapi_failure(result): module.fail_json(msg="the service group %s specified in the ports list does not exist" % port['service_group']) checked_service_groups.append(port['service_group']) if not slb_virtual_exists: result = axapi_call(module, session_url + '&method=slb.virtual_server.create', json.dumps(json_post)) if axapi_failure(result): module.fail_json(msg="failed to create the virtual server: %s" % result['response']['err']['msg']) changed = True else: def needs_update(src_ports, dst_ports): ''' Checks to determine if the port definitions of the src_ports array are in or different from those in dst_ports. If there is a difference, this function returns true, otherwise false. 
''' for src_port in src_ports: found = False different = False for dst_port in dst_ports: if src_port['port'] == dst_port['port']: found = True for valid_field in VALID_PORT_FIELDS: if src_port[valid_field] != dst_port[valid_field]: different = True break if found or different: break if not found or different: return True # every port from the src exists in the dst, and none of them were different return False defined_ports = slb_virtual_data.get('virtual_server', {}).get('vport_list', []) # we check for a needed update both ways, in case ports # are missing from either the ones specified by the user # or from those on the device if needs_update(defined_ports, slb_virtual_ports) or needs_update(slb_virtual_ports, defined_ports): result = axapi_call(module, session_url + '&method=slb.virtual_server.update', json.dumps(json_post)) if axapi_failure(result): module.fail_json(msg="failed to update the virtual server: %s" % result['response']['err']['msg']) changed = True # if we changed things, get the full info regarding # the virtual server for the return data below if changed: result = axapi_call(module, session_url + '&method=slb.virtual_server.search', json.dumps({'name': slb_virtual})) else: result = slb_virtual_data elif state == 'absent': if slb_virtual_exists: result = axapi_call(module, session_url + '&method=slb.virtual_server.delete', json.dumps({'name': slb_virtual})) changed = True else: result = dict(msg="the virtual server was not present") # if the config has changed, save the config unless otherwise requested if changed and write_config: write_result = axapi_call(module, session_url + '&method=system.action.write_memory') if axapi_failure(write_result): module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg']) # log out of the session nicely and exit axapi_call(module, session_url + '&method=session.close') module.exit_json(changed=changed, content=result)
def main(): # use the predefined argument spec for url argument_spec = url_argument_spec() # remove unnecessary argument 'force' del argument_spec['force'] # add our own arguments argument_spec.update( state=dict(default="present", choices=["absent", "present"]), name=dict(), host=dict(), service=dict(), zone=dict(), template=dict(default=None), check_command=dict(default="hostalive"), display_name=dict(default=None), command_endpoint=dict(default=""), force_check=dict(default=True, type='bool'), variables=dict(type='dict', default=None), ) # Define the main module module = AnsibleModule(argument_spec=argument_spec, mutually_exclusive=[ ['name', 'host'], ['name', 'service'], ], required_together=[ ['host', 'service'], ], supports_check_mode=True) state = module.params["state"] if module.params["host"]: name = module.params["host"] + "!" + module.params["service"] else: name = module.params["name"] zone = module.params["zone"] template = [] template.append(name) if module.params["template"]: template.append(module.params["template"]) check_command = module.params["check_command"] command_endpoint = module.params["command_endpoint"] display_name = module.params["display_name"] force_check = module.params["force_check"] if not display_name: display_name = name variables = module.params["variables"] try: icinga = icinga2_api() icinga.module = module icinga.check_connection() except Exception as e: module.fail_json( msg="unable to connect to Icinga. Exception message: %s" % (e)) data = { 'attrs': { 'check_command': check_command, 'command_endpoint': command_endpoint, 'display_name': display_name, 'zone': zone, 'vars': { 'made_by': "ansible", }, 'templates': template, } } if variables: data['attrs']['vars'].update(variables) changed = False if icinga.exists(name): if state == "absent": if module.check_mode: module.exit_json(changed=True, name=name, data=data) else: try: ret = icinga.delete(name) if ret['code'] == 200: changed = True else: module.fail_json( msg="bad return code deleting service: %s" % (ret['data'])) except Exception as e: module.fail_json(msg="exception deleting service: " + str(e)) elif icinga.diff(name, data): if module.check_mode: module.exit_json(changed=False, name=name, data=data) # ret = icinga.modify(name,data) ret = icinga.delete(name) ret = icinga.create(name, data) if ret['code'] == 200: changed = True if force_check: ret = icinga.check(name) if ret['code'] != 200: module.fail_json( msg="bad return code checking service: %s" % (ret['data'])) else: module.fail_json(msg="bad return code modifying service: %s" % (ret['data'])) else: if state == "present": if module.check_mode: changed = True else: try: ret = icinga.create(name, data) if ret['code'] == 200: changed = True if force_check: ret = icinga.check(name) if ret['code'] != 200: module.fail_json( msg="bad return code checking service: %s" % (ret['data'])) else: module.fail_json( msg="bad return code creating service: %s" % (ret['data'])) except Exception as e: module.fail_json(msg="exception creating service: " + str(e)) module.exit_json(changed=changed, name=name, data=data)
def main(): argument_spec = url_argument_spec() # setup aliases argument_spec['url_username']['aliases'] = ['username'] argument_spec['url_password']['aliases'] = ['password'] argument_spec.update( url=dict(type='str', required=True), dest=dict(type='path', required=True), backup=dict(type='bool'), sha256sum=dict(type='str', default=''), checksum=dict(type='str', default=''), timeout=dict(type='int', default=10), headers=dict(type='raw'), tmp_dest=dict(type='path'), ) module = AnsibleModule( # not checking because of daisy chain to file module argument_spec=argument_spec, add_file_common_args=True, supports_check_mode=True, mutually_exclusive=[['checksum', 'sha256sum']], ) url = module.params['url'] dest = module.params['dest'] backup = module.params['backup'] force = module.params['force'] sha256sum = module.params['sha256sum'] checksum = module.params['checksum'] use_proxy = module.params['use_proxy'] timeout = module.params['timeout'] tmp_dest = module.params['tmp_dest'] result = dict( changed=False, checksum_dest=None, checksum_src=None, dest=dest, elapsed=0, url=url, ) # Parse headers to dict if isinstance(module.params['headers'], dict): headers = module.params['headers'] elif module.params['headers']: try: headers = dict( item.split(':', 1) for item in module.params['headers'].split(',')) module.deprecate( 'Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`', version='2.10') except Exception: module.fail_json( msg= "The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.", **result) else: headers = None dest_is_dir = os.path.isdir(dest) last_mod_time = None # workaround for usage of deprecated sha256sum parameter if sha256sum: checksum = 'sha256:%s' % (sha256sum) # checksum specified, parse for algorithm and checksum if checksum: try: algorithm, checksum = checksum.split(':', 1) except ValueError: module.fail_json( msg= "The checksum parameter has to be in format <algorithm>:<checksum>", **result) if checksum.startswith('http://') or checksum.startswith( 'https://') or checksum.startswith('ftp://'): checksum_url = checksum # download checksum file to checksum_tmpsrc checksum_tmpsrc, checksum_info = url_get(module, checksum_url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest) with open(checksum_tmpsrc) as f: lines = [line.rstrip('\n') for line in f] os.remove(checksum_tmpsrc) checksum_map = {} for line in lines: parts = line.split(None, 1) if len(parts) == 2: checksum_map[parts[0]] = parts[1] filename = url_filename(url) # Look through each line in the checksum file for a hash corresponding to # the filename in the url, returning the first hash that is found. for cksum in (s for (s, f) in checksum_map.items() if f.strip('./') == filename): checksum = cksum break else: checksum = None if checksum is None: module.fail_json( msg="Unable to find a checksum for file '%s' in '%s'" % (filename, checksum_url)) # Remove any non-alphanumeric characters, including the infamous # Unicode zero-width space checksum = re.sub(r'\W+', '', checksum).lower() # Ensure the checksum portion is a hexdigest try: int(checksum, 16) except ValueError: module.fail_json(msg='The checksum format is invalid', **result) if not dest_is_dir and os.path.exists(dest): checksum_mismatch = False # If the download is not forced and there is a checksum, allow # checksum match to skip the download. 
if not force and checksum != '': destination_checksum = module.digest_from_file(dest, algorithm) if checksum != destination_checksum: checksum_mismatch = True # Not forcing redownload, unless checksum does not match if not force and checksum and not checksum_mismatch: # Not forcing redownload, unless checksum does not match # allow file attribute changes module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest result['changed'] = module.set_fs_attributes_if_different( file_args, False) if result['changed']: module.exit_json( msg="file already exists but file attributes changed", **result) module.exit_json(msg="file already exists", **result) # If the file already exists, prepare the last modified time for the # request. mtime = os.path.getmtime(dest) last_mod_time = datetime.datetime.utcfromtimestamp(mtime) # If the checksum does not match we have to force the download # because last_mod_time may be newer than on remote if checksum_mismatch: force = True # download to tmpsrc start = datetime.datetime.utcnow() tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest) result['elapsed'] = (datetime.datetime.utcnow() - start).seconds result['src'] = tmpsrc # Now the request has completed, we can finally generate the final # destination file name from the info dict. if dest_is_dir: filename = extract_filename_from_headers(info) if not filename: # Fall back to extracting the filename from the URL. # Pluck the URL from the info, since a redirect could have changed # it. filename = url_filename(info['url']) dest = os.path.join(dest, filename) result['dest'] = dest # raise an error if there is no tmpsrc file if not os.path.exists(tmpsrc): os.remove(tmpsrc) module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], **result) if not os.access(tmpsrc, os.R_OK): os.remove(tmpsrc) module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result) result['checksum_src'] = module.sha1(tmpsrc) # check if there is no dest file if os.path.exists(dest): # raise an error if copy has no permission on dest if not os.access(dest, os.W_OK): os.remove(tmpsrc) module.fail_json(msg="Destination %s is not writable" % (dest), **result) if not os.access(dest, os.R_OK): os.remove(tmpsrc) module.fail_json(msg="Destination %s is not readable" % (dest), **result) result['checksum_dest'] = module.sha1(dest) else: if not os.path.exists(os.path.dirname(dest)): os.remove(tmpsrc) module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest)), **result) if not os.access(os.path.dirname(dest), os.W_OK): os.remove(tmpsrc) module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest)), **result) if module.check_mode: if os.path.exists(tmpsrc): os.remove(tmpsrc) result['changed'] = ('checksum_dest' not in result or result['checksum_src'] != result['checksum_dest']) module.exit_json(msg=info.get('msg', ''), **result) backup_file = None if result['checksum_src'] != result['checksum_dest']: try: if backup: if os.path.exists(dest): backup_file = module.backup_local(dest) module.atomic_move(tmpsrc, dest) except Exception as e: if os.path.exists(tmpsrc): os.remove(tmpsrc) module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)), exception=traceback.format_exc(), **result) result['changed'] = True else: result['changed'] = False if os.path.exists(tmpsrc): os.remove(tmpsrc) if checksum != '': destination_checksum = module.digest_from_file(dest, 
algorithm) if checksum != destination_checksum: os.remove(dest) module.fail_json( msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum), **result) # allow file attribute changes module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest result['changed'] = module.set_fs_attributes_if_different( file_args, result['changed']) # Backwards compat only. We'll return None on FIPS enabled systems try: result['md5sum'] = module.md5(dest) except ValueError: result['md5sum'] = None if backup_file: result['backup_file'] = backup_file # Mission complete module.exit_json(msg=info.get('msg', ''), status_code=info.get('status', ''), **result)
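# --- illustrative sketch, not part of the module above ---
# get_url expects the checksum option as "<algorithm>:<checksum>", strips any
# non-alphanumeric characters (such as a Unicode zero-width space) and then
# verifies the remainder is a hex digest. parse_checksum is a hypothetical
# helper that replays that validation on its own.
import re


def parse_checksum(value):
    algorithm, _, digest = value.partition(':')
    if not digest:
        raise ValueError("checksum must be in <algorithm>:<checksum> format")
    digest = re.sub(r'\W+', '', digest).lower()
    int(digest, 16)  # raises ValueError unless the digest is hexadecimal
    return algorithm, digest


if __name__ == "__main__":
    # expected: ('sha256', 'deadbeef')
    print(parse_checksum('sha256:DEADBEEF'))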
def main(): # use the predefined argument spec for url argument_spec = url_argument_spec() # remove unnecessary arguments del argument_spec["force"] del argument_spec["http_agent"] # add our own arguments argument_spec.update( state=dict(default="present", choices=["absent", "present"]), object_name=dict(required=True), imports=dict(type="list", required=False, default=[]), disabled=dict(type="bool", required=False, default=False, choices=[True, False]), vars=dict(type="dict", default={}), command=dict(required=False), command_type=dict( default="PluginCheck", choices=["PluginCheck", "PluginNotification", "PluginEvent"], ), timeout=dict(required=False, default=None), zone=dict(required=False, default=None), arguments=dict(type="dict", default=None), ) # Define the main module module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) # the type of arguments is really a dict, so leaving it unset would mean {} # the director API, however, returns an empty array when nothing is defined # so override the value here so that the diff works better if not module.params["arguments"]: module.params["arguments"] = [] data = { "object_name": module.params["object_name"], "object_type": "object", "imports": module.params["imports"], "disabled": module.params["disabled"], "vars": module.params["vars"], "command": module.params["command"], "methods_execute": module.params["command_type"], "timeout": module.params["timeout"], "zone": module.params["zone"], "arguments": module.params["arguments"], } try: icinga_object = Icinga2APIObject(module=module, path="/command", data=data) except Exception as e: module.fail_json( msg="unable to connect to Icinga. Exception message: %s" % e) changed, diff = icinga_object.update(module.params["state"]) module.exit_json( changed=changed, object_name=module.params["object_name"], data=icinga_object.data, diff=diff, )
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( state=dict(type='str', default='present', choices=['present', 'absent']), server_name=dict(type='str', aliases=['server'], required=True), server_ip=dict(type='str', aliases=['ip', 'address']), server_status=dict(type='str', default='enabled', aliases=['status'], choices=['enabled', 'disabled']), server_ports=dict(type='list', aliases=['port'], default=[]), partition=dict(type='str', default=[]), )) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) host = module.params['host'] partition = module.params['partition'] username = module.params['username'] password = module.params['password'] state = module.params['state'] write_config = module.params['write_config'] slb_server = module.params['server_name'] slb_server_ip = module.params['server_ip'] slb_server_status = module.params['server_status'] slb_server_ports = module.params['server_ports'] if slb_server is None: module.fail_json(msg='server_name is required') axapi_base_url = 'https://%s/services/rest/V2.1/?format=json' % host session_url = axapi_authenticate(module, axapi_base_url, username, password) # validate the ports data structure validate_ports(module, slb_server_ports) json_post = { 'server': { 'name': slb_server, } } # add optional module parameters if slb_server_ip: json_post['server']['host'] = slb_server_ip if slb_server_ports: json_post['server']['port_list'] = slb_server_ports if slb_server_status: json_post['server']['status'] = axapi_enabled_disabled( slb_server_status) axapi_call(module, session_url + '&method=system.partition.active', json.dumps({'name': partition})) slb_server_data = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': slb_server})) slb_server_exists = not axapi_failure(slb_server_data) changed = False if state == 'present': if not slb_server_exists: if not slb_server_ip: module.fail_json( msg='you must specify an IP address when creating a server' ) result = axapi_call(module, session_url + '&method=slb.server.create', json.dumps(json_post)) if axapi_failure(result): module.fail_json(msg="failed to create the server: %s" % result['response']['err']['msg']) changed = True else: def port_needs_update(src_ports, dst_ports): ''' Checks to determine if the port definitions of the src_ports array are in or different from those in dst_ports. If there is a difference, this function returns true, otherwise false. ''' for src_port in src_ports: found = False different = False for dst_port in dst_ports: if src_port['port_num'] == dst_port['port_num']: found = True for valid_field in VALID_PORT_FIELDS: if src_port[valid_field] != dst_port[ valid_field]: different = True break if found or different: break if not found or different: return True # every port from the src exists in the dst, and none of them were different return False def status_needs_update(current_status, new_status): ''' Check to determine if we want to change the status of a server. If there is a difference between the current status of the server and the desired status, return true, otherwise false. 
''' if current_status != new_status: return True return False defined_ports = slb_server_data.get('server', {}).get('port_list', []) current_status = slb_server_data.get('server', {}).get('status') # we check for a needed update several ways # - in case ports are missing from the ones specified by the user # - in case ports are missing from those on the device # - in case we are changing the status of a server if (port_needs_update(defined_ports, slb_server_ports) or port_needs_update(slb_server_ports, defined_ports) or status_needs_update( current_status, axapi_enabled_disabled(slb_server_status))): result = axapi_call(module, session_url + '&method=slb.server.update', json.dumps(json_post)) if axapi_failure(result): module.fail_json(msg="failed to update the server: %s" % result['response']['err']['msg']) changed = True # if we changed things, get the full info regarding # the server for the return data below if changed: result = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': slb_server})) else: result = slb_server_data elif state == 'absent': if slb_server_exists: result = axapi_call(module, session_url + '&method=slb.server.delete', json.dumps({'name': slb_server})) changed = True else: result = dict(msg="the server was not present") # if the config has changed, save the config unless otherwise requested if changed and write_config: write_result = axapi_call( module, session_url + '&method=system.action.write_memory') if axapi_failure(write_result): module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg']) # log out of the session nicely and exit axapi_call(module, session_url + '&method=session.close') module.exit_json(changed=changed, content=result)
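# --- illustrative sketch, not part of the module above ---
# Standalone restatement of the port comparison above: a port list needs an
# update when a desired port is missing from the device, or when any tracked
# field differs between the two definitions. The field list here is assumed
# for the sketch; the real module defines its own VALID_PORT_FIELDS.
PORT_FIELDS = ['port_num', 'protocol', 'status', 'weight']


def port_list_needs_update(src_ports, dst_ports):
    for src_port in src_ports:
        match = next((d for d in dst_ports
                      if d['port_num'] == src_port['port_num']), None)
        if match is None:
            return True
        if any(src_port.get(f) != match.get(f) for f in PORT_FIELDS):
            return True
    return False


if __name__ == "__main__":
    desired = [{'port_num': 80, 'protocol': 2, 'status': 1, 'weight': 1}]
    current = [{'port_num': 80, 'protocol': 2, 'status': 0, 'weight': 1}]
    print(port_list_needs_update(desired, current))  # True: 'status' differs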
login=target_user.get("login"), is_admin=target_user.get("isGrafanaAdmin")) param_dict = dict(email=email, name=name, login=login, is_admin=is_admin) return target_user_dict != param_dict def setup_module_object(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, required_if=[ ['state', 'present', ['name', 'email']], ]) return module argument_spec = url_argument_spec() # remove unnecessary arguments del argument_spec['force'] del argument_spec['force_basic_auth'] del argument_spec['http_agent'] argument_spec.update( url=dict(type='str', required=True), url_username=dict(aliases=['grafana_user'], default='admin'), url_password=dict(aliases=['grafana_password'], type='str', required=True, no_log=True), state=dict(choices=['present', 'absent'], default='present'), name=dict(type='str', required=False), email=dict(type='str', required=False),
def main(): module_spec = { 'admin_url': dict(required=False, default='http://localhost:8001', type='str'), 'url_username': dict(required=False, default=None, type='str', aliases=['admin_username']), 'url_password': dict(required=False, default=None, type='str', aliases=['admin_password'], no_log=True), 'action': dict(required=True, default=None, type='str', choices=[ 'create', 'delete', 'find', 'healthy', 'unhealthy', 'list' ]), 'upstream_id': dict(required=False, default=None, type='str', include=True, uuid=True, aliases=['upstream']), 'target': dict(required=False, default=None, type='str', include=True), 'weight': dict(required=False, default=None, type='int', include=True), 'size': dict(required=False, default=None, type='int', include=True), 'offset': dict(required=False, default=None, type='int', include=True), 'created_at': dict(required=False, default=None, type='int', include=False), 'updated_at': dict(required=False, default=None, type='int', include=False) } argument_spec = url_argument_spec() argument_spec.update(module_spec) module = AnsibleModule(argument_spec=argument_spec) api = KongTargetApi(module) try: if api.action == 'create': result = api.required('upstream_id, target').create() elif api.action == 'delete': result = api.required('upstream_id, target').delete() elif api.action == 'find': result = api.required('upstream_id, target').find() elif api.action == 'healthy': result = api.required('upstream_id, target').healthy() elif api.action == 'unhealthy': result = api.required('upstream_id, target').unhealthy() elif api.action == 'list': result = api.required('upstream_id').list() except ValueError as error: result = {'message': str(error), 'failed': True}
def main(): argument_spec = url_argument_spec() argument_spec.update(dict( dest=dict(type='path'), url_username=dict(type='str', aliases=['user']), url_password=dict(type='str', aliases=['password'], no_log=True), body=dict(type='raw'), body_format=dict(type='str', default='raw', choices=['raw', 'json']), method=dict(type='str', default='GET', choices=['GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH', 'TRACE', 'CONNECT', 'REFRESH']), return_content=dict(type='bool', default='no'), follow_redirects=dict(type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']), creates=dict(type='path'), removes=dict(type='path'), status_code=dict(type='list', default=[200]), timeout=dict(type='int', default=30), headers=dict(type='dict', default={}) )) module = AnsibleModule( argument_spec=argument_spec, # TODO: Remove check_invalid_arguments in 2.9 check_invalid_arguments=False, add_file_common_args=True ) url = module.params['url'] body = module.params['body'] body_format = module.params['body_format'].lower() method = module.params['method'] dest = module.params['dest'] return_content = module.params['return_content'] creates = module.params['creates'] removes = module.params['removes'] status_code = [int(x) for x in list(module.params['status_code'])] socket_timeout = module.params['timeout'] dict_headers = module.params['headers'] if body_format == 'json': # Encode the body unless its a string, then assume it is pre-formatted JSON if not isinstance(body, six.string_types): body = json.dumps(body) lower_header_keys = [key.lower() for key in dict_headers] if 'content-type' not in lower_header_keys: dict_headers['Content-Type'] = 'application/json' # TODO: Deprecated section. Remove in Ansible 2.9 # Grab all the http headers. Need this hack since passing multi-values is # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}') for key, value in six.iteritems(module.params): if key.startswith("HEADER_"): module.deprecate('Supplying headers via HEADER_* is deprecated. Please use `headers` to' ' supply headers for the request', version='2.9') skey = key.replace("HEADER_", "") dict_headers[skey] = value # End deprecated section if creates is not None: # do not run the command if the line contains creates=filename # and the filename already exists. This allows idempotence # of uri executions. if os.path.exists(creates): module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False, rc=0) if removes is not None: # do not run the command if the line contains removes=filename # and the filename do not exists. This allows idempotence # of uri executions. if not os.path.exists(removes): module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False, rc=0) # Make the request resp, content, dest = uri(module, url, dest, body, body_format, method, dict_headers, socket_timeout) resp['status'] = int(resp['status']) # Write the file out if requested if dest is not None: if resp['status'] == 304: changed = False else: write_file(module, url, dest, content) # allow file attribute changes changed = True module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest changed = module.set_fs_attributes_if_different(file_args, changed) resp['path'] = dest else: changed = False # Transmogrify the headers, replacing '-' with '_', since variables don't # work with dashes. # In python3, the headers are title cased. Lowercase them to be # compatible with the python2 behaviour. 
uresp = {} for key, value in six.iteritems(resp): ukey = key.replace("-", "_").lower() uresp[ukey] = value try: uresp['location'] = absolute_location(url, uresp['location']) except KeyError: pass # Default content_encoding to try content_encoding = 'utf-8' if 'content_type' in uresp: content_type, params = cgi.parse_header(uresp['content_type']) if 'charset' in params: content_encoding = params['charset'] u_content = to_text(content, encoding=content_encoding) if any(candidate in content_type for candidate in JSON_CANDIDATES): try: js = json.loads(u_content) uresp['json'] = js except ValueError: pass else: u_content = to_text(content, encoding=content_encoding) if resp['status'] not in status_code: uresp['msg'] = 'Status code was %s and not %s: %s' % (resp['status'], status_code, uresp.get('msg', '')) module.fail_json(content=u_content, **uresp) elif return_content: module.exit_json(changed=changed, content=u_content, **uresp) else: module.exit_json(changed=changed, **uresp)
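# Illustration (not part of the module above): the header normalization and
# charset handling used there can be exercised on their own. A minimal sketch;
# the sample header values are hypothetical.
import cgi

sample_resp = {'Content-Type': 'application/json; charset=ISO-8859-1', 'X-Request-Id': 'abc123'}

# Same transformation as the module: dashes become underscores, keys are lowercased
normalized = dict((key.replace('-', '_').lower(), value) for key, value in sample_resp.items())

# Pull the charset out of Content-Type to decide how to decode the body
encoding = 'utf-8'
if 'content_type' in normalized:
    ctype, params = cgi.parse_header(normalized['content_type'])
    if 'charset' in params:
        encoding = params['charset']

print(normalized['content_type'])  # application/json; charset=ISO-8859-1
print(encoding)                    # ISO-8859-1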
def main(): argument_spec = url_argument_spec() argument_spec.update( dict(dest=dict(required=False, default=None, type='path'), url_username=dict(required=False, default=None, aliases=['user']), url_password=dict(required=False, default=None, aliases=['password'], no_log=True), body=dict(required=False, default=None, type='raw'), body_format=dict(required=False, default='raw', choices=['raw', 'json']), method=dict(required=False, default='GET', choices=[ 'GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH', 'TRACE', 'CONNECT', 'REFRESH' ]), return_content=dict(required=False, default='no', type='bool'), follow_redirects=dict( required=False, default='safe', choices=['all', 'safe', 'none', 'yes', 'no']), creates=dict(required=False, default=None, type='path'), removes=dict(required=False, default=None, type='path'), status_code=dict(required=False, default=[200], type='list'), timeout=dict(required=False, default=30, type='int'), headers=dict(required=False, type='dict', default={}))) module = AnsibleModule(argument_spec=argument_spec, check_invalid_arguments=False, add_file_common_args=True) url = module.params['url'] body = module.params['body'] body_format = module.params['body_format'].lower() method = module.params['method'] dest = module.params['dest'] return_content = module.params['return_content'] creates = module.params['creates'] removes = module.params['removes'] status_code = [int(x) for x in list(module.params['status_code'])] socket_timeout = module.params['timeout'] dict_headers = module.params['headers'] if body_format == 'json': # Encode the body unless it's a string, then assume it is pre-formatted JSON if not isinstance(body, six.string_types): body = json.dumps(body) lower_header_keys = [key.lower() for key in dict_headers] if 'content-type' not in lower_header_keys: dict_headers['Content-Type'] = 'application/json' # Grab all the http headers. Need this hack since passing multi-values is # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}') for key, value in six.iteritems(module.params): if key.startswith("HEADER_"): skey = key.replace("HEADER_", "") dict_headers[skey] = value if creates is not None: # do not run the command if the line contains creates=filename # and the filename already exists. This allows idempotence # of uri executions. if os.path.exists(creates): module.exit_json(stdout="skipped, since %s exists" % creates, changed=False, stderr=False, rc=0) if removes is not None: # do not run the command if the line contains removes=filename # and the filename does not exist. This allows idempotence # of uri executions. if not os.path.exists(removes): module.exit_json(stdout="skipped, since %s does not exist" % removes, changed=False, stderr=False, rc=0) # Make the request resp, content, dest = uri(module, url, dest, body, body_format, method, dict_headers, socket_timeout) resp['status'] = int(resp['status']) # Write the file out if requested if dest is not None: if resp['status'] == 304: changed = False else: write_file(module, url, dest, content) # allow file attribute changes changed = True module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest changed = module.set_fs_attributes_if_different(file_args, changed) resp['path'] = dest else: changed = False # Transmogrify the headers, replacing '-' with '_', since variables don't # work with dashes. # In python3, the headers are title cased. Lowercase them to be # compatible with the python2 behaviour.
uresp = {} for key, value in six.iteritems(resp): ukey = key.replace("-", "_").lower() uresp[ukey] = value try: uresp['location'] = absolute_location(url, uresp['location']) except KeyError: pass # Default content_encoding to try content_encoding = 'utf-8' if 'content_type' in uresp: content_type, params = cgi.parse_header(uresp['content_type']) if 'charset' in params: content_encoding = params['charset'] u_content = to_text(content, encoding=content_encoding) if 'application/json' in content_type or 'text/json' in content_type: try: js = json.loads(u_content) uresp['json'] = js except ValueError: pass else: u_content = to_text(content, encoding=content_encoding) if resp['status'] not in status_code: uresp['msg'] = 'Status code was not %s: %s' % (status_code, uresp.get('msg', '')) module.fail_json(content=u_content, **uresp) elif return_content: module.exit_json(changed=changed, content=u_content, **uresp) else: module.exit_json(changed=changed, **uresp)
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( operation=dict(type='str', default='create', choices=['create', 'update', 'delete']), server_name=dict(type='str', aliases=['server'], required=True), server_ip=dict(type='str', aliases=['ip', 'address'], required=True), server_status=dict(type='str', default='enable', aliases=['action'], choices=['enable', 'disable']), server_ports=dict(type='list', aliases=['port'], default=[]), ) ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=False ) host = module.params['host'] username = module.params['username'] password = module.params['password'] operation = module.params['operation'] write_config = module.params['write_config'] slb_server = module.params['server_name'] slb_server_ip = module.params['server_ip'] slb_server_status = module.params['server_status'] slb_server_ports = module.params['server_ports'] axapi_base_url = 'https://{}/axapi/v3/'.format(host) axapi_auth_url = axapi_base_url + 'auth/' signature = axapi_authenticate_v3(module, axapi_auth_url, username, password) # validate the ports data structure validate_ports(module, slb_server_ports) json_post = { "server-list": [ { "name": slb_server, "host": slb_server_ip } ] } # add optional module parameters if slb_server_ports: json_post['server-list'][0]['port-list'] = slb_server_ports if slb_server_status: json_post['server-list'][0]['action'] = slb_server_status slb_server_data = axapi_call_v3(module, axapi_base_url+'slb/server/', method='GET', body='', signature=signature) # for empty slb server list if axapi_failure(slb_server_data): slb_server_exists = False else: slb_server_list = [server['name'] for server in slb_server_data['server-list']] if slb_server in slb_server_list: slb_server_exists = True else: slb_server_exists = False changed = False if operation == 'create': if slb_server_exists is False: result = axapi_call_v3(module, axapi_base_url+'slb/server/', method='POST', body=json.dumps(json_post), signature=signature) if axapi_failure(result): module.fail_json(msg="failed to create the server: %s" % result['response']['err']['msg']) changed = True else: module.fail_json(msg="server already exists, use state='update' instead") changed = False # if we changed things, get the full info regarding result if changed: result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='GET', body='', signature=signature) else: result = slb_server_data elif operation == 'delete': if slb_server_exists: result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='DELETE', body='', signature=signature) if axapi_failure(result): module.fail_json(msg="failed to delete server: %s" % result['response']['err']['msg']) changed = True else: result = dict(msg="the server was not present") elif operation == 'update': if slb_server_exists: result = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='PUT', body=json.dumps(json_post), signature=signature) if axapi_failure(result): module.fail_json(msg="failed to update server: %s" % result['response']['err']['msg']) changed = True else: result = dict(msg="the server was not present") # if the config has changed, save the config unless otherwise requested if changed and write_config: write_result = axapi_call_v3(module, axapi_base_url+'write/memory/', method='POST', body='', signature=signature) if axapi_failure(write_result): module.fail_json(msg="failed to save the configuration: %s" % 
write_result['response']['err']['msg']) # log out gracefully and exit axapi_call_v3(module, axapi_base_url + 'logoff/', method='POST', body='', signature=signature) module.exit_json(changed=changed, content=result)
def main(): # use the predefined argument spec for url argument_spec = url_argument_spec() # remove unnecessary arguments del argument_spec['force'] del argument_spec['force_basic_auth'] del argument_spec['http_agent'] argument_spec.update( state=dict(choices=['present', 'absent', 'export'], default='present'), url=dict(aliases=['grafana_url'], required=True), url_username=dict(aliases=['grafana_user'], default='admin'), url_password=dict(aliases=['grafana_password'], default='admin', no_log=True), grafana_api_key=dict(type='str', no_log=True), org_id=dict(default=1, type='int'), folder=dict(type='str', default='General'), uid=dict(type='str'), slug=dict(type='str'), path=dict(type='str'), overwrite=dict(type='bool', default=False), commit_message=dict( type='str', aliases=['message'], deprecated_aliases=[dict(name='message', version='2.14')]), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=False, required_together=[['url_username', 'url_password', 'org_id']], mutually_exclusive=[['grafana_user', 'grafana_api_key'], ['uid', 'slug']], ) if 'message' in module.params: module.fail_json( msg= "'message' is reserved keyword, please change this parameter to 'commit_message'" ) try: if module.params['state'] == 'present': result = grafana_create_dashboard(module, module.params) elif module.params['state'] == 'absent': result = grafana_delete_dashboard(module, module.params) else: result = grafana_export_dashboard(module, module.params) except GrafanaAPIException as e: module.fail_json(failed=True, msg="error : %s" % to_native(e)) return except GrafanaMalformedJson as e: module.fail_json(failed=True, msg="error : %s" % to_native(e)) return except GrafanaDeleteException as e: module.fail_json(failed=True, msg="error : Can't delete dashboard : %s" % to_native(e)) return except GrafanaExportException as e: module.fail_json(failed=True, msg="error : Can't export dashboard : %s" % to_native(e)) return module.exit_json(failed=False, **result) return
def main(): argument_spec = url_argument_spec() argument_spec.update( dest=dict(type='path'), url_username=dict(type='str', aliases=['user']), url_password=dict(type='str', aliases=['password'], no_log=True), body=dict(type='raw'), body_format=dict(type='str', default='raw', choices=['form-urlencoded', 'json', 'raw', 'form-multipart']), src=dict(type='path'), method=dict(type='str', default='GET'), return_content=dict(type='bool', default=False), follow_redirects=dict(type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']), creates=dict(type='path'), removes=dict(type='path'), status_code=dict(type='list', elements='int', default=[200]), timeout=dict(type='int', default=30), headers=dict(type='dict', default={}), unix_socket=dict(type='path'), remote_src=dict(type='bool', default=False), ca_path=dict(type='path', default=None), unredirected_headers=dict(type='list', elements='str', default=[]), decompress=dict(type='bool', default=True), ) module = AnsibleModule( argument_spec=argument_spec, add_file_common_args=True, mutually_exclusive=[['body', 'src']], ) url = module.params['url'] body = module.params['body'] body_format = module.params['body_format'].lower() method = module.params['method'].upper() dest = module.params['dest'] return_content = module.params['return_content'] creates = module.params['creates'] removes = module.params['removes'] status_code = [int(x) for x in list(module.params['status_code'])] socket_timeout = module.params['timeout'] ca_path = module.params['ca_path'] dict_headers = module.params['headers'] unredirected_headers = module.params['unredirected_headers'] decompress = module.params['decompress'] if not re.match('^[A-Z]+$', method): module.fail_json(msg="Parameter 'method' needs to be a single word in uppercase, like GET or POST.") if body_format == 'json': # Encode the body unless its a string, then assume it is pre-formatted JSON if not isinstance(body, string_types): body = json.dumps(body) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/json' elif body_format == 'form-urlencoded': if not isinstance(body, string_types): try: body = form_urlencoded(body) except ValueError as e: module.fail_json(msg='failed to parse body as form_urlencoded: %s' % to_native(e), elapsed=0) if 'content-type' not in [header.lower() for header in dict_headers]: dict_headers['Content-Type'] = 'application/x-www-form-urlencoded' elif body_format == 'form-multipart': try: content_type, body = prepare_multipart(body) except (TypeError, ValueError) as e: module.fail_json(msg='failed to parse body as form-multipart: %s' % to_native(e)) dict_headers['Content-Type'] = content_type if creates is not None: # do not run the command if the line contains creates=filename # and the filename already exists. This allows idempotence # of uri executions. if os.path.exists(creates): module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False) if removes is not None: # do not run the command if the line contains removes=filename # and the filename does not exist. This allows idempotence # of uri executions. 
if not os.path.exists(removes): module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False) # Make the request start = datetime.datetime.utcnow() r, info = uri(module, url, dest, body, body_format, method, dict_headers, socket_timeout, ca_path, unredirected_headers, decompress) elapsed = (datetime.datetime.utcnow() - start).seconds if r and dest is not None and os.path.isdir(dest): filename = get_response_filename(r) or 'index.html' dest = os.path.join(dest, filename) if r and r.fp is not None: # r may be None for some errors # r.fp may be None depending on the error, which means there are no headers either content_type, main_type, sub_type, content_encoding = parse_content_type(r) else: content_type = 'application/octet-stream' main_type = 'application' sub_type = 'octet-stream' content_encoding = 'utf-8' maybe_json = content_type and any(candidate in sub_type for candidate in JSON_CANDIDATES) maybe_output = maybe_json or return_content or info['status'] not in status_code if maybe_output: try: if PY3 and (r.fp is None or r.closed): raise TypeError content = r.read() except (AttributeError, TypeError): # there was no content, but the error read() # may have been stored in the info as 'body' content = info.pop('body', b'') elif r: content = r else: content = None resp = {} resp['redirected'] = info['url'] != url resp.update(info) resp['elapsed'] = elapsed resp['status'] = int(resp['status']) resp['changed'] = False # Write the file out if requested if r and dest is not None: if resp['status'] in status_code and resp['status'] != 304: write_file(module, dest, content, resp) # allow file attribute changes resp['changed'] = True module.params['path'] = dest file_args = module.load_file_common_arguments(module.params, path=dest) resp['changed'] = module.set_fs_attributes_if_different(file_args, resp['changed']) resp['path'] = dest # Transmogrify the headers, replacing '-' with '_', since variables don't # work with dashes. # In python3, the headers are title cased. Lowercase them to be # compatible with the python2 behaviour. uresp = {} for key, value in iteritems(resp): ukey = key.replace("-", "_").lower() uresp[ukey] = value if 'location' in uresp: uresp['location'] = absolute_location(url, uresp['location']) # Default content_encoding to try if isinstance(content, binary_type): u_content = to_text(content, encoding=content_encoding) if maybe_json: try: js = json.loads(u_content) uresp['json'] = js except Exception: if PY2: sys.exc_clear() # Avoid false positive traceback in fail_json() on Python 2 else: u_content = None if module.no_log_values: uresp = sanitize_keys(uresp, module.no_log_values, NO_MODIFY_KEYS) if resp['status'] not in status_code: uresp['msg'] = 'Status code was %s and not %s: %s' % (resp['status'], status_code, uresp.get('msg', '')) if return_content: module.fail_json(content=u_content, **uresp) else: module.fail_json(**uresp) elif return_content: module.exit_json(content=u_content, **uresp) else: module.exit_json(**uresp)
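# Illustration (not part of the module above): the 'form-urlencoded' body_format
# path relies on a form_urlencoded() helper that is not shown in this excerpt.
# A minimal stand-in using only the standard library; the payload is hypothetical.
from urllib.parse import urlencode

payload = {'name': 'demo', 'tags': ['web', 'prod']}
# doseq=True expands list values into repeated keys: name=demo&tags=web&tags=prod
encoded_body = urlencode(payload, doseq=True)
request_headers = {'Content-Type': 'application/x-www-form-urlencoded'}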
__metaclass__ = type from ansible.module_utils.urls import url_argument_spec import json # Seed the result result = dict( changed=False, name='default', msg='the best is coming' ) # Socket information idg_endpoint_spec = url_argument_spec() idg_endpoint_spec.update( timeout=dict(type='int', default=10), # The socket level timeout in seconds server=dict(type='str', required=True), # Remote IDG host to be used. server_port=dict(type='int', default=5554), # Remote IDG port to be used. url_username=dict(required=False, aliases=['user']), url_password=dict(required=False, aliases=['password'], no_log=True), ) class IDG_Utils(object): """ Utility class with shared constants and helpers """ ############################# # Constants #############################
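# Illustration (not part of the shared utilities above): a hedged sketch of how a
# module built on them might fold idg_endpoint_spec into its own argument spec.
# The extra 'domain' option is hypothetical.
from ansible.module_utils.basic import AnsibleModule

def example_main():
    argument_spec = dict(idg_endpoint_spec)                        # shared connection options
    argument_spec.update(domain=dict(type='str', required=True))   # hypothetical module option
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    module.exit_json(changed=False, msg='connection options parsed')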
def main(): argument_spec = url_argument_spec() argument_spec.update( url=dict(type='str', required=True), dest=dict(type='path', required=True), backup=dict(type='bool'), sha256sum=dict(type='str', default=''), checksum=dict(type='str', default=''), timeout=dict(type='int', default=10), headers=dict(type='raw'), tmp_dest=dict(type='path'), ) module = AnsibleModule( # not checking because of daisy chain to file module argument_spec=argument_spec, add_file_common_args=True, supports_check_mode=True, mutually_exclusive=(['checksum', 'sha256sum']), ) url = module.params['url'] dest = module.params['dest'] backup = module.params['backup'] force = module.params['force'] sha256sum = module.params['sha256sum'] checksum = module.params['checksum'] use_proxy = module.params['use_proxy'] timeout = module.params['timeout'] tmp_dest = module.params['tmp_dest'] # Parse headers to dict if isinstance(module.params['headers'], dict): headers = module.params['headers'] elif module.params['headers']: try: headers = dict(item.split(':', 1) for item in module.params['headers'].split(',')) module.deprecate('Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`', version='2.10') except Exception: module.fail_json(msg="The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.") else: headers = None dest_is_dir = os.path.isdir(dest) last_mod_time = None # workaround for usage of deprecated sha256sum parameter if sha256sum: checksum = 'sha256:%s' % (sha256sum) # checksum specified, parse for algorithm and checksum if checksum: try: algorithm, checksum = checksum.rsplit(':', 1) # Remove any non-alphanumeric characters, including the infamous # Unicode zero-width space checksum = re.sub(r'\W+', '', checksum).lower() # Ensure the checksum portion is a hexdigest int(checksum, 16) except ValueError: module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>") if not dest_is_dir and os.path.exists(dest): checksum_mismatch = False # If the download is not forced and there is a checksum, allow # checksum match to skip the download. if not force and checksum != '': destination_checksum = module.digest_from_file(dest, algorithm) if checksum == destination_checksum: module.exit_json(msg="file already exists", dest=dest, url=url, changed=False) checksum_mismatch = True # Not forcing redownload, unless checksum does not match if not force and not checksum_mismatch: # allow file attribute changes module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest changed = module.set_fs_attributes_if_different(file_args, False) if changed: module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed) module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed) # If the file already exists, prepare the last modified time for the # request. mtime = os.path.getmtime(dest) last_mod_time = datetime.datetime.utcfromtimestamp(mtime) # If the checksum does not match we have to force the download # because last_mod_time may be newer than on remote if checksum_mismatch: force = True # download to tmpsrc tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest) # Now the request has completed, we can finally generate the final # destination file name from the info dict. 
if dest_is_dir: filename = extract_filename_from_headers(info) if not filename: # Fall back to extracting the filename from the URL. # Pluck the URL from the info, since a redirect could have changed # it. filename = url_filename(info['url']) dest = os.path.join(dest, filename) checksum_src = None checksum_dest = None # If the remote URL exists, we're done with check mode if module.check_mode: os.remove(tmpsrc) res_args = dict(url=url, dest=dest, src=tmpsrc, changed=True, msg=info.get('msg', '')) module.exit_json(**res_args) # raise an error if there is no tmpsrc file if not os.path.exists(tmpsrc): os.remove(tmpsrc) module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg']) if not os.access(tmpsrc, os.R_OK): os.remove(tmpsrc) module.fail_json(msg="Source %s is not readable" % (tmpsrc)) checksum_src = module.sha1(tmpsrc) # check if there is no dest file if os.path.exists(dest): # raise an error if copy has no permission on dest if not os.access(dest, os.W_OK): os.remove(tmpsrc) module.fail_json(msg="Destination %s is not writable" % (dest)) if not os.access(dest, os.R_OK): os.remove(tmpsrc) module.fail_json(msg="Destination %s is not readable" % (dest)) checksum_dest = module.sha1(dest) else: if not os.path.exists(os.path.dirname(dest)): os.remove(tmpsrc) module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest))) if not os.access(os.path.dirname(dest), os.W_OK): os.remove(tmpsrc) module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest))) backup_file = None if checksum_src != checksum_dest: try: if backup: if os.path.exists(dest): backup_file = module.backup_local(dest) module.atomic_move(tmpsrc, dest) except Exception as e: if os.path.exists(tmpsrc): os.remove(tmpsrc) module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)), exception=traceback.format_exc()) changed = True else: changed = False if os.path.exists(tmpsrc): os.remove(tmpsrc) if checksum != '': destination_checksum = module.digest_from_file(dest, algorithm) if checksum != destination_checksum: os.remove(dest) module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum)) # allow file attribute changes module.params['path'] = dest file_args = module.load_file_common_arguments(module.params) file_args['path'] = dest changed = module.set_fs_attributes_if_different(file_args, changed) # Backwards compat only. We'll return None on FIPS enabled systems try: md5sum = module.md5(dest) except ValueError: md5sum = None res_args = dict( url=url, dest=dest, src=tmpsrc, md5sum=md5sum, checksum_src=checksum_src, checksum_dest=checksum_dest, changed=changed, msg=info.get('msg', ''), status_code=info.get('status', '') ) if backup_file: res_args['backup_file'] = backup_file # Mission complete module.exit_json(**res_args)
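# Illustration (not part of the module above): the <algorithm>:<checksum> parsing
# and verification used there, reproduced standalone with hashlib. The file path
# and digest value are hypothetical.
import hashlib
import re

spec = 'sha256:9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'
algorithm, digest = spec.rsplit(':', 1)
digest = re.sub(r'\W+', '', digest).lower()
int(digest, 16)  # raises ValueError if the digest is not hexadecimal

checksum = hashlib.new(algorithm)
with open('/tmp/downloaded.file', 'rb') as src:  # hypothetical download target
    for chunk in iter(lambda: src.read(65536), b''):
        checksum.update(chunk)
if checksum.hexdigest() != digest:
    raise SystemExit('checksum mismatch: got %s' % checksum.hexdigest())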
def main(): argument_spec = a10_argument_spec() argument_spec.update(url_argument_spec()) argument_spec.update( dict( operation=dict(type='str', default='create', choices=['create', 'update', 'delete']), server_name=dict(type='str', aliases=['server'], required=True), server_ip=dict(type='str', aliases=['ip', 'address'], required=True), server_status=dict(type='str', default='enable', aliases=['action'], choices=['enable', 'disable']), server_ports=dict(type='list', aliases=['port'], default=[]), )) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) host = module.params['host'] username = module.params['username'] password = module.params['password'] operation = module.params['operation'] write_config = module.params['write_config'] slb_server = module.params['server_name'] slb_server_ip = module.params['server_ip'] slb_server_status = module.params['server_status'] slb_server_ports = module.params['server_ports'] axapi_base_url = 'https://{}/axapi/v3/'.format(host) axapi_auth_url = axapi_base_url + 'auth/' signature = axapi_authenticate_v3(module, axapi_auth_url, username, password) # validate the ports data structure validate_ports(module, slb_server_ports) json_post = {"server-list": [{"name": slb_server, "host": slb_server_ip}]} # add optional module parameters if slb_server_ports: json_post['server-list'][0]['port-list'] = slb_server_ports if slb_server_status: json_post['server-list'][0]['action'] = slb_server_status slb_server_data = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='GET', body='', signature=signature) # for empty slb server list if axapi_failure(slb_server_data): slb_server_exists = False else: slb_server_list = [ server['name'] for server in slb_server_data['server-list'] ] if slb_server in slb_server_list: slb_server_exists = True else: slb_server_exists = False changed = False if operation == 'create': if slb_server_exists == False: result = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='POST', body=json.dumps(json_post), signature=signature) if axapi_failure(result): module.fail_json(msg="failed to create the server: %s" % result['response']['err']['msg']) changed = True else: module.fail_json( msg="server already exists, use state='update' instead") changed = False # if we changed things, get the full info regarding result if changed: result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='GET', body='', signature=signature) else: result = slb_server_data elif operation == 'delete': if slb_server_exists: result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='DELETE', body='', signature=signature) if axapi_failure(result): module.fail_json(msg="failed to delete server: %s" % result['response']['err']['msg']) changed = True else: result = dict(msg="the server was not present") elif operation == 'update': if slb_server_exists: result = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='PUT', body=json.dumps(json_post), signature=signature) if axapi_failure(result): module.fail_json(msg="failed to update server: %s" % result['response']['err']['msg']) changed = True else: result = dict(msg="the server was not present") # if the config has changed, save the config unless otherwise requested if changed and write_config: write_result = axapi_call_v3(module, axapi_base_url + 'write/memory/', method='POST', body='', signature=signature) if axapi_failure(write_result): module.fail_json(msg="failed to save the configuration: %s" % 
write_result['response']['err']['msg']) # log out gracefully and exit axapi_call_v3(module, axapi_base_url + 'logoff/', method='POST', body='', signature=signature) module.exit_json(changed=changed, content=result)
def main(): # Module arguments argument_spec = url_argument_spec() argument_spec.update( group=dict(default='jenkins'), jenkins_home=dict(default='/var/lib/jenkins'), mode=dict(default='0644', type='raw'), name=dict(required=True), owner=dict(default='jenkins'), params=dict(type='dict'), state=dict(choices=[ 'present', 'absent', 'pinned', 'unpinned', 'enabled', 'disabled', 'latest' ], default='present'), timeout=dict(default=30, type="int"), updates_expiration=dict(default=86400, type="int"), updates_url=dict(default='https://updates.jenkins-ci.org'), url=dict(default='http://localhost:8080'), url_password=dict(no_log=True), version=dict(), with_dependencies=dict(default=True, type='bool'), ) # Module settings module = AnsibleModule( argument_spec=argument_spec, add_file_common_args=True, supports_check_mode=True, ) # Update module parameters by user's parameters if defined if 'params' in module.params and isinstance(module.params['params'], dict): module.params.update(module.params['params']) # Remove the params module.params.pop('params', None) # Force basic authentication module.params['force_basic_auth'] = True # Convert timeout to float try: module.params['timeout'] = float(module.params['timeout']) except ValueError: e = get_exception() module.fail_json(msg='Cannot convert %s to float.' % module.params['timeout'], details=to_native(e)) # Set version to latest if state is latest if module.params['state'] == 'latest': module.params['state'] = 'present' module.params['version'] = 'latest' # Create some shortcuts name = module.params['name'] state = module.params['state'] # Initial change state of the task changed = False # Instantiate the JenkinsPlugin object jp = JenkinsPlugin(module) # Perform action depending on the requested state if state == 'present': changed = jp.install() elif state == 'absent': changed = jp.uninstall() elif state == 'pinned': changed = jp.pin() elif state == 'unpinned': changed = jp.unpin() elif state == 'enabled': changed = jp.enable() elif state == 'disabled': changed = jp.disable() # Print status of the change module.exit_json(changed=changed, plugin=name, state=state)
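# Illustration (not part of the module above): the free-form 'params' dict is
# merged over the module's own parameters before the JenkinsPlugin object is
# created. The same merge, standalone and with hypothetical values:
module_params = {'name': 'git', 'timeout': 30,
                 'params': {'url_username': 'admin', 'url_password': 'secret'}}
if isinstance(module_params.get('params'), dict):
    module_params.update(module_params['params'])
module_params.pop('params', None)
# module_params now carries url_username/url_password alongside name and timeout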
def main(): argument_spec = url_argument_spec() argument_spec.update( add_export_distributor=dict(default=False, type='bool'), feed=dict(), generate_sqlite=dict(default=False, type='bool'), feed_ca_cert=dict( aliases=['importer_ssl_ca_cert', 'ca_cert'], deprecated_aliases=[dict(name='ca_cert', version='2.14')]), feed_client_cert=dict(aliases=['importer_ssl_client_cert']), feed_client_key=dict(aliases=['importer_ssl_client_key']), name=dict(required=True, aliases=['repo']), proxy_host=dict(), proxy_port=dict(), proxy_username=dict(), proxy_password=dict(no_log=True), publish_distributor=dict(), pulp_host=dict(default="https://127.0.0.1"), relative_url=dict(), repo_type=dict(default="rpm"), repoview=dict(default=False, type='bool'), serve_http=dict(default=False, type='bool'), serve_https=dict(default=True, type='bool'), state=dict(default="present", choices=['absent', 'present', 'sync', 'publish']), wait_for_completion=dict(default=False, type="bool")) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) add_export_distributor = module.params['add_export_distributor'] feed = module.params['feed'] generate_sqlite = module.params['generate_sqlite'] importer_ssl_ca_cert = module.params['feed_ca_cert'] importer_ssl_client_cert = module.params['feed_client_cert'] if importer_ssl_client_cert is None and module.params[ 'client_cert'] is not None: importer_ssl_client_cert = module.params['client_cert'] module.deprecate( "To specify client certificates to be used with the repo to sync, and not for communication with the " "Pulp instance, use the new options `feed_client_cert` and `feed_client_key` (available since " "Ansible 2.9.2). Until Ansible 2.14, the default value for `feed_client_cert` will be taken from " "`client_cert` if only the latter is specified", version="2.14") importer_ssl_client_key = module.params['feed_client_key'] if importer_ssl_client_key is None and module.params[ 'client_key'] is not None: importer_ssl_client_key = module.params['client_key'] module.deprecate( "In Ansible 2.9.2 `feed_client_key` option was added. 
Until 2.14 the default value will come from client_key option", version="2.14") proxy_host = module.params['proxy_host'] proxy_port = module.params['proxy_port'] proxy_username = module.params['proxy_username'] proxy_password = module.params['proxy_password'] publish_distributor = module.params['publish_distributor'] pulp_host = module.params['pulp_host'] relative_url = module.params['relative_url'] repo = module.params['name'] repo_type = module.params['repo_type'] repoview = module.params['repoview'] serve_http = module.params['serve_http'] serve_https = module.params['serve_https'] state = module.params['state'] wait_for_completion = module.params['wait_for_completion'] if (state == 'present') and (not relative_url): module.fail_json( msg="When state is present, relative_url is required.") # Ensure that the importer_ssl_* is the content and not a file path if importer_ssl_ca_cert is not None: importer_ssl_ca_cert_file_path = os.path.abspath(importer_ssl_ca_cert) if os.path.isfile(importer_ssl_ca_cert_file_path): importer_ssl_ca_cert_file_object = open( importer_ssl_ca_cert_file_path, 'r') try: importer_ssl_ca_cert = importer_ssl_ca_cert_file_object.read() finally: importer_ssl_ca_cert_file_object.close() if importer_ssl_client_cert is not None: importer_ssl_client_cert_file_path = os.path.abspath( importer_ssl_client_cert) if os.path.isfile(importer_ssl_client_cert_file_path): importer_ssl_client_cert_file_object = open( importer_ssl_client_cert_file_path, 'r') try: importer_ssl_client_cert = importer_ssl_client_cert_file_object.read( ) finally: importer_ssl_client_cert_file_object.close() if importer_ssl_client_key is not None: importer_ssl_client_key_file_path = os.path.abspath( importer_ssl_client_key) if os.path.isfile(importer_ssl_client_key_file_path): importer_ssl_client_key_file_object = open( importer_ssl_client_key_file_path, 'r') try: importer_ssl_client_key = importer_ssl_client_key_file_object.read( ) finally: importer_ssl_client_key_file_object.close() server = pulp_server(module, pulp_host, repo_type, wait_for_completion=wait_for_completion) server.set_repo_list() repo_exists = server.check_repo_exists(repo) changed = False if state == 'absent' and repo_exists: if not module.check_mode: server.delete_repo(repo) changed = True if state == 'sync': if not repo_exists: module.fail_json( msg= "Repository was not found. The repository can not be synced.") if not module.check_mode: server.sync_repo(repo) changed = True if state == 'publish': if not repo_exists: module.fail_json( msg= "Repository was not found. The repository can not be published." ) if not module.check_mode: server.publish_repo(repo, publish_distributor) changed = True if state == 'present': if not repo_exists: if not module.check_mode: server.create_repo( repo_id=repo, relative_url=relative_url, feed=feed, generate_sqlite=generate_sqlite, serve_http=serve_http, serve_https=serve_https, proxy_host=proxy_host, proxy_port=proxy_port, proxy_username=proxy_username, proxy_password=proxy_password, repoview=repoview, ssl_ca_cert=importer_ssl_ca_cert, ssl_client_cert=importer_ssl_client_cert, ssl_client_key=importer_ssl_client_key, add_export_distributor=add_export_distributor) changed = True else: # Check to make sure all the settings are correct # The importer config gets overwritten on set and not updated, so # we set the whole config at the same time. 
if not server.compare_repo_importer_config( repo, feed=feed, proxy_host=proxy_host, proxy_port=proxy_port, proxy_username=proxy_username, proxy_password=proxy_password, ssl_ca_cert=importer_ssl_ca_cert, ssl_client_cert=importer_ssl_client_cert, ssl_client_key=importer_ssl_client_key): if not module.check_mode: server.update_repo_importer_config( repo, feed=feed, proxy_host=proxy_host, proxy_port=proxy_port, proxy_username=proxy_username, proxy_password=proxy_password, ssl_ca_cert=importer_ssl_ca_cert, ssl_client_cert=importer_ssl_client_cert, ssl_client_key=importer_ssl_client_key) changed = True if relative_url is not None: if not server.compare_repo_distributor_config( repo, relative_url=relative_url): if not module.check_mode: server.update_repo_distributor_config( repo, relative_url=relative_url) changed = True if not server.compare_repo_distributor_config( repo, generate_sqlite=generate_sqlite): if not module.check_mode: server.update_repo_distributor_config( repo, generate_sqlite=generate_sqlite) changed = True if not server.compare_repo_distributor_config(repo, repoview=repoview): if not module.check_mode: server.update_repo_distributor_config(repo, repoview=repoview) changed = True if not server.compare_repo_distributor_config(repo, http=serve_http): if not module.check_mode: server.update_repo_distributor_config(repo, http=serve_http) changed = True if not server.compare_repo_distributor_config(repo, https=serve_https): if not module.check_mode: server.update_repo_distributor_config(repo, https=serve_https) changed = True module.exit_json(changed=changed, repo=repo)
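# Illustration (not part of the module above): the importer_ssl_* options accept
# either PEM content or a path to a PEM file. The normalization applied above can
# be captured in a small helper; the path below is hypothetical.
import os

def pem_content(value):
    """Return certificate/key content, reading it from disk when value is a file path."""
    if value is None:
        return None
    path = os.path.abspath(value)
    if os.path.isfile(path):
        with open(path, 'r') as pem_file:
            return pem_file.read()
    return value  # already the PEM content itself

feed_ca = pem_content('/etc/pki/pulp/feed-ca.pem')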